This commit is contained in:
nym21
2025-11-14 12:09:58 +01:00
parent 1d2c927d94
commit e8f77ab2e5
46 changed files with 1400 additions and 1394 deletions

291
Cargo.lock generated
View File

@@ -363,6 +363,24 @@ dependencies = [
"virtue",
]
[[package]]
name = "bindgen"
version = "0.72.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "993776b509cfb49c750f11b8f07a46fa23e0a1386ffc01fb1e7d343efc387895"
dependencies = [
"bitflags 2.10.0",
"cexpr",
"clang-sys",
"itertools 0.13.0",
"proc-macro2",
"quote",
"regex",
"rustc-hash",
"shlex",
"syn 2.0.110",
]
[[package]]
name = "bit-set"
version = "0.8.0"
@@ -534,6 +552,7 @@ version = "0.0.111"
dependencies = [
"brk_error",
"brk_logger",
"libproc",
"parking_lot",
]
@@ -595,6 +614,7 @@ name = "brk_computer"
version = "0.0.111"
dependencies = [
"bitcoin",
"brk_bencher",
"brk_error",
"brk_fetcher",
"brk_grouper",
@@ -627,7 +647,7 @@ dependencies = [
"fjall",
"jiff",
"minreq",
"sonic-rs",
"serde_json",
"tokio",
"vecdb",
"zerocopy",
@@ -642,7 +662,7 @@ dependencies = [
"brk_types",
"log",
"minreq",
"sonic-rs",
"serde_json",
]
[[package]]
@@ -853,7 +873,7 @@ dependencies = [
"dunce",
"futures",
"indexmap",
"itertools",
"itertools 0.14.0",
"itoa",
"json-escape-simd",
"memchr",
@@ -893,7 +913,7 @@ dependencies = [
"dashmap",
"derive_more",
"fast-glob",
"itertools",
"itertools 0.14.0",
"num-bigint",
"oxc",
"oxc_ecmascript",
@@ -1094,7 +1114,7 @@ dependencies = [
"brk_rolldown_fs",
"brk_rolldown_utils",
"dashmap",
"itertools",
"itertools 0.14.0",
"oxc_resolver",
"sugar_path",
]
@@ -1154,7 +1174,7 @@ dependencies = [
"itoa",
"memchr",
"mime",
"nom",
"nom 8.0.0",
"oxc",
"oxc_index",
"phf",
@@ -1216,7 +1236,8 @@ dependencies = [
"log",
"quick_cache",
"schemars",
"sonic-rs",
"serde",
"serde_json",
"tokio",
"tower-http",
"tracing",
@@ -1273,7 +1294,7 @@ version = "0.0.111"
dependencies = [
"bitcoin",
"brk_error",
"byteview 0.6.1",
"byteview 0.8.0",
"derive_deref",
"itoa",
"jiff",
@@ -1376,6 +1397,15 @@ dependencies = [
"shlex",
]
[[package]]
name = "cexpr"
version = "0.6.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "6fac387a98bb7c37292057cffc56d62ecb629900026402633ae9160df93a8766"
dependencies = [
"nom 7.1.3",
]
[[package]]
name = "cfb"
version = "0.7.3"
@@ -1413,6 +1443,17 @@ dependencies = [
"windows-link",
]
[[package]]
name = "clang-sys"
version = "1.8.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "0b023947811758c97c59bf9d1c188fd619ad4718dcaa767947df1cadb14f39f4"
dependencies = [
"glob",
"libc",
"libloading",
]
[[package]]
name = "clap"
version = "4.5.51"
@@ -1707,9 +1748,9 @@ checksum = "460fbee9c2c2f33933d720630a6a0bac33ba7053db5344fac858d4b8952d77d5"
[[package]]
name = "crypto-common"
version = "0.1.6"
version = "0.1.7"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "1bfb12502f3fc46cca1bb51ac28df9d618d813cdc3d2f25b9fe775a34af26bb3"
checksum = "78c8292055d1c1df0cce5d180393dc8cce0abec0a7102adb6c7b1eef6016d60a"
dependencies = [
"generic-array",
"typenum",
@@ -2040,18 +2081,6 @@ version = "2.3.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "37909eebbb50d72f9059c3b6d82c0463f2ff062c9e95845c43a6c9c0355411be"
[[package]]
name = "faststr"
version = "0.2.32"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "baec6a0289d7f1fe5665586ef7340af82e3037207bef60f5785e57569776f0c8"
dependencies = [
"bytes",
"rkyv",
"serde",
"simdutf8",
]
[[package]]
name = "fdeflate"
version = "0.3.7"
@@ -2087,14 +2116,14 @@ checksum = "1d674e81391d1e1ab681a28d99df07927c6d4aa5b027d7da16ba32d1d21ecd99"
[[package]]
name = "fjall"
version = "3.0.0-pre.5"
version = "3.0.0-pre.6"
dependencies = [
"byteorder-lite",
"byteview 0.8.0",
"dashmap",
"flume",
"log",
"lsm-tree 3.0.0-pre.5",
"lsm-tree 3.0.0-pre.6",
"lz4_flex",
"tempfile",
"xxhash-rust",
@@ -2325,9 +2354,9 @@ dependencies = [
[[package]]
name = "generic-array"
version = "0.14.9"
version = "0.14.7"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "4bb6743198531e02858aeaea5398fcc883e71851fcbcb5a2f773e2fb6cb1edf2"
checksum = "85649ca51fd72272d7821adaf274ad91c288277713d9c18820d8499a7ff69e9a"
dependencies = [
"typenum",
"version_check",
@@ -2372,6 +2401,12 @@ version = "0.32.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "e629b9b98ef3dd8afe6ca2bd0f89306cec16d43d907889945bc5d6687f2f13c7"
[[package]]
name = "glob"
version = "0.3.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "0cc23270f6e1808e30a928bdc84dea0b9b4136a8bc82338574f23baf47bbd280"
[[package]]
name = "guardian"
version = "1.3.0"
@@ -2493,9 +2528,9 @@ checksum = "df3b46402a9d5adb4c86a0cf463f42e19994e3ee891101b1841f30a545cb49a9"
[[package]]
name = "hyper"
version = "1.7.0"
version = "1.8.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "eb3aa54a13a0dfe7fbe3a59e0c76093041720fdc77b110cc0fc260fafb4dc51e"
checksum = "1744436df46f0bde35af3eda22aeaba453aada65d8f1c171cd8a5f59030bd69f"
dependencies = [
"atomic-waker",
"bytes",
@@ -2736,6 +2771,15 @@ version = "1.70.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "a6cb138bb79a146c1bd460005623e142ef0181e3d0219cb493e02f7d08a35695"
[[package]]
name = "itertools"
version = "0.13.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "413ee7dfc52ee1a4949ceeb7dbc8a33f2d6c088194d9f922fb8318faf1f01186"
dependencies = [
"either",
]
[[package]]
name = "itertools"
version = "0.14.0"
@@ -2887,6 +2931,17 @@ dependencies = [
"windows-link",
]
[[package]]
name = "libproc"
version = "0.14.11"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "a54ad7278b8bc5301d5ffd2a94251c004feb971feba96c971ea4063645990757"
dependencies = [
"bindgen",
"errno",
"libc",
]
[[package]]
name = "libredox"
version = "0.1.10"
@@ -2960,7 +3015,7 @@ dependencies = [
[[package]]
name = "lsm-tree"
version = "3.0.0-pre.5"
version = "3.0.0-pre.6"
dependencies = [
"byteorder-lite",
"byteview 0.8.0",
@@ -3014,6 +3069,12 @@ version = "0.3.17"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "6877bb514081ee2a7ff5ef9de3281f14a4dd4bceac4c09388074a6b5df8a139a"
[[package]]
name = "minimal-lexical"
version = "0.2.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "68354c5c6bd36d73ff3feceb05efa59b6acb7626617f4962be322a825e61f79a"
[[package]]
name = "miniz_oxide"
version = "0.8.9"
@@ -3049,26 +3110,6 @@ dependencies = [
"windows-sys 0.61.2",
]
[[package]]
name = "munge"
version = "0.4.7"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "5e17401f259eba956ca16491461b6e8f72913a0a114e39736ce404410f915a0c"
dependencies = [
"munge_macro",
]
[[package]]
name = "munge_macro"
version = "0.4.7"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "4568f25ccbd45ab5d5603dc34318c1ec56b117531781260002151b8530a9f931"
dependencies = [
"proc-macro2",
"quote",
"syn 2.0.110",
]
[[package]]
name = "nibble_vec"
version = "0.1.0"
@@ -3090,6 +3131,16 @@ dependencies = [
"libc",
]
[[package]]
name = "nom"
version = "7.1.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "d273983c5a657a70a3e8f2a01329822f3b8c8172b73826411a55751e404a0a4a"
dependencies = [
"memchr",
"minimal-lexical",
]
[[package]]
name = "nom"
version = "8.0.0"
@@ -3280,7 +3331,7 @@ checksum = "f978be538ca5e2a64326d24b7991dc658cc8495132833ae387212ab3b8abd70a"
dependencies = [
"bincode",
"flate2",
"nom",
"nom 8.0.0",
"rustc-hash",
"serde",
"serde_json",
@@ -3377,7 +3428,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "388f6e0dd1a825de3a11d94af540726539cccd428651fee3bab841da196549d4"
dependencies = [
"bitflags 2.10.0",
"itertools",
"itertools 0.14.0",
"oxc_index",
"oxc_syntax",
"petgraph",
@@ -3498,7 +3549,7 @@ version = "0.96.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "ba3695e3b0694093d24f0f7c8b77dc36b1a4b55020bf52166271010c8095bba5"
dependencies = [
"itertools",
"itertools 0.14.0",
"oxc_allocator",
"oxc_ast",
"oxc_data_structures",
@@ -3575,9 +3626,9 @@ dependencies = [
[[package]]
name = "oxc_resolver"
version = "11.13.1"
version = "11.13.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "8ab3f270c313ac7f814ce73a64f4bdf36a183dcae4593b2f96c6e6afea8c99d0"
checksum = "0f1af25f894076eedc44509ad0cc33afb829aa06ec3f23e395f47bcbc1c6e964"
dependencies = [
"cfg-if",
"indexmap",
@@ -3604,7 +3655,7 @@ version = "0.96.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "c56958658ca1f9f5f050dc4317821255d2ca132763b6fbee9227e45ef79ed173"
dependencies = [
"itertools",
"itertools 0.14.0",
"oxc_allocator",
"oxc_ast",
"oxc_ast_visit",
@@ -4065,26 +4116,6 @@ dependencies = [
"unicode-ident",
]
[[package]]
name = "ptr_meta"
version = "0.3.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "0b9a0cf95a1196af61d4f1cbdab967179516d9a4a4312af1f31948f8f6224a79"
dependencies = [
"ptr_meta_derive",
]
[[package]]
name = "ptr_meta_derive"
version = "0.3.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "7347867d0a7e1208d93b46767be83e2b8f978c3dad35f775ac8d8847551d6fe1"
dependencies = [
"proc-macro2",
"quote",
"syn 2.0.110",
]
[[package]]
name = "quick_cache"
version = "0.6.18"
@@ -4131,15 +4162,6 @@ dependencies = [
"nibble_vec",
]
[[package]]
name = "rancor"
version = "0.1.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "a063ea72381527c2a0561da9c80000ef822bdd7c3241b1cc1b12100e3df081ee"
dependencies = [
"ptr_meta",
]
[[package]]
name = "rand"
version = "0.8.5"
@@ -4219,7 +4241,7 @@ dependencies = [
[[package]]
name = "rawdb"
version = "0.3.16"
version = "0.3.20"
dependencies = [
"libc",
"log",
@@ -4327,12 +4349,6 @@ dependencies = [
"memchr",
]
[[package]]
name = "rend"
version = "0.5.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "cadadef317c2f20755a64d7fdc48f9e7178ee6b0e1f7fce33fa60f1d68a276e6"
[[package]]
name = "ring"
version = "0.17.14"
@@ -4347,35 +4363,6 @@ dependencies = [
"windows-sys 0.52.0",
]
[[package]]
name = "rkyv"
version = "0.8.12"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "35a640b26f007713818e9a9b65d34da1cf58538207b052916a83d80e43f3ffa4"
dependencies = [
"bytes",
"hashbrown 0.15.5",
"indexmap",
"munge",
"ptr_meta",
"rancor",
"rend",
"rkyv_derive",
"tinyvec",
"uuid",
]
[[package]]
name = "rkyv_derive"
version = "0.8.12"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "bd83f5f173ff41e00337d97f6572e416d022ef8a19f371817259ae960324c482"
dependencies = [
"proc-macro2",
"quote",
"syn 2.0.110",
]
[[package]]
name = "rolldown-ariadne"
version = "0.5.3"
@@ -4816,45 +4803,6 @@ dependencies = [
"windows-sys 0.60.2",
]
[[package]]
name = "sonic-number"
version = "0.1.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "a8a74044c092f4f43ca7a6cfd62854cf9fb5ac8502b131347c990bf22bef1dfe"
dependencies = [
"cfg-if",
]
[[package]]
name = "sonic-rs"
version = "0.5.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "4425ea8d66ec950e0a8f2ef52c766cc3d68d661d9a0845c353c40833179fd866"
dependencies = [
"ahash",
"bumpalo",
"bytes",
"cfg-if",
"faststr",
"itoa",
"ref-cast",
"ryu",
"serde",
"simdutf8",
"sonic-number",
"sonic-simd",
"thiserror 2.0.17",
]
[[package]]
name = "sonic-simd"
version = "0.1.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "5707edbfb34a40c9f2a55fa09a49101d9fec4e0cc171ce386086bd9616f34257"
dependencies = [
"cfg-if",
]
[[package]]
name = "spin"
version = "0.9.8"
@@ -5087,21 +5035,6 @@ dependencies = [
"zerovec",
]
[[package]]
name = "tinyvec"
version = "1.10.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "bfa5fdc3bce6191a1dbc8c02d5c8bffcf557bafa17c124c5264a458f1b0613fa"
dependencies = [
"tinyvec_macros",
]
[[package]]
name = "tinyvec_macros"
version = "0.1.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "1f3ccbac311fea05f86f61904b462b55fb3df8837a366dfc601a0161d0532f20"
[[package]]
name = "tokio"
version = "1.48.0"
@@ -5524,7 +5457,7 @@ checksum = "8f54a172d0620933a27a4360d3db3e2ae0dd6cceae9730751a036bbf182c4b23"
[[package]]
name = "vecdb"
version = "0.3.16"
version = "0.3.20"
dependencies = [
"ctrlc",
"log",
@@ -5533,14 +5466,14 @@ dependencies = [
"rawdb",
"serde",
"serde_derive",
"sonic-rs",
"serde_json",
"vecdb_derive",
"zerocopy",
]
[[package]]
name = "vecdb_derive"
version = "0.3.16"
version = "0.3.20"
dependencies = [
"quote",
"syn 2.0.110",
@@ -5661,9 +5594,9 @@ checksum = "5f20c57d8d7db6d3b86154206ae5d8fba62dd39573114de97c2cb0578251f8e1"
[[package]]
name = "weezl"
version = "0.1.11"
version = "0.1.12"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "009936b22a61d342859b5f0ea64681cbb35a358ab548e2a44a8cf0dac2d980b8"
checksum = "a28ac98ddc8b9274cb41bb4d9d4d5c425b6020c50c46f25559911905610b4a88"
[[package]]
name = "winapi"

View File

@@ -58,8 +58,8 @@ brk_store = { version = "0.0.111", path = "crates/brk_store" }
brk_types = { version = "0.0.111", path = "crates/brk_types" }
brk_traversable = { version = "0.0.111", path = "crates/brk_traversable", features = ["derive"] }
brk_traversable_derive = { version = "0.0.111", path = "crates/brk_traversable_derive" }
byteview = "=0.6.1"
# byteview = "~0.8.0"
# byteview = "=0.6.1"
byteview = "~0.8.0"
derive_deref = "1.1.1"
# fjall2 = { version = "2.11.5", package = "brk_fjall" }
fjall2 = { path = "../fjall2", package = "brk_fjall" }
@@ -78,10 +78,9 @@ serde = "1.0.228"
serde_bytes = "0.11.19"
serde_derive = "1.0.228"
serde_json = { version = "1.0.145", features = ["float_roundtrip"] }
sonic-rs = "0.5.6"
tokio = { version = "1.48.0", features = ["rt-multi-thread"] }
vecdb = { path = "../seqdb/crates/vecdb", features = ["derive"] }
# vecdb = { version = "0.3.16", features = ["derive"] }
# vecdb = { version = "0.3.20", features = ["derive"] }
zerocopy = { version = "0.8.27", features = ["derive"] }
[workspace.metadata.release]

View File

@@ -13,3 +13,6 @@ build = "build.rs"
brk_error = { workspace = true }
brk_logger = { workspace = true }
parking_lot = { workspace = true }
[target.'cfg(target_os = "macos")'.dependencies]
libproc = "0.14"

View File

@@ -1,24 +1,34 @@
use std::collections::HashMap;
use std::fs;
use std::io;
use std::fs::{self, File};
use std::io::{self, Write};
use std::os::unix::fs::MetadataExt;
use std::path::{Path, PathBuf};
use std::time::SystemTime;
pub struct DiskMonitor {
cache: HashMap<PathBuf, (u64, SystemTime)>, // path -> (bytes_used, mtime)
monitored_path: PathBuf,
writer: File,
}
impl DiskMonitor {
pub fn new() -> Self {
Self {
pub fn new(monitored_path: &Path, csv_path: &Path) -> io::Result<Self> {
let mut writer = File::create(csv_path)?;
writeln!(writer, "timestamp_ms,disk_usage")?;
Ok(Self {
cache: HashMap::new(),
}
monitored_path: monitored_path.to_path_buf(),
writer,
})
}
/// Get disk usage in bytes (matches `du` and Finder)
pub fn get_disk_usage(&mut self, path: &Path) -> io::Result<u64> {
self.scan_recursive(path)
/// Record disk usage at the given timestamp
pub fn record(&mut self, elapsed_ms: u128) -> io::Result<()> {
if let Ok(bytes) = self.scan_recursive(&self.monitored_path.clone()) {
writeln!(self.writer, "{},{}", elapsed_ms, bytes)?;
}
Ok(())
}
fn scan_recursive(&mut self, path: &Path) -> io::Result<u64> {

View File

@@ -0,0 +1,81 @@
use std::fs::File;
use std::io::{self, Write};
use std::path::Path;
#[cfg(target_os = "linux")]
use std::fs;
#[cfg(target_os = "macos")]
use libproc::pid_rusage::{pidrusage, RUsageInfoV2};
/// Samples per-process disk I/O counters and appends them to a CSV file.
///
/// CSV schema: `timestamp_ms,bytes_read,bytes_written` (header written once in `new`).
/// Supported platforms are Linux (`/proc/<pid>/io`) and macOS (`libproc`).
pub struct IoMonitor {
    // PID of the process whose I/O counters are sampled.
    pid: u32,
    // Open handle to the output CSV; rows are appended by `record`.
    writer: File,
}

impl IoMonitor {
    /// Creates the monitor, creating/truncating `csv_path` and writing the CSV header row.
    ///
    /// # Errors
    /// Propagates any I/O error from creating the file or writing the header.
    pub fn new(pid: u32, csv_path: &Path) -> io::Result<Self> {
        let mut writer = File::create(csv_path)?;
        writeln!(writer, "timestamp_ms,bytes_read,bytes_written")?;
        Ok(Self { pid, writer })
    }

    /// Record I/O usage at the given timestamp
    ///
    /// A sampling failure is silently skipped (no row is written for that tick);
    /// only errors from writing to the CSV are propagated.
    pub fn record(&mut self, elapsed_ms: u128) -> io::Result<()> {
        if let Ok((read, written)) = self.get_io_usage() {
            writeln!(self.writer, "{},{},{}", elapsed_ms, read, written)?;
        }
        Ok(())
    }

    /// Get I/O usage in bytes
    /// Returns (bytes_read, bytes_written)
    // NOTE(review): on targets other than Linux/macOS this function body is
    // empty after cfg-stripping and will not compile — presumably intentional
    // (the crate only targets those OSes); confirm against the crate's cfg gates.
    fn get_io_usage(&self) -> io::Result<(u64, u64)> {
        #[cfg(target_os = "linux")]
        {
            self.get_io_usage_linux()
        }
        #[cfg(target_os = "macos")]
        {
            self.get_io_usage_macos()
        }
    }

    // Parses the `read_bytes:` / `write_bytes:` fields out of /proc/<pid>/io.
    // Other fields in that file are ignored; unparsable values leave the
    // corresponding Option as None and trigger the InvalidData error below.
    #[cfg(target_os = "linux")]
    fn get_io_usage_linux(&self) -> io::Result<(u64, u64)> {
        let io_content = fs::read_to_string(format!("/proc/{}/io", self.pid))?;
        let mut read_bytes = None;
        let mut write_bytes = None;
        for line in io_content.lines() {
            if line.starts_with("read_bytes:") {
                if let Some(value_str) = line.split_whitespace().nth(1) {
                    read_bytes = value_str.parse::<u64>().ok();
                }
            } else if line.starts_with("write_bytes:") {
                if let Some(value_str) = line.split_whitespace().nth(1) {
                    write_bytes = value_str.parse::<u64>().ok();
                }
            }
        }
        // Both counters must be present; otherwise report a parse failure.
        match (read_bytes, write_bytes) {
            (Some(r), Some(w)) => Ok((r, w)),
            _ => Err(io::Error::new(
                io::ErrorKind::InvalidData,
                "Failed to parse I/O stats from /proc/[pid]/io",
            )),
        }
    }

    // Queries the kernel's per-process rusage info (RUsageInfoV2 carries the
    // ri_diskio_* byte counters). The original libproc error is discarded.
    #[cfg(target_os = "macos")]
    fn get_io_usage_macos(&self) -> io::Result<(u64, u64)> {
        match pidrusage::<RUsageInfoV2>(self.pid as i32) {
            Ok(info) => Ok((info.ri_diskio_bytesread, info.ri_diskio_byteswritten)),
            Err(_) => Err(io::Error::other("Failed to get process I/O stats")),
        }
    }
}

View File

@@ -1,6 +1,5 @@
use std::{
fs,
io::{self, Write},
path::{Path, PathBuf},
sync::{
Arc,
@@ -13,10 +12,12 @@ use std::{
use brk_error::Result;
mod disk;
mod io;
mod memory;
mod progression;
use disk::*;
use io::*;
use memory::*;
use parking_lot::Mutex;
use progression::*;
@@ -53,7 +54,7 @@ impl Bencher {
brk_logger::register_hook(move |message| {
progression_clone.check_and_record(message);
})
.map_err(|e| io::Error::new(io::ErrorKind::AlreadyExists, e))?;
.map_err(|e| std::io::Error::new(std::io::ErrorKind::AlreadyExists, e))?;
Ok(Self(Arc::new(BencherInner {
bench_dir,
@@ -132,36 +133,20 @@ fn monitor_resources(
bench_dir: &Path,
stop_flag: Arc<AtomicBool>,
) -> Result<()> {
let disk_file = bench_dir.join("disk.csv");
let memory_file = bench_dir.join("memory.csv");
let mut disk_writer = fs::File::create(disk_file)?;
let mut memory_writer = fs::File::create(memory_file)?;
writeln!(disk_writer, "timestamp_ms,disk_usage")?;
writeln!(
memory_writer,
"timestamp_ms,phys_footprint,phys_footprint_peak"
)?;
let pid = std::process::id();
let start = Instant::now();
let mut disk_monitor = DiskMonitor::new();
let memory_monitor = MemoryMonitor::new(pid);
let mut disk_monitor = DiskMonitor::new(monitored_path, &bench_dir.join("disk.csv"))?;
let mut memory_monitor = MemoryMonitor::new(pid, &bench_dir.join("memory.csv"))?;
let mut io_monitor = IoMonitor::new(pid, &bench_dir.join("io.csv"))?;
'l: loop {
let elapsed_ms = start.elapsed().as_millis();
if let Ok(bytes) = disk_monitor.get_disk_usage(monitored_path) {
writeln!(disk_writer, "{},{}", elapsed_ms, bytes)?;
}
disk_monitor.record(elapsed_ms)?;
memory_monitor.record(elapsed_ms)?;
io_monitor.record(elapsed_ms)?;
if let Ok((footprint, peak)) = memory_monitor.get_memory_usage() {
writeln!(memory_writer, "{},{},{}", elapsed_ms, footprint, peak)?;
}
// Best version
for _ in 0..50 {
// 50 * 100ms = 5 seconds
if stop_flag.load(Ordering::Relaxed) {

View File

@@ -1,4 +1,6 @@
use std::io;
use std::fs::File;
use std::io::{self, Write};
use std::path::Path;
#[cfg(target_os = "linux")]
use std::fs;
@@ -8,16 +10,28 @@ use std::process::Command;
pub struct MemoryMonitor {
pid: u32,
writer: File,
}
impl MemoryMonitor {
pub fn new(pid: u32) -> Self {
Self { pid }
pub fn new(pid: u32, csv_path: &Path) -> io::Result<Self> {
let mut writer = File::create(csv_path)?;
writeln!(writer, "timestamp_ms,phys_footprint,phys_footprint_peak")?;
Ok(Self { pid, writer })
}
/// Record memory usage at the given timestamp
pub fn record(&mut self, elapsed_ms: u128) -> io::Result<()> {
if let Ok((footprint, peak)) = self.get_memory_usage() {
writeln!(self.writer, "{},{},{}", elapsed_ms, footprint, peak)?;
}
Ok(())
}
/// Get memory usage in bytes
/// Returns (current_bytes, peak_bytes)
pub fn get_memory_usage(&self) -> io::Result<(u64, u64)> {
fn get_memory_usage(&self) -> io::Result<(u64, u64)> {
#[cfg(target_os = "linux")]
{
self.get_memory_usage_linux()

View File

@@ -54,8 +54,8 @@ const CHART_COLORS: [RGBColor; 6] = [
RGBColor(255, 159, 64), // Orange
];
// Time window buffer in milliseconds (5 seconds)
const TIME_BUFFER_MS: u64 = 5000;
// Time window buffer in milliseconds
const TIME_BUFFER_MS: u64 = 10_000;
pub struct Visualizer {
workspace_root: PathBuf,
@@ -106,6 +106,7 @@ impl Visualizer {
let disk_runs = self.read_benchmark_runs(crate_path, "disk.csv")?;
let memory_runs = self.read_benchmark_runs(crate_path, "memory.csv")?;
let progress_runs = self.read_benchmark_runs(crate_path, "progress.csv")?;
let io_runs = self.read_benchmark_runs(crate_path, "io.csv")?;
// Generate combined charts (all runs together)
if !disk_runs.is_empty() {
@@ -120,6 +121,11 @@ impl Visualizer {
self.generate_progress_chart(crate_path, crate_name, &progress_runs)?;
}
if !io_runs.is_empty() {
self.generate_io_read_chart(crate_path, crate_name, &io_runs)?;
self.generate_io_write_chart(crate_path, crate_name, &io_runs)?;
}
// Generate individual charts for each run
for run in &disk_runs {
let run_path = crate_path.join(&run.run_id);
@@ -136,6 +142,12 @@ impl Visualizer {
self.generate_progress_chart(&run_path, crate_name, slice::from_ref(run))?;
}
for run in &io_runs {
let run_path = crate_path.join(&run.run_id);
self.generate_io_read_chart(&run_path, crate_name, slice::from_ref(run))?;
self.generate_io_write_chart(&run_path, crate_name, slice::from_ref(run))?;
}
Ok(())
}
@@ -153,8 +165,8 @@ impl Visualizer {
.ok_or("Invalid run ID")?
.to_string();
// Skip directories that start with underscore
if run_id.starts_with('_') {
// Skip directories that start with underscore or contain only numbers
if run_id.starts_with('_') || run_id.chars().all(|c| c.is_ascii_digit()) {
continue;
}
@@ -557,6 +569,223 @@ impl Visualizer {
Ok(enhanced_runs)
}
#[allow(clippy::type_complexity)]
/// Reads `io.csv` for each run and splits it into read/write time series.
///
/// Returns one `(run_id, read_points, write_points)` tuple per run whose CSV
/// could be read; runs with an unreadable file are silently omitted from the
/// result, and malformed CSV rows are dropped rather than aborting.
fn read_io_data(
    &self,
    crate_path: &Path,
    runs: &[BenchmarkRun],
) -> Result<Vec<(String, Vec<DataPoint>, Vec<DataPoint>)>> {
    let mut io_runs = Vec::new();

    for run in runs {
        // For individual charts, crate_path is already the run folder
        // For combined charts, we need to append run_id
        let direct_path = crate_path.join("io.csv");
        let nested_path = crate_path.join(&run.run_id).join("io.csv");
        let csv_path = if direct_path.exists() {
            direct_path
        } else {
            nested_path
        };

        if let Ok(content) = fs::read_to_string(&csv_path) {
            let mut read_data = Vec::new();
            let mut write_data = Vec::new();

            for (i, line) in content.lines().enumerate() {
                // Skip the CSV header row.
                if i == 0 {
                    continue;
                }

                let parts: Vec<&str> = line.split(',').collect();
                // Accept rows with at least 3 parseable columns:
                // timestamp_ms, bytes_read, bytes_written.
                if parts.len() >= 3
                    && let (Ok(timestamp_ms), Ok(bytes_read), Ok(bytes_written)) = (
                        parts[0].parse::<u64>(),
                        parts[1].parse::<f64>(),
                        parts[2].parse::<f64>(),
                    )
                {
                    read_data.push(DataPoint {
                        timestamp_ms,
                        value: bytes_read,
                    });
                    write_data.push(DataPoint {
                        timestamp_ms,
                        value: bytes_written,
                    });
                }
            }

            io_runs.push((run.run_id.clone(), read_data, write_data));
        }
    }

    Ok(io_runs)
}
/// Renders `io_read_chart.svg`: bytes read over time, one line series per run.
///
/// The x-axis is clipped to the shortest run's duration plus `TIME_BUFFER_MS`
/// so all runs are compared over the same window.
fn generate_io_read_chart(
    &self,
    crate_path: &Path,
    crate_name: &str,
    runs: &[BenchmarkRun],
) -> Result<()> {
    let output_path = crate_path.join("io_read_chart.svg");
    let root = SVGBackend::new(&output_path, SIZE).into_drawing_area();
    root.fill(&BG_COLOR)?;

    // Calculate time window based on shortest run + buffer
    let min_max_time_ms = Self::calculate_min_max_time(runs) + TIME_BUFFER_MS;
    let max_time_s = (min_max_time_ms as f64) / 1000.0;

    // Read I/O CSV files which have 3 columns: timestamp, bytes_read, bytes_written
    let io_runs = self.read_io_data(crate_path, runs)?;

    // Trim I/O runs to the same time window and extract only read data
    let trimmed_io_runs: Vec<_> = io_runs
        .into_iter()
        .map(|(run_id, read_data, _write_data)| {
            let trimmed_read: Vec<_> = read_data
                .into_iter()
                .filter(|d| d.timestamp_ms <= min_max_time_ms)
                .collect();
            (run_id, trimmed_read)
        })
        .collect();

    let max_value = trimmed_io_runs
        .iter()
        .flat_map(|(_, data)| data.iter().map(|d| d.value))
        .fold(0.0_f64, f64::max);
    // NOTE(review): if no data points survive trimming, max_value is 0.0 and
    // scale_factor below may be 0/0 = NaN — presumably format_bytes handles
    // the zero case; confirm against its implementation.
    let (max_value_scaled, unit) = Self::format_bytes(max_value);
    let scale_factor = max_value / max_value_scaled;

    // Format time based on duration
    let (max_time_scaled, _time_unit, time_label) = Self::format_time(max_time_s);

    let mut chart = ChartBuilder::on(&root)
        .caption(
            format!("{} — I/O Read", crate_name),
            (FONT, FONT_SIZE_BIG).into_font().color(&TEXT_COLOR),
        )
        .margin(20)
        .x_label_area_size(50)
        .margin_left(50)
        .right_y_label_area_size(75)
        // 1.025 / 1.1 add a little headroom so lines don't touch the edges.
        .build_cartesian_2d(0.0..max_time_scaled * 1.025, 0.0..max_value_scaled * 1.1)?;

    configure_chart_mesh!(
        chart,
        time_label,
        format!("Bytes Read ({})", unit),
        |y: &f64| format!("{:.2}", y)
    );

    let time_divisor = max_time_s / max_time_scaled;
    for (idx, (run_id, read_data)) in trimmed_io_runs.iter().enumerate() {
        // Cycle through the fixed palette when there are more runs than colors.
        let color = CHART_COLORS[idx % CHART_COLORS.len()];
        Self::draw_line_series(
            &mut chart,
            read_data.iter().map(|d| {
                // ms -> seconds, then rescale both axes into display units.
                (
                    d.timestamp_ms as f64 / 1000.0 / time_divisor,
                    d.value / scale_factor,
                )
            }),
            run_id,
            color,
        )?;
    }

    Self::configure_series_labels(&mut chart)?;
    root.present()?;
    println!("Generated: {}", output_path.display());

    Ok(())
}
/// Renders `io_write_chart.svg`: bytes written over time, one line series per run.
///
/// Mirrors `generate_io_read_chart` but plots the write column of `io.csv`.
/// The x-axis is clipped to the shortest run's duration plus `TIME_BUFFER_MS`.
fn generate_io_write_chart(
    &self,
    crate_path: &Path,
    crate_name: &str,
    runs: &[BenchmarkRun],
) -> Result<()> {
    let output_path = crate_path.join("io_write_chart.svg");
    let root = SVGBackend::new(&output_path, SIZE).into_drawing_area();
    root.fill(&BG_COLOR)?;

    // Calculate time window based on shortest run + buffer
    let min_max_time_ms = Self::calculate_min_max_time(runs) + TIME_BUFFER_MS;
    let max_time_s = (min_max_time_ms as f64) / 1000.0;

    // Read I/O CSV files which have 3 columns: timestamp, bytes_read, bytes_written
    let io_runs = self.read_io_data(crate_path, runs)?;

    // Trim I/O runs to the same time window and extract only write data
    let trimmed_io_runs: Vec<_> = io_runs
        .into_iter()
        .map(|(run_id, _read_data, write_data)| {
            let trimmed_write: Vec<_> = write_data
                .into_iter()
                .filter(|d| d.timestamp_ms <= min_max_time_ms)
                .collect();
            (run_id, trimmed_write)
        })
        .collect();

    let max_value = trimmed_io_runs
        .iter()
        .flat_map(|(_, data)| data.iter().map(|d| d.value))
        .fold(0.0_f64, f64::max);
    // NOTE(review): same zero-data caveat as the read chart — max_value of 0.0
    // could make scale_factor NaN; presumably format_bytes guards this.
    let (max_value_scaled, unit) = Self::format_bytes(max_value);
    let scale_factor = max_value / max_value_scaled;

    // Format time based on duration
    let (max_time_scaled, _time_unit, time_label) = Self::format_time(max_time_s);

    let mut chart = ChartBuilder::on(&root)
        .caption(
            format!("{} — I/O Write", crate_name),
            (FONT, FONT_SIZE_BIG).into_font().color(&TEXT_COLOR),
        )
        .margin(20)
        .x_label_area_size(50)
        .margin_left(50)
        .right_y_label_area_size(75)
        // 1.025 / 1.1 add a little headroom so lines don't touch the edges.
        .build_cartesian_2d(0.0..max_time_scaled * 1.025, 0.0..max_value_scaled * 1.1)?;

    configure_chart_mesh!(
        chart,
        time_label,
        format!("Bytes Written ({})", unit),
        |y: &f64| format!("{:.2}", y)
    );

    let time_divisor = max_time_s / max_time_scaled;
    for (idx, (run_id, write_data)) in trimmed_io_runs.iter().enumerate() {
        // Cycle through the fixed palette when there are more runs than colors.
        let color = CHART_COLORS[idx % CHART_COLORS.len()];
        Self::draw_line_series(
            &mut chart,
            write_data.iter().map(|d| {
                // ms -> seconds, then rescale both axes into display units.
                (
                    d.timestamp_ms as f64 / 1000.0 / time_divisor,
                    d.value / scale_factor,
                )
            }),
            run_id,
            color,
        )?;
    }

    Self::configure_series_labels(&mut chart)?;
    root.present()?;
    println!("Generated: {}", output_path.display());

    Ok(())
}
fn generate_progress_chart(
&self,
crate_path: &Path,

View File

@@ -11,6 +11,7 @@ build = "build.rs"
[dependencies]
bitcoin = { workspace = true }
brk_bencher = { workspace = true }
brk_error = { workspace = true }
brk_fetcher = { workspace = true }
brk_grouper = { workspace = true }

View File

@@ -0,0 +1,73 @@
use std::{env, path::Path, thread, time::Instant};
use brk_bencher::Bencher;
use brk_computer::Computer;
use brk_error::Result;
use brk_fetcher::Fetcher;
use brk_indexer::Indexer;
use brk_iterator::Blocks;
use brk_reader::Reader;
use brk_rpc::{Auth, Client};
use log::{debug, info};
use vecdb::Exit;
/// Entry point: runs the whole benchmark on a secondary thread with an
/// enlarged stack and forwards its result.
pub fn main() -> Result<()> {
    // Can't increase main thread's stack size, thus we need to use another thread
    thread::Builder::new()
        // 512 MiB stack — presumably required by deep call chains in the
        // indexing/compute pipeline; TODO confirm the needed size.
        .stack_size(512 * 1024 * 1024)
        .spawn(run)?
        // join() errs only if the worker panicked; unwrap re-raises that panic.
        .join()
        .unwrap()
}
/// Benchmark driver: indexes the chain, computes derived datasets, and lets
/// `Bencher` record resource usage for the whole run, including teardown.
fn run() -> Result<()> {
    brk_logger::init(None)?;

    let bitcoin_dir = Client::default_bitcoin_path();
    // let bitcoin_dir = Path::new("/Volumes/WD_BLACK/bitcoin");
    // NOTE(review): HOME is assumed to be set; this unwrap panics otherwise.
    let outputs_dir = Path::new(&env::var("HOME").unwrap()).join(".brk");
    let outputs_benches_dir = outputs_dir.join("benches");
    // let outputs_dir = Path::new("../../_outputs");

    // Authenticate against the local node via the cookie file it writes.
    let client = Client::new(
        Client::default_url(),
        Auth::CookieFile(bitcoin_dir.join(".cookie")),
    )?;
    let reader = Reader::new(bitcoin_dir.join("blocks"), &client);
    let blocks = Blocks::new(&client, &reader);

    let mut indexer = Indexer::forced_import(&outputs_dir)?;
    let fetcher = Fetcher::import(true, None)?;
    let mut computer = Computer::forced_import(&outputs_benches_dir, &indexer, Some(fetcher))?;

    let mut bencher =
        Bencher::from_cargo_env(env!("CARGO_PKG_NAME"), &outputs_dir.join("computed"))?;
    bencher.start()?;

    let exit = Exit::new();
    exit.set_ctrlc_handler();

    // Make sure bench recording is stopped even when the run is interrupted;
    // the stop error is deliberately ignored (best-effort cleanup).
    let bencher_clone = bencher.clone();
    exit.register_cleanup(move || {
        let _ = bencher_clone.stop();
        debug!("Bench stopped.");
    });

    // Phase 1: (re)index the chain up to the tip.
    let i = Instant::now();
    let starting_indexes = indexer.checked_index(&blocks, &client, &exit)?;
    info!("Done in {:?}", i.elapsed());

    // Phase 2: compute derived datasets from the fresh indexes.
    let i = Instant::now();
    computer.compute(&indexer, starting_indexes, &reader, &exit)?;
    info!("Done in {:?}", i.elapsed());

    // We want to benchmark the drop too
    drop(computer);
    drop(indexer);

    Ok(())
}

View File

@@ -48,6 +48,8 @@ impl Vecs {
.collect(),
)?;
this.db.compact()?;
Ok(this)
}

View File

@@ -10,8 +10,8 @@ use brk_types::{
TxVersion, Version, WeekIndex, Weight, YearIndex,
};
use vecdb::{
Database, EagerVec, Exit, IterableCloneableVec, IterableVec, LazyVecFrom1, LazyVecFrom2,
LazyVecFrom3, PAGE_SIZE, TypedVecIterator, VecIndex,
AnyVec, Database, EagerVec, Exit, GenericStoredVec, IterableCloneableVec, IterableVec,
LazyVecFrom1, LazyVecFrom2, PAGE_SIZE, TypedVecIterator, VecIndex, unlikely,
};
use crate::grouped::{
@@ -71,13 +71,11 @@ pub struct Vecs {
pub indexes_to_fee: ComputedValueVecsFromTxindex,
pub indexes_to_fee_rate: ComputedVecsFromTxindex<FeeRate>,
/// Value == 0 when Coinbase
pub txindex_to_input_value:
LazyVecFrom3<TxIndex, Sats, TxIndex, TxInIndex, TxIndex, StoredU64, TxInIndex, Sats>,
pub txindex_to_input_value: EagerVec<TxIndex, Sats>,
pub indexes_to_sent: ComputedValueVecsFromHeight,
// pub indexes_to_input_value: ComputedVecsFromTxindex<Sats>,
pub indexes_to_opreturn_count: ComputedVecsFromHeight<StoredU64>,
pub txindex_to_output_value:
LazyVecFrom3<TxIndex, Sats, TxIndex, TxOutIndex, TxIndex, StoredU64, TxOutIndex, Sats>,
pub txindex_to_output_value: EagerVec<TxIndex, Sats>,
// pub indexes_to_output_value: ComputedVecsFromTxindex<Sats>,
pub indexes_to_p2a_count: ComputedVecsFromHeight<StoredU64>,
pub indexes_to_p2ms_count: ComputedVecsFromHeight<StoredU64>,
@@ -151,7 +149,7 @@ impl Vecs {
price: Option<&price::Vecs>,
) -> Result<Self> {
let db = Database::open(&parent_path.join("chain"))?;
db.set_min_len(PAGE_SIZE * 10_000_000)?;
db.set_min_len(PAGE_SIZE * 50_000_000)?;
let version = parent_version + Version::ZERO;
@@ -202,82 +200,11 @@ impl Vecs {
},
);
let txindex_to_input_value = LazyVecFrom3::init(
"input_value",
version + Version::ZERO,
indexer.vecs.txindex_to_first_txinindex.boxed_clone(),
indexes.txindex_to_input_count.boxed_clone(),
txinindex_to_value.boxed_clone(),
|index: TxIndex,
txindex_to_first_txinindex_iter,
txindex_to_input_count_iter,
txinindex_to_value_iter| {
let txindex = index.to_usize();
txindex_to_first_txinindex_iter
.get_at(txindex)
.map(|first_index| {
let first_index = usize::from(first_index);
let count = *txindex_to_input_count_iter.get_at_unwrap(txindex);
let range = first_index..first_index + count as usize;
range.into_iter().fold(Sats::ZERO, |total, txinindex| {
total + txinindex_to_value_iter.get_at_unwrap(txinindex)
})
})
},
);
let txindex_to_input_value =
EagerVec::forced_import_compressed(&db, "input_value", version + Version::ZERO)?;
// let indexes_to_input_value: ComputedVecsFromTxindex<Sats> =
// ComputedVecsFromTxindex::forced_import(
// db,
// "input_value",
// true,
// version + Version::ZERO,
// format,
// computation,
// StorableVecGeneatorOptions::default()
// .add_average()
// .add_sum()
// .add_cumulative(),
// )?;
let txindex_to_output_value = LazyVecFrom3::init(
"output_value",
version + Version::ZERO,
indexer.vecs.txindex_to_first_txoutindex.boxed_clone(),
indexes.txindex_to_output_count.boxed_clone(),
indexer.vecs.txoutindex_to_value.boxed_clone(),
|index: TxIndex,
txindex_to_first_txoutindex_iter,
txindex_to_output_count_iter,
txoutindex_to_value_iter| {
let txindex = index.to_usize();
txindex_to_first_txoutindex_iter
.get_at(txindex)
.map(|first_index| {
let first_index = usize::from(first_index);
let count = *txindex_to_output_count_iter.get_at_unwrap(txindex);
let range = first_index..first_index + count as usize;
range.into_iter().fold(Sats::ZERO, |total, txoutindex| {
let v = txoutindex_to_value_iter.get_at_unwrap(txoutindex);
total + v
})
})
},
);
// let indexes_to_output_value: ComputedVecsFromTxindex<Sats> =
// ComputedVecsFromTxindex::forced_import(
// db,
// "output_value",
// true,
// version + Version::ZERO,
// format,
// computation,
// StorableVecGeneatorOptions::default()
// .add_average()
// .add_sum()
// .add_cumulative(),
// )?;
let txindex_to_output_value =
EagerVec::forced_import_compressed(&db, "output_value", version + Version::ZERO)?;
let txindex_to_fee =
EagerVec::forced_import_compressed(&db, "fee", version + Version::ZERO)?;
@@ -1084,8 +1011,6 @@ impl Vecs {
txindex_to_is_coinbase,
txinindex_to_value,
// indexes_to_input_value,
// indexes_to_output_value,
txindex_to_input_value,
txindex_to_output_value,
txindex_to_fee,
@@ -1102,6 +1027,8 @@ impl Vecs {
.collect(),
)?;
this.db.compact()?;
Ok(this)
}
@@ -1365,7 +1292,9 @@ impl Vecs {
compute_indexes_to_tx_vany(&mut self.indexes_to_tx_v2, TxVersion::TWO)?;
compute_indexes_to_tx_vany(&mut self.indexes_to_tx_v3, TxVersion::THREE)?;
let mut txoutindex_to_value_iter = indexer.vecs.txoutindex_to_value.into_iter();
// Because random reads are needed, reading directly from the mmap is faster than using buffered iterators
let txoutindex_to_value = &indexer.vecs.txoutindex_to_value;
let txoutindex_to_value_reader = indexer.vecs.txoutindex_to_value.create_reader();
self.txinindex_to_value.compute_transform(
starting_indexes.txinindex,
&indexes.txinindex_to_txoutindex,
@@ -1373,64 +1302,56 @@ impl Vecs {
let value = if txoutindex == TxOutIndex::COINBASE {
Sats::MAX
} else {
txoutindex_to_value_iter.get_unwrap(txoutindex)
txoutindex_to_value
.unchecked_read(txoutindex, &txoutindex_to_value_reader)
.unwrap()
};
(txinindex, value)
},
exit,
)?;
// indexes.txinindex_to_txoutindex.boxed_clone(),
// indexer.vecs.txoutindex_to_value.boxed_clone(),
// |index: TxInIndex, txinindex_to_txoutindex_iter, txoutindex_to_value_iter| {
// txinindex_to_txoutindex_iter.get_unwrap(index.to_usize()).map(
// |(txinindex, txoutindex)| {
// let txoutindex = txoutindex;
// if txoutindex == TxOutIndex::COINBASE {
// Sats::MAX
// } else if let Some((_, value)) =
// txoutindex_to_value_iter.get_unwrap(txoutindex.to_usize())
// {
// value
// } else {
// dbg!(txinindex, txoutindex);
// panic!()
// }
// },
// )
// },
// Debug: verify the computed txinindex_to_value
dbg!("txinindex_to_value first 20:");
for i in 0..20.min(self.txinindex_to_value.len()) {
let val = self.txinindex_to_value.read_at_unwrap_once(i);
dbg!((TxInIndex::from(i), val));
}
// self.indexes_to_output_value.compute_all(
// indexer,
// indexes,
// starting_indexes,
// exit,
// |vec| {
// vec.compute_sum_from_indexes(
// starting_indexes.txindex,
// &indexer.vecs.txindex_to_first_txoutindex,
// self.indexes_to_output_count.txindex.as_ref().unwrap(),
// &indexer.vecs.txoutindex_to_value,
// exit,
// )
// },
// )?;
// Debug: verify the computed txindex_to_input_count
dbg!("txindex_to_input_count first 20:");
for i in 0..20.min(indexes.txindex_to_input_count.len()) {
let val = indexes.txindex_to_input_count.read_at_unwrap_once(i);
dbg!((TxInIndex::from(i), val));
}
// self.indexes_to_input_value.compute_all(
// indexer,
// indexes,
// starting_indexes,
// exit,
// |vec| {
// vec.compute_sum_from_indexes(
// starting_indexes.txindex,
// &indexer.vecs.txindex_to_first_txinindex,
// self.indexes_to_input_count.txindex.as_ref().unwrap(),
// &self.txinindex_to_value,
// exit,
// )
// },
// )?;
self.txindex_to_input_value.compute_sum_from_indexes(
starting_indexes.txindex,
&indexer.vecs.txindex_to_first_txinindex,
&indexes.txindex_to_input_count,
&self.txinindex_to_value,
exit,
)?;
// Debug: verify the computed input values
for i in 0..10.min(self.txindex_to_input_value.len()) {
let val = self.txindex_to_input_value.read_at_unwrap_once(i);
dbg!((TxIndex::from(i), "input_value", val));
}
self.txindex_to_output_value.compute_sum_from_indexes(
starting_indexes.txindex,
&indexer.vecs.txindex_to_first_txoutindex,
&indexes.txindex_to_output_count,
&indexer.vecs.txoutindex_to_value,
exit,
)?;
// Debug: verify the computed output values
for i in 0..10.min(self.txindex_to_output_value.len()) {
let val = self.txindex_to_output_value.read_at_unwrap_once(i);
dbg!((TxIndex::from(i), "output_value", val));
}
self.txindex_to_fee.compute_transform2(
starting_indexes.txindex,
@@ -1439,9 +1360,10 @@ impl Vecs {
|(i, input, output, ..)| {
(
i,
if input.is_max() {
if unlikely(input.is_max()) {
Sats::ZERO
} else {
dbg!((i, input, output));
input.checked_sub(output).unwrap()
},
)

View File

@@ -284,6 +284,8 @@ impl Vecs {
.collect(),
)?;
this.db.compact()?;
Ok(this)
}

View File

@@ -167,6 +167,8 @@ impl Vecs {
.collect(),
)?;
this.db.compact()?;
Ok(this)
}

View File

@@ -7,7 +7,7 @@ use brk_traversable::Traversable;
use brk_types::{DateIndex, Height, OHLCCents, Version};
use vecdb::{
AnyStoredVec, AnyVec, Database, Exit, GenericStoredVec, IterableVec, PAGE_SIZE, RawVec,
VecIndex, VecIterator,
TypedVecIterator, VecIndex,
};
use super::{Indexes, indexes};
@@ -49,6 +49,8 @@ impl Vecs {
.collect(),
)?;
this.db.compact()?;
Ok(this)
}
@@ -75,24 +77,22 @@ impl Vecs {
let index = starting_indexes
.height
.min(Height::from(self.height_to_price_ohlc_in_cents.len()));
let mut prev_timestamp = index
.decremented()
.map(|prev_i| height_to_timestamp.iter().unwrap().get_unwrap(prev_i));
height_to_timestamp
.iter()?
.skip(index.to_usize())
.enumerate()
.skip(index.to_usize())
.try_for_each(|(i, v)| -> Result<()> {
self.height_to_price_ohlc_in_cents.forced_push_at(
i,
self.fetcher
.get_height(
i.into(),
v,
i.decremented().map(|prev_i| {
height_to_timestamp.into_iter().get_at_unwrap(prev_i)
}),
)
.get_height(i.into(), v, prev_timestamp)
.unwrap(),
exit,
)?;
prev_timestamp = Some(v);
Ok(())
})?;
self.height_to_price_ohlc_in_cents.safe_flush(exit)?;
@@ -100,24 +100,18 @@ impl Vecs {
let index = starting_indexes
.dateindex
.min(DateIndex::from(self.dateindex_to_price_ohlc_in_cents.len()));
let mut prev = None;
let mut prev = Some(index.decremented().map_or(OHLCCents::default(), |prev_i| {
self.dateindex_to_price_ohlc_in_cents
.iter()
.unwrap()
.get_unwrap(prev_i)
}));
indexes
.dateindex_to_date
.iter()
.skip(index.to_usize())
.enumerate()
.skip(index.to_usize())
.try_for_each(|(i, d)| -> Result<()> {
if prev.is_none() {
let i = i.to_usize();
prev.replace(if i > 0 {
self.dateindex_to_price_ohlc_in_cents
.into_iter()
.get_at_unwrap(i - 1)
} else {
OHLCCents::default()
});
}
let ohlc = if i.to_usize() + 100 >= self.dateindex_to_price_ohlc_in_cents.len()
&& let Ok(mut ohlc) = self.fetcher.get_date(d)
{

View File

@@ -222,8 +222,8 @@ where
});
source
.iter()
.skip(index.to_usize())
.enumerate()
.skip(index.to_usize())
.try_for_each(|(i, v)| -> Result<()> {
cumulative += v;
cumulative_vec.forced_push_at(i, cumulative, exit)?;
@@ -252,7 +252,6 @@ where
let index = self.starting_index(max_from);
let mut count_indexes_iter = count_indexes.iter();
let mut source_iter = source.iter();
let cumulative_vec = self.cumulative.as_mut();
@@ -263,12 +262,14 @@ where
})
});
let mut count_indexes_iter = count_indexes.iter().skip(index.to_usize());
first_indexes
.iter()
.skip(index.to_usize())
.enumerate()
.skip(index.to_usize())
.try_for_each(|(index, first_index)| -> Result<()> {
let count_index = count_indexes_iter.get_at_unwrap(index);
let count_index = count_indexes_iter.next().unwrap();
if let Some(first) = self.first.as_mut() {
let f = source_iter
@@ -420,8 +421,6 @@ where
let index = self.starting_index(max_from);
let mut count_indexes_iter = count_indexes.iter();
let mut source_first_iter = source.first.as_ref().map(|f| f.iter());
let mut source_last_iter = source.last.as_ref().map(|f| f.iter());
let mut source_max_iter = source.max.as_ref().map(|f| f.iter());
@@ -435,12 +434,14 @@ where
})
});
let mut count_indexes_iter = count_indexes.iter().skip(index.to_usize());
first_indexes
.iter()
.skip(index.to_usize())
.enumerate()
.skip(index.to_usize())
.try_for_each(|(index, first_index, ..)| -> Result<()> {
let count_index = count_indexes_iter.get_at_unwrap(index);
let count_index = count_indexes_iter.next().unwrap();
if let Some(first) = self.first.as_mut() {
let v = source_first_iter.as_mut().unwrap().get_unwrap(first_index);

View File

@@ -65,7 +65,7 @@ where
.map_or_else(|| source.as_ref().unwrap().clone(), |v| v.clone()),
len_source.clone(),
|i: I, source, len_source| {
if i.to_usize() >= len_source.len() {
if i.to_usize() >= len_source.vec_len() {
return None;
}
source.get_at(S1I::min_from(i))
@@ -90,10 +90,10 @@ where
),
len_source.clone(),
|i: I, source, len_source| {
if i.to_usize() >= len_source.len() {
if i.to_usize() >= len_source.vec_len() {
return None;
}
source.get_at(S1I::max_from(i, source.len()))
source.get_at(S1I::max_from(i, source.vec_len()))
},
))
}),
@@ -107,10 +107,10 @@ where
.map_or_else(|| source.as_ref().unwrap().clone(), |v| v.clone()),
len_source.clone(),
|i: I, source, len_source| {
if i.to_usize() >= len_source.len() {
if i.to_usize() >= len_source.vec_len() {
return None;
}
S1I::inclusive_range_from(i, source.len())
S1I::inclusive_range_from(i, source.vec_len())
.flat_map(|i| source.get_at(i))
.min()
},
@@ -126,10 +126,10 @@ where
.map_or_else(|| source.as_ref().unwrap().clone(), |v| v.clone()),
len_source.clone(),
|i: I, source, len_source| {
if i.to_usize() >= len_source.len() {
if i.to_usize() >= len_source.vec_len() {
return None;
}
S1I::inclusive_range_from(i, source.len())
S1I::inclusive_range_from(i, source.vec_len())
.flat_map(|i| source.get_at(i))
.max()
},
@@ -145,10 +145,10 @@ where
.map_or_else(|| source.as_ref().unwrap().clone(), |v| v.clone()),
len_source.clone(),
|i: I, source, len_source| {
if i.to_usize() >= len_source.len() {
if i.to_usize() >= len_source.vec_len() {
return None;
}
let vec = S1I::inclusive_range_from(i, source.len())
let vec = S1I::inclusive_range_from(i, source.vec_len())
.flat_map(|i| source.get_at(i))
.collect::<Vec<_>>();
if vec.is_empty() {
@@ -175,10 +175,10 @@ where
.map_or_else(|| source.as_ref().unwrap().clone(), |v| v.clone()),
len_source.clone(),
|i: I, source, len_source| {
if i.to_usize() >= len_source.len() {
if i.to_usize() >= len_source.vec_len() {
return None;
}
let vec = S1I::inclusive_range_from(i, source.len())
let vec = S1I::inclusive_range_from(i, source.vec_len())
.flat_map(|i| source.get_at(i))
.collect::<Vec<_>>();
if vec.is_empty() {
@@ -197,10 +197,10 @@ where
source_extra.cumulative.as_ref().unwrap().boxed_clone(),
len_source.clone(),
|i: I, source, len_source| {
if i.to_usize() >= len_source.len() {
if i.to_usize() >= len_source.vec_len() {
return None;
}
source.get_at(S1I::max_from(i, source.len()))
source.get_at(S1I::max_from(i, source.vec_len()))
},
))
}),

View File

@@ -110,6 +110,7 @@ where
.merge_branches()
.unwrap()
}
fn iter_any_writable(&self) -> impl Iterator<Item = &dyn AnyWritableVec> {
let mut regular_iter: Box<dyn Iterator<Item = &dyn AnyWritableVec>> =
Box::new(self.height.iter_any_writable());

View File

@@ -397,8 +397,8 @@ impl ComputedRatioVecsFromDateIndex {
.as_ref()
.unwrap()
.iter()
.skip(starting_dateindex.to_usize())
.enumerate()
.skip(starting_dateindex.to_usize())
.try_for_each(|(index, ratio)| -> Result<()> {
if index < min_ratio_date_usize {
self.ratio_pct5
@@ -611,28 +611,25 @@ impl ComputedRatioVecsFromDateIndex {
}
fn mut_ratio_vecs(&mut self) -> Vec<&mut EagerVec<DateIndex, StoredF32>> {
[
self.ratio_pct1
.as_mut()
.map_or(vec![], |v| vec![v.dateindex.as_mut().unwrap()]),
self.ratio_pct2
.as_mut()
.map_or(vec![], |v| vec![v.dateindex.as_mut().unwrap()]),
self.ratio_pct5
.as_mut()
.map_or(vec![], |v| vec![v.dateindex.as_mut().unwrap()]),
self.ratio_pct95
.as_mut()
.map_or(vec![], |v| vec![v.dateindex.as_mut().unwrap()]),
self.ratio_pct98
.as_mut()
.map_or(vec![], |v| vec![v.dateindex.as_mut().unwrap()]),
self.ratio_pct99
.as_mut()
.map_or(vec![], |v| vec![v.dateindex.as_mut().unwrap()]),
]
.into_iter()
.flatten()
.collect::<Vec<_>>()
let mut vecs = Vec::with_capacity(6);
if let Some(v) = self.ratio_pct1.as_mut() {
vecs.push(v.dateindex.as_mut().unwrap());
}
if let Some(v) = self.ratio_pct2.as_mut() {
vecs.push(v.dateindex.as_mut().unwrap());
}
if let Some(v) = self.ratio_pct5.as_mut() {
vecs.push(v.dateindex.as_mut().unwrap());
}
if let Some(v) = self.ratio_pct95.as_mut() {
vecs.push(v.dateindex.as_mut().unwrap());
}
if let Some(v) = self.ratio_pct98.as_mut() {
vecs.push(v.dateindex.as_mut().unwrap());
}
if let Some(v) = self.ratio_pct99.as_mut() {
vecs.push(v.dateindex.as_mut().unwrap());
}
vecs
}
}

View File

@@ -483,8 +483,6 @@ impl ComputedStandardDeviationVecsFromDateIndex {
sorted.sort_unstable();
let mut sma_iter = sma.iter();
let mut p0_5sd = self.p0_5sd.as_mut().map(|c| c.dateindex.as_mut().unwrap());
let mut p1sd = self.p1sd.as_mut().map(|c| c.dateindex.as_mut().unwrap());
let mut p1_5sd = self.p1_5sd.as_mut().map(|c| c.dateindex.as_mut().unwrap());
@@ -499,11 +497,12 @@ impl ComputedStandardDeviationVecsFromDateIndex {
let mut m3sd = self.m3sd.as_mut().map(|c| c.dateindex.as_mut().unwrap());
let min_date_usize = min_date.to_usize();
let mut sma_iter = sma.iter().skip(starting_dateindex.to_usize());
source
.iter()
.skip(starting_dateindex.to_usize())
.enumerate()
.skip(starting_dateindex.to_usize())
.try_for_each(|(index, ratio)| -> Result<()> {
if index < min_date_usize {
self.sd.dateindex.as_mut().unwrap().forced_push_at(
@@ -548,11 +547,13 @@ impl ComputedStandardDeviationVecsFromDateIndex {
if let Some(v) = m3sd.as_mut() {
v.forced_push_at(index, StoredF32::NAN, exit)?
}
// Advance iterator to stay in sync
sma_iter.next();
} else {
let pos = sorted.binary_search(&ratio).unwrap_or_else(|pos| pos);
sorted.insert(pos, ratio);
let avg = sma_iter.get_at_unwrap(index);
let avg = sma_iter.next().unwrap();
let population =
index.checked_sub(min_date_usize).unwrap().to_usize() as f32 + 1.0;

View File

@@ -13,8 +13,8 @@ use brk_types::{
YearIndex,
};
use vecdb::{
Database, EagerVec, Exit, IterableCloneableVec, LazyVecFrom1, LazyVecFrom2, PAGE_SIZE,
TypedVecIterator, VecIndex,
Database, EagerVec, Exit, GenericStoredVec, IterableCloneableVec, LazyVecFrom1, PAGE_SIZE,
TypedVecIterator, unlikely,
};
const VERSION: Version = Version::ZERO;
@@ -79,14 +79,11 @@ pub struct Vecs {
pub semesterindex_to_first_monthindex: EagerVec<SemesterIndex, MonthIndex>,
pub semesterindex_to_monthindex_count: EagerVec<SemesterIndex, StoredU64>,
pub semesterindex_to_semesterindex: EagerVec<SemesterIndex, SemesterIndex>,
pub txindex_to_input_count:
LazyVecFrom2<TxIndex, StoredU64, TxIndex, TxInIndex, TxInIndex, TxOutIndex>,
pub txindex_to_output_count:
LazyVecFrom2<TxIndex, StoredU64, TxIndex, TxOutIndex, TxOutIndex, Sats>,
pub txindex_to_input_count: EagerVec<TxIndex, StoredU64>,
pub txindex_to_output_count: EagerVec<TxIndex, StoredU64>,
pub txindex_to_txindex: LazyVecFrom1<TxIndex, TxIndex, TxIndex, Txid>,
pub txinindex_to_txinindex: LazyVecFrom1<TxInIndex, TxInIndex, TxInIndex, OutPoint>,
pub txinindex_to_txoutindex:
LazyVecFrom2<TxInIndex, TxOutIndex, TxInIndex, OutPoint, TxIndex, TxOutIndex>,
pub txinindex_to_txoutindex: EagerVec<TxInIndex, TxOutIndex>,
pub txoutindex_to_txoutindex: LazyVecFrom1<TxOutIndex, TxOutIndex, TxOutIndex, Sats>,
pub unknownoutputindex_to_unknownoutputindex:
LazyVecFrom1<UnknownOutputIndex, UnknownOutputIndex, UnknownOutputIndex, TxIndex>,
@@ -110,179 +107,112 @@ impl Vecs {
let version = parent_version + VERSION;
let txinindex_to_txoutindex = LazyVecFrom2::init(
"txoutindex",
version,
indexer.vecs.txinindex_to_outpoint.boxed_clone(),
indexer.vecs.txindex_to_first_txoutindex.boxed_clone(),
|index: TxInIndex, txinindex_to_outpoint_iter, txindex_to_first_txoutindex_iter| {
txinindex_to_outpoint_iter
.get_at(index.to_usize())
.map(|outpoint| {
if outpoint.is_coinbase() {
return TxOutIndex::COINBASE;
}
txindex_to_first_txoutindex_iter
.get_at_unwrap(outpoint.txindex().to_usize())
+ outpoint.vout()
})
},
);
let txoutindex_to_txoutindex = LazyVecFrom1::init(
"txoutindex",
version + Version::ZERO,
indexer.vecs.txoutindex_to_value.boxed_clone(),
|index, _| Some(index),
);
let txinindex_to_txinindex = LazyVecFrom1::init(
"txinindex",
version + Version::ZERO,
indexer.vecs.txinindex_to_outpoint.boxed_clone(),
|index, _| Some(index),
);
let txindex_to_txindex = LazyVecFrom1::init(
"txindex",
version + Version::ZERO,
indexer.vecs.txindex_to_txid.boxed_clone(),
|index, _| Some(index),
);
let txindex_to_input_count = LazyVecFrom2::init(
"input_count",
version + Version::ZERO,
indexer.vecs.txindex_to_first_txinindex.boxed_clone(),
txinindex_to_txoutindex.boxed_clone(),
|index: TxIndex, txindex_to_first_txinindex_iter, txinindex_to_txoutindex_iter| {
let txindex = index.to_usize();
txindex_to_first_txinindex_iter
.get_at(txindex)
.map(|start| {
let start = usize::from(start);
let end = txindex_to_first_txinindex_iter
.get_at(txindex + 1)
.map(usize::from)
.unwrap_or_else(|| txinindex_to_txoutindex_iter.len());
StoredU64::from((start..end).count())
})
},
);
let txindex_to_output_count = LazyVecFrom2::init(
"output_count",
version + Version::ZERO,
indexer.vecs.txindex_to_first_txoutindex.boxed_clone(),
indexer.vecs.txoutindex_to_value.boxed_clone(),
|index: TxIndex, txindex_to_first_txoutindex_iter, txoutindex_to_value_iter| {
let txindex = index.to_usize();
txindex_to_first_txoutindex_iter
.get_at(txindex)
.map(|start| {
let start = usize::from(start);
let end = txindex_to_first_txoutindex_iter
.get_at(txindex + 1)
.map(usize::from)
.unwrap_or_else(|| txoutindex_to_value_iter.len());
StoredU64::from((start..end).count())
})
},
);
let p2pk33addressindex_to_p2pk33addressindex = LazyVecFrom1::init(
"p2pk33addressindex",
version + Version::ZERO,
indexer.vecs.p2pk33addressindex_to_p2pk33bytes.boxed_clone(),
|index, _| Some(index),
);
let p2pk65addressindex_to_p2pk65addressindex = LazyVecFrom1::init(
"p2pk65addressindex",
version + Version::ZERO,
indexer.vecs.p2pk65addressindex_to_p2pk65bytes.boxed_clone(),
|index, _| Some(index),
);
let p2pkhaddressindex_to_p2pkhaddressindex = LazyVecFrom1::init(
"p2pkhaddressindex",
version + Version::ZERO,
indexer.vecs.p2pkhaddressindex_to_p2pkhbytes.boxed_clone(),
|index, _| Some(index),
);
let p2shaddressindex_to_p2shaddressindex = LazyVecFrom1::init(
"p2shaddressindex",
version + Version::ZERO,
indexer.vecs.p2shaddressindex_to_p2shbytes.boxed_clone(),
|index, _| Some(index),
);
let p2traddressindex_to_p2traddressindex = LazyVecFrom1::init(
"p2traddressindex",
version + Version::ZERO,
indexer.vecs.p2traddressindex_to_p2trbytes.boxed_clone(),
|index, _| Some(index),
);
let p2wpkhaddressindex_to_p2wpkhaddressindex = LazyVecFrom1::init(
"p2wpkhaddressindex",
version + Version::ZERO,
indexer.vecs.p2wpkhaddressindex_to_p2wpkhbytes.boxed_clone(),
|index, _| Some(index),
);
let p2wshaddressindex_to_p2wshaddressindex = LazyVecFrom1::init(
"p2wshaddressindex",
version + Version::ZERO,
indexer.vecs.p2wshaddressindex_to_p2wshbytes.boxed_clone(),
|index, _| Some(index),
);
let p2aaddressindex_to_p2aaddressindex = LazyVecFrom1::init(
"p2aaddressindex",
version + Version::ZERO,
indexer.vecs.p2aaddressindex_to_p2abytes.boxed_clone(),
|index, _| Some(index),
);
let p2msoutputindex_to_p2msoutputindex = LazyVecFrom1::init(
"p2msoutputindex",
version + Version::ZERO,
indexer.vecs.p2msoutputindex_to_txindex.boxed_clone(),
|index, _| Some(index),
);
let emptyoutputindex_to_emptyoutputindex = LazyVecFrom1::init(
"emptyoutputindex",
version + Version::ZERO,
indexer.vecs.emptyoutputindex_to_txindex.boxed_clone(),
|index, _| Some(index),
);
let unknownoutputindex_to_unknownoutputindex = LazyVecFrom1::init(
"unknownoutputindex",
version + Version::ZERO,
indexer.vecs.unknownoutputindex_to_txindex.boxed_clone(),
|index, _| Some(index),
);
let opreturnindex_to_opreturnindex = LazyVecFrom1::init(
"opreturnindex",
version + Version::ZERO,
indexer.vecs.opreturnindex_to_txindex.boxed_clone(),
|index, _| Some(index),
);
let this = Self {
txinindex_to_txoutindex,
emptyoutputindex_to_emptyoutputindex,
txinindex_to_txinindex,
opreturnindex_to_opreturnindex,
txoutindex_to_txoutindex,
p2aaddressindex_to_p2aaddressindex,
p2msoutputindex_to_p2msoutputindex,
p2pk33addressindex_to_p2pk33addressindex,
p2pk65addressindex_to_p2pk65addressindex,
p2pkhaddressindex_to_p2pkhaddressindex,
p2shaddressindex_to_p2shaddressindex,
p2traddressindex_to_p2traddressindex,
p2wpkhaddressindex_to_p2wpkhaddressindex,
p2wshaddressindex_to_p2wshaddressindex,
txindex_to_input_count,
txindex_to_output_count,
txindex_to_txindex,
unknownoutputindex_to_unknownoutputindex,
txinindex_to_txoutindex: EagerVec::forced_import_compressed(
&db,
"txoutindex",
version,
)?,
txoutindex_to_txoutindex: LazyVecFrom1::init(
"txoutindex",
version + Version::ZERO,
indexer.vecs.txoutindex_to_value.boxed_clone(),
|index, _| Some(index),
),
txinindex_to_txinindex: LazyVecFrom1::init(
"txinindex",
version + Version::ZERO,
indexer.vecs.txinindex_to_outpoint.boxed_clone(),
|index, _| Some(index),
),
p2pk33addressindex_to_p2pk33addressindex: LazyVecFrom1::init(
"p2pk33addressindex",
version + Version::ZERO,
indexer.vecs.p2pk33addressindex_to_p2pk33bytes.boxed_clone(),
|index, _| Some(index),
),
p2pk65addressindex_to_p2pk65addressindex: LazyVecFrom1::init(
"p2pk65addressindex",
version + Version::ZERO,
indexer.vecs.p2pk65addressindex_to_p2pk65bytes.boxed_clone(),
|index, _| Some(index),
),
p2pkhaddressindex_to_p2pkhaddressindex: LazyVecFrom1::init(
"p2pkhaddressindex",
version + Version::ZERO,
indexer.vecs.p2pkhaddressindex_to_p2pkhbytes.boxed_clone(),
|index, _| Some(index),
),
p2shaddressindex_to_p2shaddressindex: LazyVecFrom1::init(
"p2shaddressindex",
version + Version::ZERO,
indexer.vecs.p2shaddressindex_to_p2shbytes.boxed_clone(),
|index, _| Some(index),
),
p2traddressindex_to_p2traddressindex: LazyVecFrom1::init(
"p2traddressindex",
version + Version::ZERO,
indexer.vecs.p2traddressindex_to_p2trbytes.boxed_clone(),
|index, _| Some(index),
),
p2wpkhaddressindex_to_p2wpkhaddressindex: LazyVecFrom1::init(
"p2wpkhaddressindex",
version + Version::ZERO,
indexer.vecs.p2wpkhaddressindex_to_p2wpkhbytes.boxed_clone(),
|index, _| Some(index),
),
p2wshaddressindex_to_p2wshaddressindex: LazyVecFrom1::init(
"p2wshaddressindex",
version + Version::ZERO,
indexer.vecs.p2wshaddressindex_to_p2wshbytes.boxed_clone(),
|index, _| Some(index),
),
p2aaddressindex_to_p2aaddressindex: LazyVecFrom1::init(
"p2aaddressindex",
version + Version::ZERO,
indexer.vecs.p2aaddressindex_to_p2abytes.boxed_clone(),
|index, _| Some(index),
),
p2msoutputindex_to_p2msoutputindex: LazyVecFrom1::init(
"p2msoutputindex",
version + Version::ZERO,
indexer.vecs.p2msoutputindex_to_txindex.boxed_clone(),
|index, _| Some(index),
),
emptyoutputindex_to_emptyoutputindex: LazyVecFrom1::init(
"emptyoutputindex",
version + Version::ZERO,
indexer.vecs.emptyoutputindex_to_txindex.boxed_clone(),
|index, _| Some(index),
),
unknownoutputindex_to_unknownoutputindex: LazyVecFrom1::init(
"unknownoutputindex",
version + Version::ZERO,
indexer.vecs.unknownoutputindex_to_txindex.boxed_clone(),
|index, _| Some(index),
),
opreturnindex_to_opreturnindex: LazyVecFrom1::init(
"opreturnindex",
version + Version::ZERO,
indexer.vecs.opreturnindex_to_txindex.boxed_clone(),
|index, _| Some(index),
),
txindex_to_txindex: LazyVecFrom1::init(
"txindex",
version + Version::ZERO,
indexer.vecs.txindex_to_txid.boxed_clone(),
|index, _| Some(index),
),
txindex_to_input_count: EagerVec::forced_import_compressed(
&db,
"input_count",
version + Version::ZERO,
)?,
txindex_to_output_count: EagerVec::forced_import_compressed(
&db,
"output_count",
version + Version::ZERO,
)?,
dateindex_to_date: EagerVec::forced_import_compressed(
&db,
"date",
@@ -497,6 +427,8 @@ impl Vecs {
.collect(),
)?;
this.db.compact()?;
Ok(this)
}
@@ -517,10 +449,46 @@ impl Vecs {
starting_indexes: brk_indexer::Indexes,
exit: &Exit,
) -> Result<Indexes> {
// ---
// TxInIndex
// ---
let txindex_to_first_txoutindex = &indexer.vecs.txindex_to_first_txoutindex;
let txindex_to_first_txoutindex_reader = txindex_to_first_txoutindex.create_reader();
self.txinindex_to_txoutindex.compute_transform(
starting_indexes.txinindex,
&indexer.vecs.txinindex_to_outpoint,
|(txinindex, outpoint, ..)| {
if unlikely(outpoint.is_coinbase()) {
(txinindex, TxOutIndex::COINBASE)
} else {
let txoutindex = txindex_to_first_txoutindex
.read_unwrap(outpoint.txindex(), &txindex_to_first_txoutindex_reader)
+ outpoint.vout();
(txinindex, txoutindex)
}
},
exit,
)?;
// ---
// TxIndex
// ---
self.txindex_to_input_count.compute_count_from_indexes(
starting_indexes.txindex,
&indexer.vecs.txindex_to_first_txinindex,
&self.txinindex_to_txoutindex,
exit,
)?;
self.txindex_to_output_count.compute_count_from_indexes(
starting_indexes.txindex,
&indexer.vecs.txindex_to_first_txoutindex,
&indexer.vecs.txoutindex_to_value,
exit,
)?;
self.height_to_txindex_count.compute_count_from_indexes(
starting_indexes.height,
&indexer.vecs.height_to_first_txindex,
@@ -928,6 +896,7 @@ impl Vecs {
}
}
#[derive(Debug)]
pub struct Indexes {
indexes: brk_indexer::Indexes,
pub dateindex: DateIndex,

View File

@@ -174,35 +174,35 @@ impl Computer {
self.blks
.compute(indexer, &starting_indexes, parser, exit)?;
std::thread::scope(|scope| -> Result<()> {
let constants = scope.spawn(|| -> Result<()> {
info!("Computing constants...");
self.constants
.compute(&self.indexes, &starting_indexes, exit)?;
Ok(())
});
// std::thread::scope(|scope| -> Result<()> {
// let constants = scope.spawn(|| -> Result<()> {
info!("Computing constants...");
self.constants
.compute(&self.indexes, &starting_indexes, exit)?;
// Ok(())
// });
let chain = scope.spawn(|| -> Result<()> {
info!("Computing chain...");
self.chain.compute(
indexer,
&self.indexes,
&starting_indexes,
self.price.as_ref(),
exit,
)?;
Ok(())
});
// let chain = scope.spawn(|| -> Result<()> {
info!("Computing chain...");
self.chain.compute(
indexer,
&self.indexes,
&starting_indexes,
self.price.as_ref(),
exit,
)?;
// Ok(())
// });
if let Some(price) = self.price.as_ref() {
info!("Computing market...");
self.market.compute(price, &starting_indexes, exit)?;
}
if let Some(price) = self.price.as_ref() {
info!("Computing market...");
self.market.compute(price, &starting_indexes, exit)?;
}
constants.join().unwrap()?;
chain.join().unwrap()?;
Ok(())
})?;
// constants.join().unwrap()?;
// chain.join().unwrap()?;
// Ok(())
// })?;
self.pools.compute(
indexer,
@@ -213,6 +213,8 @@ impl Computer {
exit,
)?;
return Ok(());
info!("Computing stateful...");
self.stateful.compute(
indexer,

View File

@@ -1522,6 +1522,8 @@ impl Vecs {
.collect(),
)?;
this.db.compact()?;
Ok(this)
}

View File

@@ -67,6 +67,8 @@ impl Vecs {
.collect(),
)?;
this.db.compact()?;
Ok(this)
}

File diff suppressed because it is too large Load Diff

View File

@@ -487,6 +487,8 @@ impl Vecs {
.collect(),
)?;
this.db.compact()?;
Ok(this)
}

View File

@@ -23,6 +23,7 @@ pub trait ComputeDCAStackViaLen {
exit: &Exit,
) -> Result<()>;
}
impl ComputeDCAStackViaLen for EagerVec<DateIndex, Sats> {
fn compute_dca_stack_via_len(
&mut self,
@@ -35,41 +36,40 @@ impl ComputeDCAStackViaLen for EagerVec<DateIndex, Sats> {
Version::ZERO + self.inner_version() + closes.version(),
)?;
let mut other_iter = closes.iter();
let mut prev = None;
let index = max_from.to_usize().min(self.len());
// Initialize prev before the loop to avoid checking on every iteration
let mut prev = if index == 0 {
Sats::ZERO
} else {
self.read_at_unwrap_once(index - 1)
};
let mut lookback = closes.create_lookback(index, len, 0);
closes
.iter()
.skip(index)
.enumerate()
.skip(index)
.try_for_each(|(i, closes)| {
let price = *closes;
let i_usize = i.to_usize();
if prev.is_none() {
if i_usize == 0 {
prev.replace(Sats::ZERO);
} else {
prev.replace(self.read_at_unwrap_once(i_usize - 1));
}
}
let mut stack = Sats::ZERO;
if price != Dollars::ZERO {
stack = prev.unwrap() + Sats::from(Bitcoin::from(DCA_AMOUNT / price));
stack = prev + Sats::from(Bitcoin::from(DCA_AMOUNT / price));
if i_usize >= len {
let prev_price = *other_iter.get_at_unwrap(i_usize - len);
if prev_price != Dollars::ZERO {
stack = stack
.checked_sub(Sats::from(Bitcoin::from(DCA_AMOUNT / prev_price)))
.unwrap();
}
let prev_price =
*lookback.get_and_push(i_usize, Close::new(price), Close::default());
if prev_price != Dollars::ZERO {
stack = stack
.checked_sub(Sats::from(Bitcoin::from(DCA_AMOUNT / prev_price)))
.unwrap();
}
}
prev.replace(stack);
prev = stack;
self.forced_push_at(i, stack, exit)
})?;
@@ -90,32 +90,30 @@ impl ComputeDCAStackViaLen for EagerVec<DateIndex, Sats> {
Version::ZERO + self.inner_version() + closes.version(),
)?;
let mut prev = None;
let from = from.to_usize();
let index = max_from.min(DateIndex::from(self.len()));
// Initialize prev before the loop to avoid checking on every iteration
let mut prev = if index.to_usize() == 0 {
Sats::ZERO
} else {
self.read_at_unwrap_once(index.to_usize() - 1)
};
closes
.iter()
.skip(index.to_usize())
.enumerate()
.skip(index.to_usize())
.try_for_each(|(i, closes)| {
let price = *closes;
let i_usize = i.to_usize();
if prev.is_none() {
if i_usize == 0 {
prev.replace(Sats::ZERO);
} else {
prev.replace(self.read_at_unwrap_once(i_usize - 1));
}
}
let mut stack = Sats::ZERO;
if price != Dollars::ZERO && i >= from {
stack = prev.unwrap() + Sats::from(Bitcoin::from(DCA_AMOUNT / price));
stack = prev + Sats::from(Bitcoin::from(DCA_AMOUNT / price));
}
prev.replace(stack);
prev = stack;
self.forced_push_at(i, stack, exit)
})?;
@@ -143,6 +141,7 @@ pub trait ComputeDCAAveragePriceViaLen {
exit: &Exit,
) -> Result<()>;
}
impl ComputeDCAAveragePriceViaLen for EagerVec<DateIndex, Dollars> {
fn compute_dca_avg_price_via_len(
&mut self,
@@ -163,8 +162,8 @@ impl ComputeDCAAveragePriceViaLen for EagerVec<DateIndex, Dollars> {
stacks
.iter()
.skip(index.to_usize())
.enumerate()
.skip(index.to_usize())
.try_for_each(|(i, stack)| {
let mut avg_price = Dollars::from(f64::NAN);
if i > first_price_date {
@@ -199,8 +198,8 @@ impl ComputeDCAAveragePriceViaLen for EagerVec<DateIndex, Dollars> {
stacks
.iter()
.skip(index.to_usize())
.enumerate()
.skip(index.to_usize())
.try_for_each(|(i, stack)| {
let mut avg_price = Dollars::from(f64::NAN);
if i >= from {
@@ -223,6 +222,7 @@ pub trait ComputeFromSats<I> {
exit: &Exit,
) -> Result<()>;
}
impl<I> ComputeFromSats<I> for EagerVec<I, Bitcoin>
where
I: VecIndex,
@@ -233,21 +233,12 @@ where
sats: &impl IterableVec<I, Sats>,
exit: &Exit,
) -> Result<()> {
self.validate_computed_version_or_reset(
Version::ZERO + self.inner_version() + sats.version(),
self.compute_transform(
max_from,
sats,
|(i, sats, _)| (i, Bitcoin::from(sats)),
exit,
)?;
let index = max_from.min(I::from(self.len()));
sats.iter()
.skip(index.to_usize())
.enumerate()
.try_for_each(|(i, sats)| {
let (i, v) = (i, Bitcoin::from(sats));
self.forced_push_at(i, v, exit)
})?;
self.safe_flush(exit)?;
Ok(())
}
}
@@ -261,6 +252,7 @@ pub trait ComputeFromBitcoin<I> {
exit: &Exit,
) -> Result<()>;
}
impl<I> ComputeFromBitcoin<I> for EagerVec<I, Dollars>
where
I: VecIndex,
@@ -272,24 +264,13 @@ where
price: &impl IterableVec<I, Close<Dollars>>,
exit: &Exit,
) -> Result<()> {
self.validate_computed_version_or_reset(
Version::ZERO + self.inner_version() + bitcoin.version(),
self.compute_transform2(
max_from,
bitcoin,
price,
|(i, bitcoin, price, _)| (i, *price * bitcoin),
exit,
)?;
let mut price_iter = price.iter();
let index = max_from.min(I::from(self.len()));
bitcoin
.iter()
.skip(index.to_usize())
.enumerate()
.try_for_each(|(i, bitcoin)| {
let dollars = price_iter.get_at_unwrap(i);
let (i, v) = (i, *dollars * bitcoin);
self.forced_push_at(i, v, exit)
})?;
self.safe_flush(exit)?;
Ok(())
}
}
@@ -303,6 +284,7 @@ pub trait ComputeDrawdown<I> {
exit: &Exit,
) -> Result<()>;
}
impl<I> ComputeDrawdown<I> for EagerVec<I, StoredF32>
where
I: VecIndex,
@@ -314,27 +296,20 @@ where
ath: &impl IterableVec<I, Dollars>,
exit: &Exit,
) -> Result<()> {
self.validate_computed_version_or_reset(
Version::ZERO + self.inner_version() + ath.version() + close.version(),
)?;
let index = max_from.min(I::from(self.len()));
let mut close_iter = close.iter();
ath.iter()
.skip(index.to_usize())
.enumerate()
.try_for_each(|(i, ath)| {
self.compute_transform2(
max_from,
ath,
close,
|(i, ath, close, _)| {
if ath == Dollars::ZERO {
self.forced_push_at(i, StoredF32::default(), exit)
(i, StoredF32::default())
} else {
let close = *close_iter.get_at_unwrap(i);
let drawdown = StoredF32::from((*ath - *close) / *ath * -100.0);
self.forced_push_at(i, drawdown, exit)
let drawdown = StoredF32::from((*ath - **close) / *ath * -100.0);
(i, drawdown)
}
})?;
self.safe_flush(exit)?;
},
exit,
)?;
Ok(())
}
}

View File

@@ -16,7 +16,7 @@ fjall2 = { workspace = true }
fjall3 = { workspace = true }
jiff = { workspace = true }
minreq = { workspace = true }
sonic-rs = { workspace = true }
serde_json = { workspace = true }
tokio = { workspace = true }
vecdb = { workspace = true }
zerocopy = { workspace = true }

View File

@@ -23,7 +23,7 @@ pub enum Error {
BitcoinHexError(bitcoin::consensus::encode::FromHexError),
BitcoinFromScriptError(bitcoin::address::FromScriptError),
BitcoinHexToArrayError(bitcoin::hex::HexToArrayError),
SonicRS(sonic_rs::Error),
SerdeJSON(serde_json::Error),
TokioJoin(tokio::task::JoinError),
ZeroCopyError,
Vecs(vecdb::Error),
@@ -86,10 +86,10 @@ impl From<time::SystemTimeError> for Error {
}
}
impl From<sonic_rs::Error> for Error {
impl From<serde_json::Error> for Error {
#[inline]
fn from(error: sonic_rs::Error) -> Self {
Self::SonicRS(error)
fn from(error: serde_json::Error) -> Self {
Self::SerdeJSON(error)
}
}
@@ -192,7 +192,7 @@ impl fmt::Display for Error {
Error::Jiff(error) => Display::fmt(&error, f),
Error::Minreq(error) => Display::fmt(&error, f),
Error::RawDB(error) => Display::fmt(&error, f),
Error::SonicRS(error) => Display::fmt(&error, f),
Error::SerdeJSON(error) => Display::fmt(&error, f),
Error::SystemTimeError(error) => Display::fmt(&error, f),
Error::TokioJoin(error) => Display::fmt(&error, f),
Error::VecDB(error) => Display::fmt(&error, f),

View File

@@ -15,4 +15,4 @@ brk_logger = { workspace = true }
brk_types = { workspace = true }
log = { workspace = true }
minreq = { workspace = true }
sonic-rs = { workspace = true }
serde_json = { workspace = true }

View File

@@ -8,7 +8,7 @@ use std::{
use brk_error::{Error, Result};
use brk_types::{Cents, OHLCCents, Timestamp};
use log::info;
use sonic_rs::{JsonContainerTrait, JsonValueTrait, Value};
use serde_json::Value;
use crate::{Close, Date, Dollars, Fetcher, High, Low, Open, default_retry};
@@ -68,10 +68,10 @@ impl Binance {
info!("Fetching 1mn prices from Binance...");
default_retry(|_| {
Self::json_to_timestamp_to_ohlc(&sonic_rs::from_str(
Self::json_to_timestamp_to_ohlc(&serde_json::from_slice(
minreq::get(Self::url("interval=1m&limit=1000"))
.send()?
.as_str()?,
.as_bytes(),
)?)
})
}
@@ -93,8 +93,8 @@ impl Binance {
info!("Fetching daily prices from Binance...");
default_retry(|_| {
Self::json_to_date_to_ohlc(&sonic_rs::from_str(
minreq::get(Self::url("interval=1d")).send()?.as_str()?,
Self::json_to_date_to_ohlc(&serde_json::from_slice(
minreq::get(Self::url("interval=1d")).send()?.as_bytes(),
)?)
})
}
@@ -120,7 +120,7 @@ impl Binance {
let reader = BufReader::new(file);
let json: BTreeMap<String, Value> = if let Ok(json) = sonic_rs::from_reader(reader) {
let json: BTreeMap<String, Value> = if let Ok(json) = serde_json::from_reader(reader) {
json
} else {
return Ok(Default::default());
@@ -130,7 +130,7 @@ impl Binance {
.ok_or(Error::Str("Expect object to have log attribute"))?
.as_object()
.ok_or(Error::Str("Expect to be an object"))?
.get(&"entries")
.get("entries")
.ok_or(Error::Str("Expect object to have entries"))?
.as_array()
.ok_or(Error::Str("Expect to be an array"))?
@@ -139,11 +139,11 @@ impl Binance {
entry
.as_object()
.unwrap()
.get(&"request")
.get("request")
.unwrap()
.as_object()
.unwrap()
.get(&"url")
.get("url")
.unwrap()
.as_str()
.unwrap()
@@ -153,14 +153,14 @@ impl Binance {
let response = entry
.as_object()
.unwrap()
.get(&"response")
.get("response")
.unwrap()
.as_object()
.unwrap();
let content = response.get(&"content").unwrap().as_object().unwrap();
let content = response.get("content").unwrap().as_object().unwrap();
let text = content.get(&"text");
let text = content.get("text");
if text.is_none() {
return Ok(BTreeMap::new());
@@ -168,7 +168,7 @@ impl Binance {
let text = text.unwrap().as_str().unwrap();
Self::json_to_timestamp_to_ohlc(&sonic_rs::from_str(text).unwrap())
Self::json_to_timestamp_to_ohlc(&serde_json::from_str(text).unwrap())
})
.try_fold(BTreeMap::default(), |mut all, res| {
all.append(&mut res?);

View File

@@ -3,7 +3,7 @@ use std::collections::BTreeMap;
use brk_error::{Error, Result};
use brk_types::{Cents, CheckedSub, Date, DateIndex, Height, OHLCCents};
use log::info;
use sonic_rs::{JsonContainerTrait, JsonValueTrait, Value};
use serde_json::Value;
use crate::{Close, Dollars, High, Low, Open, default_retry};
@@ -51,7 +51,7 @@ impl BRK {
height + CHUNK_SIZE
);
let body: Value = sonic_rs::from_str(minreq::get(url).send()?.as_str()?)?;
let body: Value = serde_json::from_slice(minreq::get(url).send()?.as_bytes())?;
body.as_array()
.ok_or(Error::Str("Expect to be an array"))?
@@ -96,7 +96,7 @@ impl BRK {
dateindex + CHUNK_SIZE
);
let body: Value = sonic_rs::from_str(minreq::get(url).send()?.as_str()?)?;
let body: Value = serde_json::from_slice(minreq::get(url).send()?.as_bytes())?;
body.as_array()
.ok_or(Error::Str("Expect to be an array"))?

View File

@@ -3,7 +3,7 @@ use std::collections::BTreeMap;
use brk_error::{Error, Result};
use brk_types::{Cents, Close, Date, Dollars, High, Low, OHLCCents, Open, Timestamp};
use log::info;
use sonic_rs::{JsonContainerTrait, JsonValueTrait, Value};
use serde_json::Value;
use crate::{Fetcher, default_retry};
@@ -36,8 +36,8 @@ impl Kraken {
info!("Fetching 1mn prices from Kraken...");
default_retry(|_| {
Self::json_to_timestamp_to_ohlc(&sonic_rs::from_str(
minreq::get(Self::url(1)).send()?.as_str()?,
Self::json_to_timestamp_to_ohlc(&serde_json::from_slice(
minreq::get(Self::url(1)).send()?.as_bytes(),
)?)
})
}
@@ -58,8 +58,8 @@ impl Kraken {
info!("Fetching daily prices from Kraken...");
default_retry(|_| {
Self::json_to_date_to_ohlc(&sonic_rs::from_str(
minreq::get(Self::url(1440)).send()?.as_str()?,
Self::json_to_date_to_ohlc(&serde_json::from_slice(
minreq::get(Self::url(1440)).send()?.as_bytes(),
)?)
})
}
@@ -79,11 +79,11 @@ impl Kraken {
{
json.as_object()
.ok_or(Error::Str("Expect to be an object"))?
.get(&"result")
.get("result")
.ok_or(Error::Str("Expect object to have result"))?
.as_object()
.ok_or(Error::Str("Expect to be an object"))?
.get(&"XXBTZUSD")
.get("XXBTZUSD")
.ok_or(Error::Str("Expect to have XXBTZUSD"))?
.as_array()
.ok_or(Error::Str("Expect to be an array"))?

View File

@@ -1,4 +1,8 @@
use std::{env, fs, path::Path, time::Instant};
use std::{
env, fs,
path::Path,
time::{Duration, Instant},
};
use brk_bencher::Bencher;
use brk_error::Result;
@@ -6,7 +10,7 @@ use brk_indexer::Indexer;
use brk_iterator::Blocks;
use brk_reader::Reader;
use brk_rpc::{Auth, Client};
use log::debug;
use log::{debug, info};
use vecdb::Exit;
fn main() -> Result<()> {
@@ -33,7 +37,7 @@ fn main() -> Result<()> {
let mut indexer = Indexer::forced_import(&outputs_dir)?;
let mut bencher =
Bencher::from_cargo_env(env!("CARGO_PKG_NAME"), &outputs_dir.join("indexed/stores"))?;
Bencher::from_cargo_env(env!("CARGO_PKG_NAME"), &outputs_dir.join("indexed"))?;
bencher.start()?;
let exit = Exit::new();
@@ -46,10 +50,12 @@ fn main() -> Result<()> {
let i = Instant::now();
indexer.checked_index(&blocks, &client, &exit)?;
dbg!(i.elapsed());
info!("Done in {:?}", i.elapsed());
// We want to benchmark the drop too
drop(indexer);
std::thread::sleep(Duration::from_secs(10));
Ok(())
}

View File

@@ -11,15 +11,27 @@ fn main() -> Result<()> {
let indexer = Indexer::forced_import(&outputs_dir)?;
let mut sum = Sats::ZERO;
let mut count: usize = 0;
// let mut sum = Sats::ZERO;
// let mut count: usize = 0;
for value in indexer.vecs.txoutindex_to_value.clean_iter().unwrap() {
sum += value;
count += 1;
}
// for value in indexer.vecs.txoutindex_to_value.clean_iter()? {
// sum += value;
// count += 1;
// }
println!("sum = {sum}, count = {count}");
// println!("sum = {sum}, count = {count}");
dbg!(
indexer
.vecs
.txoutindex_to_value
.iter()?
.enumerate()
.take(200)
// .filter(|(_, op)| !op.is_coinbase())
// .map(|(i, op)| (i, op.txindex(), op.vout()))
.collect::<Vec<_>>()
);
Ok(())
}

View File

@@ -96,6 +96,7 @@ impl From<(Height, &mut Vecs, &Stores)> for Indexes {
let stores_starting_height = stores.starting_height();
let height = vecs_starting_height.min(stores_starting_height);
if height < min_height {
dbg!(height, min_height);
unreachable!()
}

View File

@@ -17,13 +17,13 @@ use rayon::prelude::*;
use rustc_hash::{FxHashMap, FxHashSet};
use vecdb::{AnyVec, Exit, GenericStoredVec, Reader, TypedVecIterator};
mod indexes;
mod stores_v2;
// mod stores_v3;
// mod stores_v2;
mod stores_v3;
mod vecs;
pub use indexes::*;
pub use stores_v2::*;
// pub use stores_v3::*;
// pub use stores_v2::*;
pub use stores_v3::*;
pub use vecs::*;
// One version for all data sources

View File

@@ -186,7 +186,7 @@ impl Stores {
})
.collect::<Result<Vec<_>>>()?;
self.database.batch().commit_partitions(tuples)?;
self.database.batch().commit_keyspaces(tuples)?;
self.database
.persist(PersistMode::SyncAll)

View File

@@ -18,6 +18,7 @@ use crate::Indexes;
#[derive(Clone, Traversable)]
pub struct Vecs {
db: Database,
pub emptyoutputindex_to_txindex: CompressedVec<EmptyOutputIndex, TxIndex>,
pub height_to_blockhash: RawVec<Height, BlockHash>,
pub height_to_difficulty: CompressedVec<Height, StoredF64>,
@@ -52,7 +53,7 @@ pub struct Vecs {
pub p2wshaddressindex_to_p2wshbytes: RawVec<P2WSHAddressIndex, P2WSHBytes>,
pub txindex_to_base_size: CompressedVec<TxIndex, StoredU32>,
pub txindex_to_first_txinindex: CompressedVec<TxIndex, TxInIndex>,
pub txindex_to_first_txoutindex: CompressedVec<TxIndex, TxOutIndex>,
pub txindex_to_first_txoutindex: RawVec<TxIndex, TxOutIndex>,
pub txindex_to_height: CompressedVec<TxIndex, Height>,
pub txindex_to_is_explicitly_rbf: CompressedVec<TxIndex, StoredBool>,
pub txindex_to_rawlocktime: CompressedVec<TxIndex, RawLockTime>,
@@ -167,11 +168,7 @@ impl Vecs {
"first_txinindex",
version,
)?,
txindex_to_first_txoutindex: CompressedVec::forced_import(
&db,
"first_txoutindex",
version,
)?,
txindex_to_first_txoutindex: RawVec::forced_import(&db, "first_txoutindex", version)?,
txindex_to_is_explicitly_rbf: CompressedVec::forced_import(
&db,
"is_explicitly_rbf",
@@ -197,6 +194,8 @@ impl Vecs {
.collect(),
)?;
this.db.compact()?;
Ok(this)
}
@@ -420,4 +419,8 @@ impl Vecs {
]
.into_iter()
}
pub fn db(&self) -> &Database {
&self.db
}
}

View File

@@ -28,8 +28,8 @@ pub fn init(path: Option<&Path>) -> io::Result<()> {
}
Builder::from_env(Env::default().default_filter_or(
"debug,bitcoin=off,bitcoincore-rpc=off,fjall=off,brk_fjall=off,lsm_tree=off,rolldown=off,rmcp=off,brk_rmcp=off,tracing=off,aide=off",
// "debug,fjall=trace,bitcoin=off,bitcoincore-rpc=off,rolldown=off,rmcp=off,brk_rmcp=off,tracing=off,aide=off",
"debug,bitcoin=off,bitcoincore-rpc=off,fjall=off,brk_fjall=off,lsm_tree=off,rolldown=off,rmcp=off,brk_rmcp=off,tracing=off,aide=off,rustls=off",
// "debug,fjall=trace,bitcoin=off,bitcoincore-rpc=off,rolldown=off,rmcp=off,brk_rmcp=off,tracing=off,aide=off,rustls=off",
))
.format(move |buf, record| {
let date_time = Timestamp::now()

View File

@@ -29,7 +29,8 @@ jiff = { workspace = true }
log = { workspace = true }
quick_cache = "0.6.18"
schemars = { workspace = true }
sonic-rs = { workspace = true }
serde = { workspace = true }
serde_json = { workspace = true }
tokio = { workspace = true }
tower-http = { version = "0.6.6", features = ["compression-full", "trace"] }
tracing = "0.1.41"

View File

@@ -3,6 +3,7 @@ use axum::{
http::{Response, StatusCode},
response::IntoResponse,
};
use serde::Serialize;
use super::header_map::HeaderMapExtended;
@@ -13,10 +14,10 @@ where
fn new_not_modified() -> Self;
fn new_json<T>(value: T, etag: &str) -> Self
where
T: sonic_rs::Serialize;
T: Serialize;
fn new_json_with<T>(status: StatusCode, value: T, etag: &str) -> Self
where
T: sonic_rs::Serialize;
T: Serialize;
}
impl ResponseExtended for Response<Body> {
@@ -29,16 +30,16 @@ impl ResponseExtended for Response<Body> {
fn new_json<T>(value: T, etag: &str) -> Self
where
T: sonic_rs::Serialize,
T: Serialize,
{
Self::new_json_with(StatusCode::default(), value, etag)
}
fn new_json_with<T>(status: StatusCode, value: T, etag: &str) -> Self
where
T: sonic_rs::Serialize,
T: Serialize,
{
let bytes = sonic_rs::to_vec(&value).unwrap();
let bytes = serde_json::to_vec(&value).unwrap();
let mut response = Response::builder().body(bytes.into()).unwrap();
*response.status_mut() = status;
let headers = response.headers_mut();

View File

@@ -1,5 +1,6 @@
use axum::{http::StatusCode, response::Response};
use brk_error::{Error, Result};
use serde::Serialize;
use crate::extended::ResponseExtended;
@@ -7,7 +8,7 @@ pub trait ResultExtended<T> {
fn with_status(self) -> Result<T, (StatusCode, String)>;
fn to_json_response(self, etag: &str) -> Response
where
T: sonic_rs::Serialize;
T: Serialize;
}
impl<T> ResultExtended<T> for Result<T> {
@@ -29,7 +30,7 @@ impl<T> ResultExtended<T> for Result<T> {
fn to_json_response(self, etag: &str) -> Response
where
T: sonic_rs::Serialize,
T: Serialize,
{
match self.with_status() {
Ok(value) => Response::new_json(&value, etag),

View File

@@ -6,7 +6,7 @@ use std::{
use derive_deref::{Deref, DerefMut};
use serde::{Serialize, Serializer, ser::SerializeTuple};
use vecdb::{Compressable, Formattable};
use vecdb::{Compressable, Formattable, TransparentCompressable};
use zerocopy::{FromBytes, Immutable, IntoBytes, KnownLayout};
use crate::StoredF64;
@@ -77,7 +77,7 @@ impl Formattable for OHLCCents {
}
}
#[derive(Debug, Default, Clone, FromBytes, Immutable, IntoBytes, KnownLayout)]
#[derive(Debug, Default, Clone, Copy, FromBytes, Immutable, IntoBytes, KnownLayout)]
#[repr(C)]
pub struct OHLCDollars {
pub open: Open<Dollars>,
@@ -86,17 +86,24 @@ pub struct OHLCDollars {
pub close: Close<Dollars>,
}
// This is FAKE, just to be able to use EagerVec
// Need to find a better way
impl Compressable for OHLCDollars {
type NumberType = u64;
}
impl TransparentCompressable<u64> for OHLCDollars {}
impl Serialize for OHLCDollars {
fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
where
S: Serializer,
{
let mut tup = serializer.serialize_tuple(4)?;
tup.serialize_element(&self.open)?;
tup.serialize_element(&self.high)?;
tup.serialize_element(&self.low)?;
tup.serialize_element(&self.close)?;
tup.end()
let mut tuple = serializer.serialize_tuple(4)?;
tuple.serialize_element(&self.open)?;
tuple.serialize_element(&self.high)?;
tuple.serialize_element(&self.low)?;
tuple.serialize_element(&self.close)?;
tuple.end()
}
}
@@ -160,7 +167,7 @@ impl Formattable for OHLCDollars {
}
}
#[derive(Debug, Default, Clone, FromBytes, Immutable, IntoBytes, KnownLayout)]
#[derive(Debug, Default, Clone, Copy, FromBytes, Immutable, IntoBytes, KnownLayout)]
#[repr(C)]
pub struct OHLCSats {
pub open: Open<Sats>,
@@ -169,17 +176,24 @@ pub struct OHLCSats {
pub close: Close<Sats>,
}
// This is FAKE, just to be able to use EagerVec
// Need to find a better way
impl Compressable for OHLCSats {
type NumberType = u64;
}
impl TransparentCompressable<u64> for OHLCSats {}
impl Serialize for OHLCSats {
fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
where
S: Serializer,
{
let mut tup = serializer.serialize_tuple(4)?;
tup.serialize_element(&self.open)?;
tup.serialize_element(&self.high)?;
tup.serialize_element(&self.low)?;
tup.serialize_element(&self.close)?;
tup.end()
let mut tuple = serializer.serialize_tuple(4)?;
tuple.serialize_element(&self.open)?;
tuple.serialize_element(&self.high)?;
tuple.serialize_element(&self.low)?;
tuple.serialize_element(&self.close)?;
tuple.end()
}
}

View File

@@ -34,14 +34,17 @@ impl OutPoint {
Self(txindex_bits | vout_bits)
}
#[inline(always)]
pub fn txindex(self) -> TxIndex {
TxIndex::from((self.0 >> 32) as u32)
}
#[inline(always)]
pub fn vout(self) -> Vout {
Vout::from(self.0 as u32)
}
#[inline(always)]
pub fn is_coinbase(self) -> bool {
self == Self::COINBASE
}

View File

@@ -7,7 +7,7 @@ use bitcoin::Amount;
use derive_deref::Deref;
use schemars::JsonSchema;
use serde::{Deserialize, Serialize};
use vecdb::{CheckedSub, Compressable, Formattable};
use vecdb::{CheckedSub, Compressable, Formattable, SaturatingAdd};
use zerocopy::{FromBytes, Immutable, IntoBytes, KnownLayout};
use crate::StoredF64;
@@ -97,7 +97,7 @@ impl AddAssign for Sats {
}
}
impl CheckedSub<Sats> for Sats {
impl CheckedSub for Sats {
fn checked_sub(self, rhs: Self) -> Option<Self> {
self.0.checked_sub(rhs.0).map(Self::from)
}
@@ -109,6 +109,12 @@ impl CheckedSub<usize> for Sats {
}
}
impl SaturatingAdd for Sats {
fn saturating_add(self, rhs: Self) -> Self {
Self(self.0.saturating_add(rhs.0))
}
}
impl SubAssign for Sats {
fn sub_assign(&mut self, rhs: Self) {
*self = self.checked_sub(rhs).unwrap();