global: snapshot

This commit is contained in:
nym21
2025-11-06 13:07:07 +01:00
parent a2ba4d89f3
commit df577ca7f5
62 changed files with 1078 additions and 2291 deletions

2
.gitignore vendored
View File

@@ -14,7 +14,7 @@ bridge/
_* _*
# Logs # Logs
.log* *.log*
# Environment variables/configs # Environment variables/configs
.env .env

133
Cargo.lock generated
View File

@@ -73,7 +73,7 @@ checksum = "fe233a377643e0fc1a56421d7c90acdec45c291b30345eb9f08e8d0ddce5a4ab"
dependencies = [ dependencies = [
"proc-macro2", "proc-macro2",
"quote", "quote",
"syn 2.0.108", "syn 2.0.109",
] ]
[[package]] [[package]]
@@ -212,7 +212,7 @@ checksum = "9035ad2d096bed7955a320ee7e2230574d28fd3c3a0f186cbea1ff3c7eed5dbb"
dependencies = [ dependencies = [
"proc-macro2", "proc-macro2",
"quote", "quote",
"syn 2.0.108", "syn 2.0.109",
] ]
[[package]] [[package]]
@@ -755,8 +755,8 @@ dependencies = [
"brk_traversable", "brk_traversable",
"brk_types", "brk_types",
"log", "log",
"rand 0.9.2",
"rayon", "rayon",
"redb",
"rustc-hash", "rustc-hash",
"vecdb", "vecdb",
] ]
@@ -885,7 +885,7 @@ dependencies = [
"proc-macro2", "proc-macro2",
"quote", "quote",
"serde_json", "serde_json",
"syn 2.0.108", "syn 2.0.109",
] ]
[[package]] [[package]]
@@ -1294,7 +1294,6 @@ dependencies = [
"candystore", "candystore",
"log", "log",
"parking_lot 0.12.5", "parking_lot 0.12.5",
"redb",
"rustc-hash", "rustc-hash",
] ]
@@ -1326,7 +1325,7 @@ version = "0.0.111"
dependencies = [ dependencies = [
"proc-macro2", "proc-macro2",
"quote", "quote",
"syn 2.0.108", "syn 2.0.109",
] ]
[[package]] [[package]]
@@ -1336,13 +1335,12 @@ dependencies = [
"allocative", "allocative",
"bitcoin", "bitcoin",
"brk_error", "brk_error",
"byteview 0.8.0", "byteview 0.6.1",
"derive_deref", "derive_deref",
"itoa", "itoa",
"jiff", "jiff",
"num_enum", "num_enum",
"rapidhash", "rapidhash",
"redb",
"ryu", "ryu",
"schemars", "schemars",
"serde", "serde",
@@ -1400,7 +1398,7 @@ checksum = "f9abbd1bc6865053c427f7198e6af43bfdedc55ab791faed4fbd361d789575ff"
dependencies = [ dependencies = [
"proc-macro2", "proc-macro2",
"quote", "quote",
"syn 2.0.108", "syn 2.0.109",
] ]
[[package]] [[package]]
@@ -1542,7 +1540,7 @@ dependencies = [
"heck", "heck",
"proc-macro2", "proc-macro2",
"quote", "quote",
"syn 2.0.108", "syn 2.0.109",
] ]
[[package]] [[package]]
@@ -1812,7 +1810,7 @@ dependencies = [
"proc-macro2", "proc-macro2",
"quote", "quote",
"strsim", "strsim",
"syn 2.0.108", "syn 2.0.109",
] ]
[[package]] [[package]]
@@ -1823,7 +1821,7 @@ checksum = "d38308df82d1080de0afee5d069fa14b0326a88c14f15c5ccda35b4a6c414c81"
dependencies = [ dependencies = [
"darling_core", "darling_core",
"quote", "quote",
"syn 2.0.108", "syn 2.0.109",
] ]
[[package]] [[package]]
@@ -1865,7 +1863,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "daf656eb071fe87d23716f933788a35a8ad6baa6fdbf66a67a261dbd3f9dc81a" checksum = "daf656eb071fe87d23716f933788a35a8ad6baa6fdbf66a67a261dbd3f9dc81a"
dependencies = [ dependencies = [
"quote2", "quote2",
"syn 2.0.108", "syn 2.0.109",
] ]
[[package]] [[package]]
@@ -1885,7 +1883,7 @@ checksum = "1e567bd82dcff979e4b03460c307b3cdc9e96fde3d73bed1496d2bc75d9dd62a"
dependencies = [ dependencies = [
"proc-macro2", "proc-macro2",
"quote", "quote",
"syn 2.0.108", "syn 2.0.109",
] ]
[[package]] [[package]]
@@ -1916,7 +1914,7 @@ checksum = "bda628edc44c4bb645fbe0f758797143e4e07926f7ebf4e9bdfbd3d2ce621df3"
dependencies = [ dependencies = [
"proc-macro2", "proc-macro2",
"quote", "quote",
"syn 2.0.108", "syn 2.0.109",
"unicode-xid", "unicode-xid",
] ]
@@ -1950,7 +1948,7 @@ checksum = "97369cbbc041bc366949bc74d34658d6cda5621039731c6310521892a3a20ae0"
dependencies = [ dependencies = [
"proc-macro2", "proc-macro2",
"quote", "quote",
"syn 2.0.108", "syn 2.0.109",
] ]
[[package]] [[package]]
@@ -2004,7 +2002,7 @@ dependencies = [
"once_cell", "once_cell",
"proc-macro2", "proc-macro2",
"quote", "quote",
"syn 2.0.108", "syn 2.0.109",
] ]
[[package]] [[package]]
@@ -2231,7 +2229,7 @@ checksum = "162ee34ebcb7c64a8abebc059ce0fee27c2262618d7b60ed8faf72fef13c3650"
dependencies = [ dependencies = [
"proc-macro2", "proc-macro2",
"quote", "quote",
"syn 2.0.108", "syn 2.0.109",
] ]
[[package]] [[package]]
@@ -2691,7 +2689,7 @@ checksum = "03343451ff899767262ec32146f6d559dd759fdadf42ff0e227c7c48f72594b4"
dependencies = [ dependencies = [
"proc-macro2", "proc-macro2",
"quote", "quote",
"syn 2.0.108", "syn 2.0.109",
] ]
[[package]] [[package]]
@@ -2865,6 +2863,8 @@ dependencies = [
[[package]] [[package]]
name = "lsm-tree" name = "lsm-tree"
version = "3.0.0-pre.4" version = "3.0.0-pre.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "70cb98c7f7806b0ad4e9872735c8674bba8ce1277d0fc688d66e8a272b578dd7"
dependencies = [ dependencies = [
"byteorder-lite", "byteorder-lite",
"byteview 0.8.0", "byteview 0.8.0",
@@ -2978,7 +2978,7 @@ checksum = "b093064383341eb3271f42e381cb8f10a01459478446953953c75d24bd339fc0"
dependencies = [ dependencies = [
"proc-macro2", "proc-macro2",
"quote", "quote",
"syn 2.0.108", "syn 2.0.109",
"target-features", "target-features",
] ]
@@ -2999,7 +2999,7 @@ checksum = "4568f25ccbd45ab5d5603dc34318c1ec56b117531781260002151b8530a9f931"
dependencies = [ dependencies = [
"proc-macro2", "proc-macro2",
"quote", "quote",
"syn 2.0.108", "syn 2.0.109",
] ]
[[package]] [[package]]
@@ -3124,7 +3124,7 @@ dependencies = [
"proc-macro-crate", "proc-macro-crate",
"proc-macro2", "proc-macro2",
"quote", "quote",
"syn 2.0.108", "syn 2.0.109",
] ]
[[package]] [[package]]
@@ -3238,7 +3238,7 @@ checksum = "003b4612827f6501183873fb0735da92157e3c7daa71c40921c7d2758fec2229"
dependencies = [ dependencies = [
"proc-macro2", "proc-macro2",
"quote", "quote",
"syn 2.0.108", "syn 2.0.109",
] ]
[[package]] [[package]]
@@ -3282,7 +3282,7 @@ dependencies = [
"phf", "phf",
"proc-macro2", "proc-macro2",
"quote", "quote",
"syn 2.0.108", "syn 2.0.109",
] ]
[[package]] [[package]]
@@ -3812,7 +3812,7 @@ dependencies = [
"phf_shared", "phf_shared",
"proc-macro2", "proc-macro2",
"quote", "quote",
"syn 2.0.108", "syn 2.0.109",
] ]
[[package]] [[package]]
@@ -3841,7 +3841,7 @@ checksum = "6e918e4ff8c4549eb882f14b3a4bc8c8bc93de829416eacf579f1207a8fbf861"
dependencies = [ dependencies = [
"proc-macro2", "proc-macro2",
"quote", "quote",
"syn 2.0.108", "syn 2.0.109",
] ]
[[package]] [[package]]
@@ -3864,14 +3864,14 @@ checksum = "7edddbd0b52d732b21ad9a5fab5c704c14cd949e5e9a1ec5929a24fded1b904c"
[[package]] [[package]]
name = "pnp" name = "pnp"
version = "0.12.4" version = "0.12.5"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "7adbc1ab7344e1e77be663e91cb129e989e398c319df7a9b8dbda9dd6758df38" checksum = "2acd0b1e3a154e7c4610b9ab31491c32e9f47db2adc0c12047301f3bacc71597"
dependencies = [ dependencies = [
"byteorder", "byteorder",
"concurrent_lru", "concurrent_lru",
"fancy-regex", "fancy-regex",
"miniz_oxide", "flate2",
"pathdiff", "pathdiff",
"radix_trie", "radix_trie",
"rustc-hash", "rustc-hash",
@@ -3954,7 +3954,7 @@ checksum = "7347867d0a7e1208d93b46767be83e2b8f978c3dad35f775ac8d8847551d6fe1"
dependencies = [ dependencies = [
"proc-macro2", "proc-macro2",
"quote", "quote",
"syn 2.0.108", "syn 2.0.109",
] ]
[[package]] [[package]]
@@ -4108,9 +4108,9 @@ dependencies = [
[[package]] [[package]]
name = "rawdb" name = "rawdb"
version = "0.3.4" version = "0.3.7"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "c184e0f054f8b9018424fb226dbf122ceae050eb5f900e45d0809ff9fdc7b0d0" checksum = "9c06cc46cc4b6da16c9981293906062230f822879c2c88367a40673db5e44414"
dependencies = [ dependencies = [
"libc", "libc",
"memmap2", "memmap2",
@@ -4138,15 +4138,6 @@ dependencies = [
"crossbeam-utils", "crossbeam-utils",
] ]
[[package]]
name = "redb"
version = "3.1.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "ae323eb086579a3769daa2c753bb96deb95993c534711e0dbe881b5192906a06"
dependencies = [
"libc",
]
[[package]] [[package]]
name = "redox_syscall" name = "redox_syscall"
version = "0.2.16" version = "0.2.16"
@@ -4182,7 +4173,7 @@ checksum = "b7186006dcb21920990093f30e3dea63b7d6e977bf1256be20c3563a5db070da"
dependencies = [ dependencies = [
"proc-macro2", "proc-macro2",
"quote", "quote",
"syn 2.0.108", "syn 2.0.109",
] ]
[[package]] [[package]]
@@ -4270,7 +4261,7 @@ checksum = "bd83f5f173ff41e00337d97f6572e416d022ef8a19f371817259ae960324c482"
dependencies = [ dependencies = [
"proc-macro2", "proc-macro2",
"quote", "quote",
"syn 2.0.108", "syn 2.0.109",
] ]
[[package]] [[package]]
@@ -4363,9 +4354,9 @@ dependencies = [
[[package]] [[package]]
name = "schemars" name = "schemars"
version = "1.0.5" version = "1.1.0"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "1317c3bf3e7df961da95b0a56a172a02abead31276215a0497241a7624b487ce" checksum = "9558e172d4e8533736ba97870c4b2cd63f84b382a3d6eb063da41b91cce17289"
dependencies = [ dependencies = [
"chrono", "chrono",
"dyn-clone", "dyn-clone",
@@ -4378,14 +4369,14 @@ dependencies = [
[[package]] [[package]]
name = "schemars_derive" name = "schemars_derive"
version = "1.0.5" version = "1.1.0"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "5f760a6150d45dd66ec044983c124595ae76912e77ed0b44124cb3e415cce5d9" checksum = "301858a4023d78debd2353c7426dc486001bddc91ae31a76fb1f55132f7e2633"
dependencies = [ dependencies = [
"proc-macro2", "proc-macro2",
"quote", "quote",
"serde_derive_internals", "serde_derive_internals",
"syn 2.0.108", "syn 2.0.109",
] ]
[[package]] [[package]]
@@ -4484,7 +4475,7 @@ checksum = "d540f220d3187173da220f885ab66608367b6574e925011a9353e4badda91d79"
dependencies = [ dependencies = [
"proc-macro2", "proc-macro2",
"quote", "quote",
"syn 2.0.108", "syn 2.0.109",
] ]
[[package]] [[package]]
@@ -4495,7 +4486,7 @@ checksum = "18d26a20a969b9e3fdf2fc2d9f21eda6c40e2de84c9408bb5d3b05d499aae711"
dependencies = [ dependencies = [
"proc-macro2", "proc-macro2",
"quote", "quote",
"syn 2.0.108", "syn 2.0.109",
] ]
[[package]] [[package]]
@@ -4749,7 +4740,7 @@ dependencies = [
"heck", "heck",
"proc-macro2", "proc-macro2",
"quote", "quote",
"syn 2.0.108", "syn 2.0.109",
] ]
[[package]] [[package]]
@@ -4774,9 +4765,9 @@ dependencies = [
[[package]] [[package]]
name = "syn" name = "syn"
version = "2.0.108" version = "2.0.109"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "da58917d35242480a05c2897064da0a80589a2a0476c9a3f2fdc83b53502e917" checksum = "2f17c7e013e88258aa9543dcbe81aca68a667a9ac37cd69c9fbc07858bfe0e2f"
dependencies = [ dependencies = [
"proc-macro2", "proc-macro2",
"quote", "quote",
@@ -4797,7 +4788,7 @@ checksum = "728a70f3dbaf5bab7f0c4b1ac8d7ae5ea60a4b5549c8a5914361c99147a709d2"
dependencies = [ dependencies = [
"proc-macro2", "proc-macro2",
"quote", "quote",
"syn 2.0.108", "syn 2.0.109",
] ]
[[package]] [[package]]
@@ -4865,7 +4856,7 @@ checksum = "4fee6c4efc90059e10f81e6d42c60a18f76588c3d74cb83a0b242a2b6c7504c1"
dependencies = [ dependencies = [
"proc-macro2", "proc-macro2",
"quote", "quote",
"syn 2.0.108", "syn 2.0.109",
] ]
[[package]] [[package]]
@@ -4876,7 +4867,7 @@ checksum = "3ff15c8ecd7de3849db632e14d18d2571fa09dfc5ed93479bc4485c7a517c913"
dependencies = [ dependencies = [
"proc-macro2", "proc-macro2",
"quote", "quote",
"syn 2.0.108", "syn 2.0.109",
] ]
[[package]] [[package]]
@@ -4955,7 +4946,7 @@ checksum = "af407857209536a95c8e56f8231ef2c2e2aff839b22e07a1ffcbc617e9db9fa5"
dependencies = [ dependencies = [
"proc-macro2", "proc-macro2",
"quote", "quote",
"syn 2.0.108", "syn 2.0.109",
] ]
[[package]] [[package]]
@@ -5101,7 +5092,7 @@ checksum = "81383ab64e72a7a8b8e13130c49e3dab29def6d0c7d76a03087b3cf71c5c6903"
dependencies = [ dependencies = [
"proc-macro2", "proc-macro2",
"quote", "quote",
"syn 2.0.108", "syn 2.0.109",
] ]
[[package]] [[package]]
@@ -5191,7 +5182,7 @@ checksum = "ee6ff59666c9cbaec3533964505d39154dc4e0a56151fdea30a09ed0301f62e2"
dependencies = [ dependencies = [
"proc-macro2", "proc-macro2",
"quote", "quote",
"syn 2.0.108", "syn 2.0.109",
"termcolor", "termcolor",
] ]
@@ -5330,9 +5321,9 @@ checksum = "8f54a172d0620933a27a4360d3db3e2ae0dd6cceae9730751a036bbf182c4b23"
[[package]] [[package]]
name = "vecdb" name = "vecdb"
version = "0.3.4" version = "0.3.7"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "44fef339cc0a6103cea9df44c1133d8275585b90dbe2faa6e3f60e6d2bac47b6" checksum = "f0cff5037d7e5d66344dff249c366a527b530ef3c723e04f4a90fa11d5f6a9fc"
dependencies = [ dependencies = [
"allocative", "allocative",
"ctrlc", "ctrlc",
@@ -5349,12 +5340,12 @@ dependencies = [
[[package]] [[package]]
name = "vecdb_derive" name = "vecdb_derive"
version = "0.3.4" version = "0.3.7"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "c09556497d1695d5ba268ebeaa1d2547c2c597a60098fde60fe78028b463c4c1" checksum = "9f73046e065e63abb2ad8d1616a08ca8acf7d543f04ffb0222f6fc44e0d3eeec"
dependencies = [ dependencies = [
"quote", "quote",
"syn 2.0.108", "syn 2.0.109",
] ]
[[package]] [[package]]
@@ -5441,7 +5432,7 @@ dependencies = [
"bumpalo", "bumpalo",
"proc-macro2", "proc-macro2",
"quote", "quote",
"syn 2.0.108", "syn 2.0.109",
"wasm-bindgen-shared", "wasm-bindgen-shared",
] ]
@@ -5544,7 +5535,7 @@ checksum = "053e2e040ab57b9dc951b72c264860db7eb3b0200ba345b4e4c3b14f67855ddf"
dependencies = [ dependencies = [
"proc-macro2", "proc-macro2",
"quote", "quote",
"syn 2.0.108", "syn 2.0.109",
] ]
[[package]] [[package]]
@@ -5555,7 +5546,7 @@ checksum = "3f316c4a2570ba26bbec722032c4099d8c8bc095efccdc15688708623367e358"
dependencies = [ dependencies = [
"proc-macro2", "proc-macro2",
"quote", "quote",
"syn 2.0.108", "syn 2.0.109",
] ]
[[package]] [[package]]
@@ -5818,7 +5809,7 @@ checksum = "b659052874eb698efe5b9e8cf382204678a0086ebf46982b79d6ca3182927e5d"
dependencies = [ dependencies = [
"proc-macro2", "proc-macro2",
"quote", "quote",
"syn 2.0.108", "syn 2.0.109",
"synstructure", "synstructure",
] ]
@@ -5839,7 +5830,7 @@ checksum = "88d2b8d9c68ad2b9e4340d7832716a4d21a22a1154777ad56ea55c51a9cf3831"
dependencies = [ dependencies = [
"proc-macro2", "proc-macro2",
"quote", "quote",
"syn 2.0.108", "syn 2.0.109",
] ]
[[package]] [[package]]
@@ -5859,7 +5850,7 @@ checksum = "d71e5d6e06ab090c67b5e44993ec16b72dcbaabc526db883a360057678b48502"
dependencies = [ dependencies = [
"proc-macro2", "proc-macro2",
"quote", "quote",
"syn 2.0.108", "syn 2.0.109",
"synstructure", "synstructure",
] ]
@@ -5893,7 +5884,7 @@ checksum = "eadce39539ca5cb3985590102671f2567e659fca9666581ad3411d59207951f3"
dependencies = [ dependencies = [
"proc-macro2", "proc-macro2",
"quote", "quote",
"syn 2.0.108", "syn 2.0.109",
] ]
[[package]] [[package]]

View File

@@ -52,8 +52,8 @@ brk_store = { version = "0.0.111", path = "crates/brk_store" }
brk_types = { version = "0.0.111", path = "crates/brk_types" } brk_types = { version = "0.0.111", path = "crates/brk_types" }
brk_traversable = { version = "0.0.111", path = "crates/brk_traversable", features = ["derive"] } brk_traversable = { version = "0.0.111", path = "crates/brk_traversable", features = ["derive"] }
brk_traversable_derive = { version = "0.0.111", path = "crates/brk_traversable_derive" } brk_traversable_derive = { version = "0.0.111", path = "crates/brk_traversable_derive" }
# byteview = "=0.6.1" byteview = "=0.6.1"
byteview = "~0.8.0" # byteview = "~0.8.0"
derive_deref = "1.1.1" derive_deref = "1.1.1"
fjall2 = { version = "2.11.5", package = "brk_fjall" } fjall2 = { version = "2.11.5", package = "brk_fjall" }
# fjall2 = { path = "../fjall2", package = "brk_fjall" } # fjall2 = { path = "../fjall2", package = "brk_fjall" }
@@ -66,9 +66,8 @@ log = "0.4.28"
minreq = { version = "2.14.1", features = ["https", "serde_json"] } minreq = { version = "2.14.1", features = ["https", "serde_json"] }
parking_lot = "0.12.5" parking_lot = "0.12.5"
rayon = "1.11.0" rayon = "1.11.0"
redb = "3.1.0"
rustc-hash = "2.1.1" rustc-hash = "2.1.1"
schemars = "1.0.5" schemars = "1.1.0"
serde = "1.0.228" serde = "1.0.228"
serde_bytes = "0.11.19" serde_bytes = "0.11.19"
serde_derive = "1.0.228" serde_derive = "1.0.228"
@@ -76,7 +75,7 @@ serde_json = { version = "1.0.145", features = ["float_roundtrip"] }
sonic-rs = "0.5.6" sonic-rs = "0.5.6"
tokio = { version = "1.48.0", features = ["rt-multi-thread"] } tokio = { version = "1.48.0", features = ["rt-multi-thread"] }
# vecdb = { path = "../seqdb/crates/vecdb", features = ["derive"] } # vecdb = { path = "../seqdb/crates/vecdb", features = ["derive"] }
vecdb = { version = "0.3.4", features = ["derive"] } vecdb = { version = "0.3.7", features = ["derive"] }
zerocopy = { version = "0.8.27", features = ["derive"] } zerocopy = { version = "0.8.27", features = ["derive"] }
[workspace.metadata.release] [workspace.metadata.release]
@@ -88,6 +87,6 @@ tag-message = "release: v{{version}}"
[workspace.metadata.dist] [workspace.metadata.dist]
cargo-dist-version = "0.30.2" cargo-dist-version = "0.30.2"
ci = "github" ci = "github"
# allow-dirty = ["ci"] allow-dirty = ["ci"]
installers = [] installers = []
targets = ["aarch64-apple-darwin", "aarch64-unknown-linux-gnu", "x86_64-unknown-linux-gnu"] targets = ["aarch64-apple-darwin", "aarch64-unknown-linux-gnu", "x86_64-unknown-linux-gnu"]

View File

@@ -15,22 +15,3 @@ pub fn dot_brk_log_path() -> PathBuf {
pub fn default_brk_path() -> PathBuf { pub fn default_brk_path() -> PathBuf {
dot_brk_path() dot_brk_path()
} }
pub fn default_bitcoin_path() -> PathBuf {
if env::consts::OS == "macos" {
default_mac_bitcoin_path()
} else {
default_linux_bitcoin_path()
}
}
pub fn default_linux_bitcoin_path() -> PathBuf {
Path::new(&std::env::var("HOME").unwrap()).join(".bitcoin")
}
pub fn default_mac_bitcoin_path() -> PathBuf {
Path::new(&std::env::var("HOME").unwrap())
.join("Library")
.join("Application Support")
.join("Bitcoin")
}

View File

@@ -1,4 +1,5 @@
use std::{ use std::{
env,
path::Path, path::Path,
thread::{self, sleep}, thread::{self, sleep},
time::{Duration, Instant}, time::{Duration, Instant},
@@ -25,17 +26,14 @@ pub fn main() -> Result<()> {
fn run() -> Result<()> { fn run() -> Result<()> {
brk_logger::init(Some(Path::new(".log")))?; brk_logger::init(Some(Path::new(".log")))?;
let bitcoin_dir = Path::new(&std::env::var("HOME").unwrap()) let bitcoin_dir = Client::default_bitcoin_path();
.join("Library")
.join("Application Support")
.join("Bitcoin");
// let bitcoin_dir = Path::new("/Volumes/WD_BLACK/bitcoin"); // let bitcoin_dir = Path::new("/Volumes/WD_BLACK/bitcoin");
let outputs_dir = Path::new(&std::env::var("HOME").unwrap()).join(".brk"); let outputs_dir = Path::new(&env::var("HOME").unwrap()).join(".brk");
// let outputs_dir = Path::new("../../_outputs"); // let outputs_dir = Path::new("../../_outputs");
let client = Client::new( let client = Client::new(
"http://localhost:8332", Client::default_url(),
Auth::CookieFile(bitcoin_dir.join(".cookie")), Auth::CookieFile(bitcoin_dir.join(".cookie")),
)?; )?;

View File

@@ -6,7 +6,8 @@ use brk_reader::Reader;
use brk_traversable::Traversable; use brk_traversable::Traversable;
use brk_types::{BlkPosition, Height, TxIndex, Version}; use brk_types::{BlkPosition, Height, TxIndex, Version};
use vecdb::{ use vecdb::{
AnyStoredVec, AnyVec, CompressedVec, Database, Exit, GenericStoredVec, PAGE_SIZE, VecIterator, AnyStoredVec, AnyVec, CompressedVec, Database, Exit, GenericStoredVec, PAGE_SIZE,
VecIteratorExtended,
}; };
use super::Indexes; use super::Indexes;
@@ -58,7 +59,7 @@ impl Vecs {
exit: &Exit, exit: &Exit,
) -> Result<()> { ) -> Result<()> {
self.compute_(indexer, starting_indexes, reader, exit)?; self.compute_(indexer, starting_indexes, reader, exit)?;
self.db.flush_then_punch()?; self.db.compact()?;
Ok(()) Ok(())
} }
@@ -75,14 +76,14 @@ impl Vecs {
let Some(min_height) = indexer let Some(min_height) = indexer
.vecs .vecs
.txindex_to_height .txindex_to_height
.iter() .iter()?
.get_inner(min_txindex) .get(min_txindex)
.map(|h| h.min(starting_indexes.height)) .map(|h| h.min(starting_indexes.height))
else { else {
return Ok(()); return Ok(());
}; };
let mut height_to_first_txindex_iter = indexer.vecs.height_to_first_txindex.iter(); let mut height_to_first_txindex_iter = indexer.vecs.height_to_first_txindex.iter()?;
parser parser
.read( .read(
@@ -99,7 +100,7 @@ impl Vecs {
exit, exit,
)?; )?;
let txindex = height_to_first_txindex_iter.unwrap_get_inner(height); let txindex = height_to_first_txindex_iter.get_unwrap(height);
block.tx_metadata().iter().enumerate().try_for_each( block.tx_metadata().iter().enumerate().try_for_each(
|(index, metadata)| -> Result<()> { |(index, metadata)| -> Result<()> {

View File

@@ -12,7 +12,7 @@ use brk_types::{
}; };
use vecdb::{ use vecdb::{
AnyCloneableIterableVec, AnyIterableVec, Database, EagerVec, Exit, LazyVecFrom1, LazyVecFrom2, AnyCloneableIterableVec, AnyIterableVec, Database, EagerVec, Exit, LazyVecFrom1, LazyVecFrom2,
LazyVecFrom3, PAGE_SIZE, StoredIndex, VecIterator, LazyVecFrom3, PAGE_SIZE, StoredIndex, VecIterator, VecIteratorExtended,
}; };
use crate::grouped::{ use crate::grouped::{
@@ -1123,7 +1123,7 @@ impl Vecs {
exit: &Exit, exit: &Exit,
) -> Result<()> { ) -> Result<()> {
self.compute_(indexer, indexes, starting_indexes, price, exit)?; self.compute_(indexer, indexes, starting_indexes, price, exit)?;
self.db.flush_then_punch()?; self.db.compact()?;
Ok(()) Ok(())
} }
@@ -1152,9 +1152,8 @@ impl Vecs {
starting_indexes.height, starting_indexes.height,
&indexes.height_to_timestamp_fixed, &indexes.height_to_timestamp_fixed,
|(h, t, ..)| { |(h, t, ..)| {
while t.difference_in_days_between( while t.difference_in_days_between(height_to_timestamp_fixed_iter.unsafe_get(prev))
height_to_timestamp_fixed_iter.unwrap_get_inner(prev), > 0
) > 0
{ {
prev.increment(); prev.increment();
if prev > h { if prev > h {
@@ -1216,7 +1215,7 @@ impl Vecs {
&indexer.vecs.height_to_timestamp, &indexer.vecs.height_to_timestamp,
|(height, timestamp, ..)| { |(height, timestamp, ..)| {
let interval = height.decremented().map_or(Timestamp::ZERO, |prev_h| { let interval = height.decremented().map_or(Timestamp::ZERO, |prev_h| {
let prev_timestamp = height_to_timestamp_iter.unwrap_get_inner(prev_h); let prev_timestamp = height_to_timestamp_iter.unsafe_get(prev_h);
timestamp timestamp
.checked_sub(prev_timestamp) .checked_sub(prev_timestamp)
.unwrap_or(Timestamp::ZERO) .unwrap_or(Timestamp::ZERO)
@@ -1266,19 +1265,19 @@ impl Vecs {
Some(&self.height_to_vbytes), Some(&self.height_to_vbytes),
)?; )?;
let mut height_to_timestamp_iter = indexer.vecs.height_to_timestamp.iter(); let mut height_to_timestamp_iter = indexer.vecs.height_to_timestamp.iter()?;
self.difficultyepoch_to_timestamp.compute_transform( self.difficultyepoch_to_timestamp.compute_transform(
starting_indexes.difficultyepoch, starting_indexes.difficultyepoch,
&indexes.difficultyepoch_to_first_height, &indexes.difficultyepoch_to_first_height,
|(i, h, ..)| (i, height_to_timestamp_iter.unwrap_get_inner(h)), |(i, h, ..)| (i, height_to_timestamp_iter.unsafe_get(h)),
exit, exit,
)?; )?;
self.halvingepoch_to_timestamp.compute_transform( self.halvingepoch_to_timestamp.compute_transform(
starting_indexes.halvingepoch, starting_indexes.halvingepoch,
&indexes.halvingepoch_to_first_height, &indexes.halvingepoch_to_first_height,
|(i, h, ..)| (i, height_to_timestamp_iter.unwrap_get_inner(h)), |(i, h, ..)| (i, height_to_timestamp_iter.unsafe_get(h)),
exit, exit,
)?; )?;
@@ -1292,9 +1291,8 @@ impl Vecs {
|(di, height, ..)| { |(di, height, ..)| {
( (
di, di,
height_to_difficultyepoch_iter.unwrap_get_inner( height_to_difficultyepoch_iter
height + (*height_count_iter.unwrap_get_inner(di) - 1), .unsafe_get(height + (*height_count_iter.unsafe_get(di) - 1)),
),
) )
}, },
exit, exit,
@@ -1312,9 +1310,8 @@ impl Vecs {
|(di, height, ..)| { |(di, height, ..)| {
( (
di, di,
height_to_halvingepoch_iter.unwrap_get_inner( height_to_halvingepoch_iter
height + (*height_count_iter.unwrap_get_inner(di) - 1), .unsafe_get(height + (*height_count_iter.unsafe_get(di) - 1)),
),
) )
}, },
exit, exit,
@@ -1365,7 +1362,7 @@ impl Vecs {
&indexer.vecs.height_to_first_txindex, &indexer.vecs.height_to_first_txindex,
&indexer.vecs.txindex_to_txid, &indexer.vecs.txindex_to_txid,
|txindex| { |txindex| {
let v = txindex_to_txversion_iter.unwrap_get_inner(txindex); let v = txindex_to_txversion_iter.unsafe_get(txindex);
v == txversion v == txversion
}, },
exit, exit,
@@ -1385,7 +1382,7 @@ impl Vecs {
let value = if txoutindex == TxOutIndex::COINBASE { let value = if txoutindex == TxOutIndex::COINBASE {
Sats::MAX Sats::MAX
} else { } else {
txoutindex_to_value_iter.unwrap_get_inner(txoutindex) txoutindex_to_value_iter.unsafe_get(txoutindex)
}; };
(txinindex, value) (txinindex, value)
}, },
@@ -1525,14 +1522,14 @@ impl Vecs {
&indexer.vecs.height_to_first_txindex, &indexer.vecs.height_to_first_txindex,
|(height, txindex, ..)| { |(height, txindex, ..)| {
let first_txoutindex = txindex_to_first_txoutindex_iter let first_txoutindex = txindex_to_first_txoutindex_iter
.unwrap_get_inner(txindex) .unsafe_get(txindex)
.to_usize(); .to_usize();
let output_count = txindex_to_output_count_iter.unwrap_get_inner(txindex); let output_count = txindex_to_output_count_iter.unsafe_get(txindex);
let mut sats = Sats::ZERO; let mut sats = Sats::ZERO;
(first_txoutindex..first_txoutindex + usize::from(output_count)).for_each( (first_txoutindex..first_txoutindex + usize::from(output_count)).for_each(
|txoutindex| { |txoutindex| {
sats += txoutindex_to_value_iter sats += txoutindex_to_value_iter
.unwrap_get_inner(TxOutIndex::from(txoutindex)); .unsafe_get(TxOutIndex::from(txoutindex));
}, },
); );
(height, sats) (height, sats)
@@ -1556,7 +1553,7 @@ impl Vecs {
let range = *h - (*count - 1)..=*h; let range = *h - (*count - 1)..=*h;
let sum = range let sum = range
.map(Height::from) .map(Height::from)
.map(|h| height_to_coinbase_iter.unwrap_get_inner(h)) .map(|h| height_to_coinbase_iter.unsafe_get(h))
.sum::<Sats>(); .sum::<Sats>();
(h, sum) (h, sum)
}, },
@@ -1575,7 +1572,7 @@ impl Vecs {
let range = *h - (*count - 1)..=*h; let range = *h - (*count - 1)..=*h;
let sum = range let sum = range
.map(Height::from) .map(Height::from)
.map(|h| height_to_coinbase_iter.unwrap_get_inner(h)) .map(|h| height_to_coinbase_iter.unsafe_get(h))
.sum::<Dollars>(); .sum::<Dollars>();
(h, sum) (h, sum)
}, },
@@ -1593,7 +1590,7 @@ impl Vecs {
starting_indexes.height, starting_indexes.height,
self.indexes_to_coinbase.sats.height.as_ref().unwrap(), self.indexes_to_coinbase.sats.height.as_ref().unwrap(),
|(height, coinbase, ..)| { |(height, coinbase, ..)| {
let fees = indexes_to_fee_sum_iter.unwrap_get_inner(height); let fees = indexes_to_fee_sum_iter.unsafe_get(height);
(height, coinbase.checked_sub(fees).unwrap()) (height, coinbase.checked_sub(fees).unwrap())
}, },
exit, exit,
@@ -1787,8 +1784,8 @@ impl Vecs {
starting_indexes.height, starting_indexes.height,
self.indexes_to_output_count.height.unwrap_cumulative(), self.indexes_to_output_count.height.unwrap_cumulative(),
|(h, output_count, ..)| { |(h, output_count, ..)| {
let input_count = input_count_iter.unwrap_get_inner(h); let input_count = input_count_iter.unsafe_get(h);
let opreturn_count = opreturn_count_iter.unwrap_get_inner(h); let opreturn_count = opreturn_count_iter.unsafe_get(h);
let block_count = u64::from(h + 1_usize); let block_count = u64::from(h + 1_usize);
// -1 > genesis output is unspendable // -1 > genesis output is unspendable
let mut utxo_count = let mut utxo_count =

View File

@@ -298,7 +298,7 @@ impl Vecs {
exit: &Exit, exit: &Exit,
) -> Result<()> { ) -> Result<()> {
self.compute_(indexes, starting_indexes, price, chain, stateful, exit)?; self.compute_(indexes, starting_indexes, price, chain, stateful, exit)?;
self.db.flush_then_punch()?; self.db.compact()?;
Ok(()) Ok(())
} }
@@ -339,7 +339,7 @@ impl Vecs {
starting_indexes.height, starting_indexes.height,
self.indexes_to_coinblocks_created.height.as_ref().unwrap(), self.indexes_to_coinblocks_created.height.as_ref().unwrap(),
|(i, created, ..)| { |(i, created, ..)| {
let destroyed = coinblocks_destroyed_iter.unwrap_get_inner(i); let destroyed = coinblocks_destroyed_iter.unsafe_get(i);
(i, created.checked_sub(destroyed).unwrap()) (i, created.checked_sub(destroyed).unwrap())
}, },
exit, exit,

View File

@@ -177,7 +177,7 @@ impl Vecs {
exit: &Exit, exit: &Exit,
) -> Result<()> { ) -> Result<()> {
self.compute_(indexes, starting_indexes, exit)?; self.compute_(indexes, starting_indexes, exit)?;
self.db.flush_then_punch()?; self.db.compact()?;
Ok(()) Ok(())
} }

View File

@@ -60,7 +60,7 @@ impl Vecs {
exit: &Exit, exit: &Exit,
) -> Result<()> { ) -> Result<()> {
self.compute_(indexer, indexes, starting_indexes, exit)?; self.compute_(indexer, indexes, starting_indexes, exit)?;
self.db.flush_then_punch()?; self.db.compact()?;
Ok(()) Ok(())
} }
@@ -76,16 +76,18 @@ impl Vecs {
.height .height
.min(Height::from(self.height_to_price_ohlc_in_cents.len())); .min(Height::from(self.height_to_price_ohlc_in_cents.len()));
height_to_timestamp height_to_timestamp
.iter_at(index) .iter()?
.skip(index.to_usize())
.enumerate()
.try_for_each(|(i, v)| -> Result<()> { .try_for_each(|(i, v)| -> Result<()> {
self.height_to_price_ohlc_in_cents.forced_push_at( self.height_to_price_ohlc_in_cents.forced_push_at(
i, i.into(),
self.fetcher self.fetcher
.get_height( .get_height(
i, i.into(),
v, v,
i.decremented().map(|prev_i| { i.decremented().map(|prev_i| {
height_to_timestamp.into_iter().unwrap_get_inner(prev_i) height_to_timestamp.into_iter().get_unwrap_at(prev_i)
}), }),
) )
.unwrap(), .unwrap(),
@@ -101,14 +103,16 @@ impl Vecs {
let mut prev = None; let mut prev = None;
indexes indexes
.dateindex_to_date .dateindex_to_date
.iter_at(index) .iter()
.skip(index.to_usize())
.enumerate()
.try_for_each(|(i, d)| -> Result<()> { .try_for_each(|(i, d)| -> Result<()> {
if prev.is_none() { if prev.is_none() {
let i = i.to_usize(); let i = i.to_usize();
prev.replace(if i > 0 { prev.replace(if i > 0 {
self.dateindex_to_price_ohlc_in_cents self.dateindex_to_price_ohlc_in_cents
.into_iter() .into_iter()
.unwrap_get_inner_(i - 1) .get_unwrap_at(i - 1)
} else { } else {
OHLCCents::default() OHLCCents::default()
}); });
@@ -129,7 +133,7 @@ impl Vecs {
prev.replace(ohlc.clone()); prev.replace(ohlc.clone());
self.dateindex_to_price_ohlc_in_cents self.dateindex_to_price_ohlc_in_cents
.forced_push_at(i, ohlc, exit)?; .forced_push_at_(i, ohlc, exit)?;
Ok(()) Ok(())
})?; })?;

View File

@@ -219,9 +219,13 @@ where
let cumulative_vec = self.cumulative.as_mut().unwrap(); let cumulative_vec = self.cumulative.as_mut().unwrap();
let mut cumulative = index.decremented().map_or(T::from(0_usize), |index| { let mut cumulative = index.decremented().map_or(T::from(0_usize), |index| {
cumulative_vec.iter().unwrap_get_inner(index) cumulative_vec.iter().unsafe_get(index)
}); });
source.iter_at(index).try_for_each(|(i, v)| -> Result<()> { source
.iter()
.skip(index)
.enumerate()
.try_for_each(|(i, v)| -> Result<()> {
cumulative += v; cumulative += v;
cumulative_vec.forced_push_at(i, cumulative, exit)?; cumulative_vec.forced_push_at(i, cumulative, exit)?;
Ok(()) Ok(())
@@ -256,14 +260,13 @@ where
let mut cumulative = cumulative_vec.map(|cumulative_vec| { let mut cumulative = cumulative_vec.map(|cumulative_vec| {
index.decremented().map_or(T::from(0_usize), |index| { index.decremented().map_or(T::from(0_usize), |index| {
cumulative_vec.iter().unwrap_get_inner(index) cumulative_vec.iter().unsafe_get(index)
}) })
}); });
first_indexes first_indexes.iter().skip(index).enumerate().try_for_each(
.iter_at(index) |(index, first_index)| -> Result<()> {
.try_for_each(|(index, first_index)| -> Result<()> { let count_index = count_indexes_iter.unsafe_get(index);
let count_index = count_indexes_iter.unwrap_get_inner(index);
if let Some(first) = self.first.as_mut() { if let Some(first) = self.first.as_mut() {
let f = source_iter let f = source_iter
@@ -278,7 +281,7 @@ where
panic!("should compute last if count can be 0") panic!("should compute last if count can be 0")
} }
let last_index = first_index + (count_index - 1); let last_index = first_index + (count_index - 1);
let v = source_iter.unwrap_get_inner(last_index); let v = source_iter.unsafe_get(last_index);
// .context("to work") // .context("to work")
// .inspect_err(|_| { // .inspect_err(|_| {
// dbg!(first_index, count_index, last_index); // dbg!(first_index, count_index, last_index);
@@ -382,7 +385,8 @@ where
} }
Ok(()) Ok(())
})?; },
)?;
self.safe_flush(exit)?; self.safe_flush(exit)?;
@@ -427,20 +431,16 @@ where
let mut cumulative = self.cumulative.as_mut().map(|cumulative_vec| { let mut cumulative = self.cumulative.as_mut().map(|cumulative_vec| {
index.decremented().map_or(T::from(0_usize), |index| { index.decremented().map_or(T::from(0_usize), |index| {
cumulative_vec.iter().unwrap_get_inner(index) cumulative_vec.iter().unsafe_get(index)
}) })
}); });
first_indexes first_indexes.iter().skip(index).enumerate().try_for_each(
.iter_at(index) |(index, first_index, ..)| -> Result<()> {
.try_for_each(|(index, first_index, ..)| -> Result<()> { let count_index = count_indexes_iter.unsafe_get(index);
let count_index = count_indexes_iter.unwrap_get_inner(index);
if let Some(first) = self.first.as_mut() { if let Some(first) = self.first.as_mut() {
let v = source_first_iter let v = source_first_iter.as_mut().unwrap().unsafe_get(first_index);
.as_mut()
.unwrap()
.unwrap_get_inner(first_index);
first.forced_push_at(index, v, exit)?; first.forced_push_at(index, v, exit)?;
} }
@@ -450,10 +450,7 @@ where
panic!("should compute last if count can be 0") panic!("should compute last if count can be 0")
} }
let last_index = first_index + (count_index - 1); let last_index = first_index + (count_index - 1);
let v = source_last_iter let v = source_last_iter.as_mut().unwrap().unsafe_get(last_index);
.as_mut()
.unwrap()
.unwrap_get_inner(last_index);
last.forced_push_at(index, v, exit)?; last.forced_push_at(index, v, exit)?;
} }
@@ -529,7 +526,8 @@ where
} }
Ok(()) Ok(())
})?; },
)?;
self.safe_flush(exit)?; self.safe_flush(exit)?;

View File

@@ -255,132 +255,77 @@ impl ComputedVecsFromTxindex<Bitcoin> {
if let Some(first) = self.height.first.as_mut() { if let Some(first) = self.height.first.as_mut() {
first.forced_push_at( first.forced_push_at(
height, height,
Bitcoin::from( Bitcoin::from(sats.height.unwrap_first().into_iter().unsafe_get(height)),
sats.height
.unwrap_first()
.into_iter()
.unwrap_get_inner(height),
),
exit, exit,
)?; )?;
} }
if let Some(average) = self.height.average.as_mut() { if let Some(average) = self.height.average.as_mut() {
average.forced_push_at( average.forced_push_at(
height, height,
Bitcoin::from( Bitcoin::from(sats.height.unwrap_average().into_iter().unsafe_get(height)),
sats.height
.unwrap_average()
.into_iter()
.unwrap_get_inner(height),
),
exit, exit,
)?; )?;
} }
if let Some(sum) = self.height.sum.as_mut() { if let Some(sum) = self.height.sum.as_mut() {
sum.forced_push_at( sum.forced_push_at(
height, height,
Bitcoin::from( Bitcoin::from(sats.height.unwrap_sum().into_iter().unsafe_get(height)),
sats.height
.unwrap_sum()
.into_iter()
.unwrap_get_inner(height),
),
exit, exit,
)?; )?;
} }
if let Some(max) = self.height.max.as_mut() { if let Some(max) = self.height.max.as_mut() {
max.forced_push_at( max.forced_push_at(
height, height,
Bitcoin::from( Bitcoin::from(sats.height.unwrap_max().into_iter().unsafe_get(height)),
sats.height
.unwrap_max()
.into_iter()
.unwrap_get_inner(height),
),
exit, exit,
)?; )?;
} }
if let Some(pct90) = self.height.pct90.as_mut() { if let Some(pct90) = self.height.pct90.as_mut() {
pct90.forced_push_at( pct90.forced_push_at(
height, height,
Bitcoin::from( Bitcoin::from(sats.height.unwrap_pct90().into_iter().unsafe_get(height)),
sats.height
.unwrap_pct90()
.into_iter()
.unwrap_get_inner(height),
),
exit, exit,
)?; )?;
} }
if let Some(pct75) = self.height.pct75.as_mut() { if let Some(pct75) = self.height.pct75.as_mut() {
pct75.forced_push_at( pct75.forced_push_at(
height, height,
Bitcoin::from( Bitcoin::from(sats.height.unwrap_pct75().into_iter().unsafe_get(height)),
sats.height
.unwrap_pct75()
.into_iter()
.unwrap_get_inner(height),
),
exit, exit,
)?; )?;
} }
if let Some(median) = self.height.median.as_mut() { if let Some(median) = self.height.median.as_mut() {
median.forced_push_at( median.forced_push_at(
height, height,
Bitcoin::from( Bitcoin::from(sats.height.unwrap_median().into_iter().unsafe_get(height)),
sats.height
.unwrap_median()
.into_iter()
.unwrap_get_inner(height),
),
exit, exit,
)?; )?;
} }
if let Some(pct25) = self.height.pct25.as_mut() { if let Some(pct25) = self.height.pct25.as_mut() {
pct25.forced_push_at( pct25.forced_push_at(
height, height,
Bitcoin::from( Bitcoin::from(sats.height.unwrap_pct25().into_iter().unsafe_get(height)),
sats.height
.unwrap_pct25()
.into_iter()
.unwrap_get_inner(height),
),
exit, exit,
)?; )?;
} }
if let Some(pct10) = self.height.pct10.as_mut() { if let Some(pct10) = self.height.pct10.as_mut() {
pct10.forced_push_at( pct10.forced_push_at(
height, height,
Bitcoin::from( Bitcoin::from(sats.height.unwrap_pct10().into_iter().unsafe_get(height)),
sats.height
.unwrap_pct10()
.into_iter()
.unwrap_get_inner(height),
),
exit, exit,
)?; )?;
} }
if let Some(min) = self.height.min.as_mut() { if let Some(min) = self.height.min.as_mut() {
min.forced_push_at( min.forced_push_at(
height, height,
Bitcoin::from( Bitcoin::from(sats.height.unwrap_min().into_iter().unsafe_get(height)),
sats.height
.unwrap_min()
.into_iter()
.unwrap_get_inner(height),
),
exit, exit,
)?; )?;
} }
if let Some(last) = self.height.last.as_mut() { if let Some(last) = self.height.last.as_mut() {
last.forced_push_at( last.forced_push_at(
height, height,
Bitcoin::from( Bitcoin::from(sats.height.unwrap_last().into_iter().unsafe_get(height)),
sats.height
.unwrap_last()
.into_iter()
.unwrap_get_inner(height),
),
exit, exit,
)?; )?;
} }
@@ -391,7 +336,7 @@ impl ComputedVecsFromTxindex<Bitcoin> {
sats.height sats.height
.unwrap_cumulative() .unwrap_cumulative()
.into_iter() .into_iter()
.unwrap_get_inner(height), .unsafe_get(height),
), ),
exit, exit,
)?; )?;
@@ -433,17 +378,12 @@ impl ComputedVecsFromTxindex<Dollars> {
(starting_index.to_usize()..indexer.vecs.height_to_weight.len()) (starting_index.to_usize()..indexer.vecs.height_to_weight.len())
.map(Height::from) .map(Height::from)
.try_for_each(|height| -> Result<()> { .try_for_each(|height| -> Result<()> {
let price = *close_iter.unwrap_get_inner(height); let price = *close_iter.unsafe_get(height);
if let Some(first) = self.height.first.as_mut() { if let Some(first) = self.height.first.as_mut() {
first.forced_push_at( first.forced_push_at(
height, height,
price price * bitcoin.height.unwrap_first().into_iter().unsafe_get(height),
* bitcoin
.height
.unwrap_first()
.into_iter()
.unwrap_get_inner(height),
exit, exit,
)?; )?;
} }
@@ -455,55 +395,35 @@ impl ComputedVecsFromTxindex<Dollars> {
.height .height
.unwrap_average() .unwrap_average()
.into_iter() .into_iter()
.unwrap_get_inner(height), .unsafe_get(height),
exit, exit,
)?; )?;
} }
if let Some(sum) = self.height.sum.as_mut() { if let Some(sum) = self.height.sum.as_mut() {
sum.forced_push_at( sum.forced_push_at(
height, height,
price price * bitcoin.height.unwrap_sum().into_iter().unsafe_get(height),
* bitcoin
.height
.unwrap_sum()
.into_iter()
.unwrap_get_inner(height),
exit, exit,
)?; )?;
} }
if let Some(max) = self.height.max.as_mut() { if let Some(max) = self.height.max.as_mut() {
max.forced_push_at( max.forced_push_at(
height, height,
price price * bitcoin.height.unwrap_max().into_iter().unsafe_get(height),
* bitcoin
.height
.unwrap_max()
.into_iter()
.unwrap_get_inner(height),
exit, exit,
)?; )?;
} }
if let Some(pct90) = self.height.pct90.as_mut() { if let Some(pct90) = self.height.pct90.as_mut() {
pct90.forced_push_at( pct90.forced_push_at(
height, height,
price price * bitcoin.height.unwrap_pct90().into_iter().unsafe_get(height),
* bitcoin
.height
.unwrap_pct90()
.into_iter()
.unwrap_get_inner(height),
exit, exit,
)?; )?;
} }
if let Some(pct75) = self.height.pct75.as_mut() { if let Some(pct75) = self.height.pct75.as_mut() {
pct75.forced_push_at( pct75.forced_push_at(
height, height,
price price * bitcoin.height.unwrap_pct75().into_iter().unsafe_get(height),
* bitcoin
.height
.unwrap_pct75()
.into_iter()
.unwrap_get_inner(height),
exit, exit,
)?; )?;
} }
@@ -515,55 +435,35 @@ impl ComputedVecsFromTxindex<Dollars> {
.height .height
.unwrap_median() .unwrap_median()
.into_iter() .into_iter()
.unwrap_get_inner(height), .unsafe_get(height),
exit, exit,
)?; )?;
} }
if let Some(pct25) = self.height.pct25.as_mut() { if let Some(pct25) = self.height.pct25.as_mut() {
pct25.forced_push_at( pct25.forced_push_at(
height, height,
price price * bitcoin.height.unwrap_pct25().into_iter().unsafe_get(height),
* bitcoin
.height
.unwrap_pct25()
.into_iter()
.unwrap_get_inner(height),
exit, exit,
)?; )?;
} }
if let Some(pct10) = self.height.pct10.as_mut() { if let Some(pct10) = self.height.pct10.as_mut() {
pct10.forced_push_at( pct10.forced_push_at(
height, height,
price price * bitcoin.height.unwrap_pct10().into_iter().unsafe_get(height),
* bitcoin
.height
.unwrap_pct10()
.into_iter()
.unwrap_get_inner(height),
exit, exit,
)?; )?;
} }
if let Some(min) = self.height.min.as_mut() { if let Some(min) = self.height.min.as_mut() {
min.forced_push_at( min.forced_push_at(
height, height,
price price * bitcoin.height.unwrap_min().into_iter().unsafe_get(height),
* bitcoin
.height
.unwrap_min()
.into_iter()
.unwrap_get_inner(height),
exit, exit,
)?; )?;
} }
if let Some(last) = self.height.last.as_mut() { if let Some(last) = self.height.last.as_mut() {
last.forced_push_at( last.forced_push_at(
height, height,
price price * bitcoin.height.unwrap_last().into_iter().unsafe_get(height),
* bitcoin
.height
.unwrap_last()
.into_iter()
.unwrap_get_inner(height),
exit, exit,
)?; )?;
} }
@@ -575,7 +475,7 @@ impl ComputedVecsFromTxindex<Dollars> {
.height .height
.unwrap_cumulative() .unwrap_cumulative()
.into_iter() .into_iter()
.unwrap_get_inner(height), .unsafe_get(height),
exit, exit,
)?; )?;
} }

View File

@@ -394,7 +394,9 @@ impl ComputedRatioVecsFromDateIndex {
.dateindex .dateindex
.as_ref() .as_ref()
.unwrap() .unwrap()
.iter_at(starting_dateindex) .iter()
.skip(starting_dateindex)
.enumerate()
.try_for_each(|(index, ratio)| -> Result<()> { .try_for_each(|(index, ratio)| -> Result<()> {
if index < min_ratio_date { if index < min_ratio_date {
self.ratio_pct5 self.ratio_pct5
@@ -545,7 +547,7 @@ impl ComputedRatioVecsFromDateIndex {
starting_indexes.dateindex, starting_indexes.dateindex,
date_to_price, date_to_price,
|(i, price, ..)| { |(i, price, ..)| {
let multiplier = iter.unwrap_get_inner(i); let multiplier = iter.unsafe_get(i);
(i, price * multiplier) (i, price * multiplier)
}, },
exit, exit,
@@ -562,7 +564,7 @@ impl ComputedRatioVecsFromDateIndex {
starting_indexes.dateindex, starting_indexes.dateindex,
date_to_price, date_to_price,
|(i, price, ..)| { |(i, price, ..)| {
let multiplier = iter.unwrap_get_inner(i); let multiplier = iter.unsafe_get(i);
(i, price * multiplier) (i, price * multiplier)
}, },
exit, exit,

View File

@@ -499,7 +499,9 @@ impl ComputedStandardDeviationVecsFromDateIndex {
let mut m3sd = self.m3sd.as_mut().map(|c| c.dateindex.as_mut().unwrap()); let mut m3sd = self.m3sd.as_mut().map(|c| c.dateindex.as_mut().unwrap());
source source
.iter_at(starting_dateindex) .iter()
.skip(starting_dateindex)
.enumerate()
.try_for_each(|(index, ratio)| -> Result<()> { .try_for_each(|(index, ratio)| -> Result<()> {
if index < min_date { if index < min_date {
self.sd.dateindex.as_mut().unwrap().forced_push_at( self.sd.dateindex.as_mut().unwrap().forced_push_at(
@@ -548,7 +550,7 @@ impl ComputedStandardDeviationVecsFromDateIndex {
let pos = sorted.binary_search(&ratio).unwrap_or_else(|pos| pos); let pos = sorted.binary_search(&ratio).unwrap_or_else(|pos| pos);
sorted.insert(pos, ratio); sorted.insert(pos, ratio);
let avg = sma_iter.unwrap_get_inner(index); let avg = sma_iter.unsafe_get(index);
let population = index.checked_sub(min_date).unwrap().to_usize() as f32 + 1.0; let population = index.checked_sub(min_date).unwrap().to_usize() as f32 + 1.0;
@@ -637,7 +639,7 @@ impl ComputedStandardDeviationVecsFromDateIndex {
starting_indexes.dateindex, starting_indexes.dateindex,
price, price,
|(i, price, ..)| { |(i, price, ..)| {
let multiplier = iter.unwrap_get_inner(i); let multiplier = iter.unsafe_get(i);
(i, price * multiplier) (i, price * multiplier)
}, },
exit, exit,

View File

@@ -509,7 +509,7 @@ impl Vecs {
exit: &Exit, exit: &Exit,
) -> Result<Indexes> { ) -> Result<Indexes> {
let idxs = self.compute_(indexer, starting_indexes, exit)?; let idxs = self.compute_(indexer, starting_indexes, exit)?;
self.db.flush_then_punch()?; self.db.compact()?;
Ok(idxs) Ok(idxs)
} }
@@ -558,7 +558,7 @@ impl Vecs {
prev_timestamp_fixed.replace( prev_timestamp_fixed.replace(
height_to_timestamp_fixed_iter height_to_timestamp_fixed_iter
.into_iter() .into_iter()
.unwrap_get_inner(prev_h), .unsafe_get(prev_h),
); );
} }
let timestamp_fixed = let timestamp_fixed =
@@ -946,12 +946,8 @@ pub struct Indexes {
impl Indexes { impl Indexes {
pub fn update_from_height(&mut self, height: Height, indexes: &Vecs) { pub fn update_from_height(&mut self, height: Height, indexes: &Vecs) {
self.indexes.height = height; self.indexes.height = height;
self.dateindex = DateIndex::try_from( self.dateindex =
indexes DateIndex::try_from(indexes.height_to_date_fixed.into_iter().unsafe_get(height))
.height_to_date_fixed
.into_iter()
.unwrap_get_inner(height),
)
.unwrap(); .unwrap();
self.weekindex = WeekIndex::from(self.dateindex); self.weekindex = WeekIndex::from(self.dateindex);
self.monthindex = MonthIndex::from(self.dateindex); self.monthindex = MonthIndex::from(self.dateindex);

View File

@@ -12,38 +12,38 @@ use log::info;
use vecdb::{Exit, Format}; use vecdb::{Exit, Format};
mod blks; mod blks;
mod chain; // mod chain;
mod cointime; // mod cointime;
mod constants; // mod constants;
mod fetched; mod fetched;
mod grouped; // mod grouped;
mod indexes; mod indexes;
mod market; // mod market;
mod pools; // mod pools;
mod price; // mod price;
mod stateful; // mod stateful;
mod states; mod states;
mod traits; mod traits;
mod utils; mod utils;
use indexes::Indexes; use indexes::Indexes;
pub use pools::*; // pub use pools::*;
pub use states::PriceToAmount; pub use states::PriceToAmount;
use states::*; use states::*;
#[derive(Clone, Traversable)] #[derive(Clone, Traversable)]
pub struct Computer { pub struct Computer {
pub chain: chain::Vecs, pub blks: blks::Vecs,
pub cointime: cointime::Vecs, // pub chain: chain::Vecs,
pub constants: constants::Vecs, // pub cointime: cointime::Vecs,
// pub constants: constants::Vecs,
pub fetched: Option<fetched::Vecs>, pub fetched: Option<fetched::Vecs>,
pub indexes: indexes::Vecs, pub indexes: indexes::Vecs,
pub market: market::Vecs, // pub market: market::Vecs,
pub pools: pools::Vecs, // pub pools: pools::Vecs,
pub blks: blks::Vecs, // pub price: Option<price::Vecs>,
pub price: Option<price::Vecs>, // pub stateful: stateful::Vecs,
pub stateful: stateful::Vecs,
} }
const VERSION: Version = Version::new(4); const VERSION: Version = Version::new(4);
@@ -81,69 +81,69 @@ impl Computer {
Ok((indexes, fetched, blks)) Ok((indexes, fetched, blks))
})?; })?;
let (price, constants, market) = thread::scope(|s| -> Result<_> { // let (price, constants, market) = thread::scope(|s| -> Result<_> {
let constants_handle = big_thread().spawn_scoped(s, || { // let constants_handle = big_thread().spawn_scoped(s, || {
constants::Vecs::forced_import(&computed_path, VERSION, &indexes) // constants::Vecs::forced_import(&computed_path, VERSION, &indexes)
})?; // })?;
let market_handle = big_thread().spawn_scoped(s, || { // let market_handle = big_thread().spawn_scoped(s, || {
market::Vecs::forced_import(&computed_path, VERSION, &indexes) // market::Vecs::forced_import(&computed_path, VERSION, &indexes)
})?; // })?;
let price = fetched // let price = fetched
.is_some() // .is_some()
.then(|| price::Vecs::forced_import(&computed_path, VERSION, &indexes).unwrap()); // .then(|| price::Vecs::forced_import(&computed_path, VERSION, &indexes).unwrap());
let constants = constants_handle.join().unwrap()?; // let constants = constants_handle.join().unwrap()?;
let market = market_handle.join().unwrap()?; // let market = market_handle.join().unwrap()?;
Ok((price, constants, market)) // Ok((price, constants, market))
})?; // })?;
let (chain, pools, cointime) = thread::scope(|s| -> Result<_> { // let (chain, pools, cointime) = thread::scope(|s| -> Result<_> {
let chain_handle = big_thread().spawn_scoped(s, || { // let chain_handle = big_thread().spawn_scoped(s, || {
chain::Vecs::forced_import( // chain::Vecs::forced_import(
&computed_path, // &computed_path,
VERSION, // VERSION,
indexer, // indexer,
&indexes, // &indexes,
price.as_ref(), // price.as_ref(),
) // )
})?; // })?;
let pools_handle = big_thread().spawn_scoped(s, || { // let pools_handle = big_thread().spawn_scoped(s, || {
pools::Vecs::forced_import(&computed_path, VERSION, &indexes, price.as_ref()) // pools::Vecs::forced_import(&computed_path, VERSION, &indexes, price.as_ref())
})?; // })?;
let cointime = // let cointime =
cointime::Vecs::forced_import(&computed_path, VERSION, &indexes, price.as_ref())?; // cointime::Vecs::forced_import(&computed_path, VERSION, &indexes, price.as_ref())?;
let chain = chain_handle.join().unwrap()?; // let chain = chain_handle.join().unwrap()?;
let pools = pools_handle.join().unwrap()?; // let pools = pools_handle.join().unwrap()?;
Ok((chain, pools, cointime)) // Ok((chain, pools, cointime))
})?; // })?;
// Threads inside // // Threads inside
let stateful = stateful::Vecs::forced_import( // let stateful = stateful::Vecs::forced_import(
&computed_path, // &computed_path,
VERSION, // VERSION,
Format::Compressed, // Format::Compressed,
&indexes, // &indexes,
price.as_ref(), // price.as_ref(),
)?; // )?;
Ok(Self { Ok(Self {
constants, // constants,
market, // market,
stateful, // stateful,
chain, // chain,
blks, blks,
pools, // pools,
cointime, // cointime,
indexes, indexes,
fetched, fetched,
price, // price,
}) })
} }
@@ -182,13 +182,6 @@ impl Computer {
Ok(()) Ok(())
}); });
// let blks = scope.spawn(|| -> Result<()> {
// info!("Computing blks...");
// self.blks
// .compute(indexer, &self.indexes, &starting_indexes, parser, exit)?;
// Ok(())
// });
let chain = scope.spawn(|| -> Result<()> { let chain = scope.spawn(|| -> Result<()> {
info!("Computing chain..."); info!("Computing chain...");
self.chain.compute( self.chain.compute(
@@ -207,7 +200,6 @@ impl Computer {
} }
constants.join().unwrap()?; constants.join().unwrap()?;
// blks.join().unwrap()?;
chain.join().unwrap()?; chain.join().unwrap()?;
Ok(()) Ok(())
})?; })?;

View File

@@ -3,7 +3,10 @@ use std::{path::Path, thread};
use brk_error::Result; use brk_error::Result;
use brk_traversable::Traversable; use brk_traversable::Traversable;
use brk_types::{Date, DateIndex, Dollars, Height, Sats, StoredF32, StoredU16, Version}; use brk_types::{Date, DateIndex, Dollars, Height, Sats, StoredF32, StoredU16, Version};
use vecdb::{Database, EagerVec, Exit, PAGE_SIZE, StoredIndex, VecIterator}; use vecdb::{
Database, EagerVec, Exit, GenericStoredVec, PAGE_SIZE, StoredIndex, VecIterator,
VecIteratorExtended,
};
use crate::{ use crate::{
grouped::{ComputedStandardDeviationVecsFromDateIndex, Source, StandardDeviationVecsOptions}, grouped::{ComputedStandardDeviationVecsFromDateIndex, Source, StandardDeviationVecsOptions},
@@ -1532,7 +1535,7 @@ impl Vecs {
exit: &Exit, exit: &Exit,
) -> Result<()> { ) -> Result<()> {
self.compute_(price, starting_indexes, exit)?; self.compute_(price, starting_indexes, exit)?;
self.db.flush_then_punch()?; self.db.compact()?;
Ok(()) Ok(())
} }
@@ -1591,12 +1594,12 @@ impl Vecs {
if prev.is_none() { if prev.is_none() {
let i = i.to_usize(); let i = i.to_usize();
prev.replace(if i > 0 { prev.replace(if i > 0 {
slf.into_iter().unwrap_get_inner_(i - 1) slf.one_shot_get_any_or_read_(i - 1).unwrap().unwrap()
} else { } else {
StoredU16::default() StoredU16::default()
}); });
} }
let days = if *high_iter.unwrap_get_inner(i) == ath { let days = if *high_iter.unsafe_get(i) == ath {
StoredU16::default() StoredU16::default()
} else { } else {
prev.unwrap() + StoredU16::new(1) prev.unwrap() + StoredU16::new(1)
@@ -1622,7 +1625,7 @@ impl Vecs {
if prev.is_none() { if prev.is_none() {
let i = i.to_usize(); let i = i.to_usize();
prev.replace(if i > 0 { prev.replace(if i > 0 {
slf.into_iter().unwrap_get_inner_(i - 1) slf.one_shot_get_any_or_read_(i - 1)
} else { } else {
StoredU16::ZERO StoredU16::ZERO
}); });

View File

@@ -81,7 +81,7 @@ impl Vecs {
exit: &Exit, exit: &Exit,
) -> Result<()> { ) -> Result<()> {
self.compute_(indexer, indexes, starting_indexes, chain, price, exit)?; self.compute_(indexer, indexes, starting_indexes, chain, price, exit)?;
self.db.flush_then_punch()?; self.db.compact()?;
Ok(()) Ok(())
} }
@@ -155,47 +155,40 @@ impl Vecs {
.iter() .iter()
.skip(min) .skip(min)
.try_for_each(|(height, coinbase_tag)| -> Result<()> { .try_for_each(|(height, coinbase_tag)| -> Result<()> {
let txindex = height_to_first_txindex_iter.unwrap_get_inner(height); let txindex = height_to_first_txindex_iter.unsafe_get(height);
let txoutindex = txindex_to_first_txoutindex_iter.unwrap_get_inner(txindex); let txoutindex = txindex_to_first_txoutindex_iter.unsafe_get(txindex);
let outputcount = txindex_to_output_count_iter.unwrap_get_inner(txindex); let outputcount = txindex_to_output_count_iter.unsafe_get(txindex);
let pool = (*txoutindex..(*txoutindex + *outputcount)) let pool = (*txoutindex..(*txoutindex + *outputcount))
.map(TxOutIndex::from) .map(TxOutIndex::from)
.find_map(|txoutindex| { .find_map(|txoutindex| {
let outputtype = txoutindex_to_outputtype_iter.unwrap_get_inner(txoutindex); let outputtype = txoutindex_to_outputtype_iter.unsafe_get(txoutindex);
let typeindex = txoutindex_to_typeindex_iter.unwrap_get_inner(txoutindex); let typeindex = txoutindex_to_typeindex_iter.unsafe_get(txoutindex);
match outputtype { match outputtype {
OutputType::P2PK65 => Some(AddressBytes::from( OutputType::P2PK65 => Some(AddressBytes::from(
p2pk65addressindex_to_p2pk65bytes_iter p2pk65addressindex_to_p2pk65bytes_iter.unsafe_get(typeindex.into()),
.unwrap_get_inner(typeindex.into()),
)), )),
OutputType::P2PK33 => Some(AddressBytes::from( OutputType::P2PK33 => Some(AddressBytes::from(
p2pk33addressindex_to_p2pk33bytes_iter p2pk33addressindex_to_p2pk33bytes_iter.unsafe_get(typeindex.into()),
.unwrap_get_inner(typeindex.into()),
)), )),
OutputType::P2PKH => Some(AddressBytes::from( OutputType::P2PKH => Some(AddressBytes::from(
p2pkhaddressindex_to_p2pkhbytes_iter p2pkhaddressindex_to_p2pkhbytes_iter.unsafe_get(typeindex.into()),
.unwrap_get_inner(typeindex.into()),
)), )),
OutputType::P2SH => Some(AddressBytes::from( OutputType::P2SH => Some(AddressBytes::from(
p2shaddressindex_to_p2shbytes_iter p2shaddressindex_to_p2shbytes_iter.unsafe_get(typeindex.into()),
.unwrap_get_inner(typeindex.into()),
)), )),
OutputType::P2WPKH => Some(AddressBytes::from( OutputType::P2WPKH => Some(AddressBytes::from(
p2wpkhaddressindex_to_p2wpkhbytes_iter p2wpkhaddressindex_to_p2wpkhbytes_iter.unsafe_get(typeindex.into()),
.unwrap_get_inner(typeindex.into()),
)), )),
OutputType::P2WSH => Some(AddressBytes::from( OutputType::P2WSH => Some(AddressBytes::from(
p2wshaddressindex_to_p2wshbytes_iter p2wshaddressindex_to_p2wshbytes_iter.unsafe_get(typeindex.into()),
.unwrap_get_inner(typeindex.into()),
)), )),
OutputType::P2TR => Some(AddressBytes::from( OutputType::P2TR => Some(AddressBytes::from(
p2traddressindex_to_p2trbytes_iter p2traddressindex_to_p2trbytes_iter.unsafe_get(typeindex.into()),
.unwrap_get_inner(typeindex.into()),
)), )),
OutputType::P2A => Some(AddressBytes::from( OutputType::P2A => Some(AddressBytes::from(
p2aaddressindex_to_p2abytes_iter.unwrap_get_inner(typeindex.into()), p2aaddressindex_to_p2abytes_iter.unsafe_get(typeindex.into()),
)), )),
_ => None, _ => None,
} }

View File

@@ -354,7 +354,7 @@ impl Vecs {
if prev.is_none() { if prev.is_none() {
let i = i.to_usize(); let i = i.to_usize();
prev.replace(if i > 0 { prev.replace(if i > 0 {
slf.into_iter().unwrap_get_inner_(i - 1) slf.one_shot_get_any_or_read_(i - 1)
} else { } else {
StoredU16::ZERO StoredU16::ZERO
}); });

View File

@@ -8,7 +8,7 @@ use brk_types::{
}; };
use vecdb::{ use vecdb::{
AnyIterableVec, AnyStoredVec, AnyVec, Database, EagerVec, Exit, GenericStoredVec, PAGE_SIZE, AnyIterableVec, AnyStoredVec, AnyVec, Database, EagerVec, Exit, GenericStoredVec, PAGE_SIZE,
RawVec, RawVec, StoredIndex,
}; };
use crate::{fetched, grouped::Source}; use crate::{fetched, grouped::Source};
@@ -343,7 +343,7 @@ impl Vecs {
exit: &Exit, exit: &Exit,
) -> Result<()> { ) -> Result<()> {
self.compute_(indexes, starting_indexes, fetched, exit)?; self.compute_(indexes, starting_indexes, fetched, exit)?;
self.db.flush_then_punch()?; self.db.compact()?;
Ok(()) Ok(())
} }
@@ -387,10 +387,15 @@ impl Vecs {
.min(Height::from(self.height_to_price_ohlc.len())); .min(Height::from(self.height_to_price_ohlc.len()));
fetched fetched
.height_to_price_ohlc_in_cents .height_to_price_ohlc_in_cents
.iter_at(index) .iter()?
.skip(index.to_usize())
.enumerate()
.try_for_each(|(i, v)| -> Result<()> { .try_for_each(|(i, v)| -> Result<()> {
self.height_to_price_ohlc self.height_to_price_ohlc.forced_push_at(
.forced_push_at(i, OHLCDollars::from(v), exit)?; Height::from(i),
OHLCDollars::from(v),
exit,
)?;
Ok(()) Ok(())
})?; })?;
self.height_to_price_ohlc.safe_flush(exit)?; self.height_to_price_ohlc.safe_flush(exit)?;
@@ -428,10 +433,15 @@ impl Vecs {
.min(DateIndex::from(self.dateindex_to_price_ohlc.len())); .min(DateIndex::from(self.dateindex_to_price_ohlc.len()));
fetched fetched
.dateindex_to_price_ohlc_in_cents .dateindex_to_price_ohlc_in_cents
.iter_at(index) .iter()?
.skip(index.to_usize())
.enumerate()
.try_for_each(|(i, v)| -> Result<()> { .try_for_each(|(i, v)| -> Result<()> {
self.dateindex_to_price_ohlc self.dateindex_to_price_ohlc.forced_push_at(
.forced_push_at(i, OHLCDollars::from(v), exit)?; DateIndex::from(i),
OHLCDollars::from(v),
exit,
)?;
Ok(()) Ok(())
})?; })?;
self.dateindex_to_price_ohlc.safe_flush(exit)?; self.dateindex_to_price_ohlc.safe_flush(exit)?;
@@ -537,13 +547,15 @@ impl Vecs {
self.timeindexes_to_price_close self.timeindexes_to_price_close
.weekindex .weekindex
.unwrap_last() .unwrap_last()
.iter_at(index) .iter()
.skip(index.to_usize())
.enumerate()
.try_for_each(|(i, close)| -> Result<()> { .try_for_each(|(i, close)| -> Result<()> {
let open = weekindex_first_iter.unwrap_get_inner(i); let open = weekindex_first_iter.unsafe_get_(i);
let high = weekindex_max_iter.unwrap_get_inner(i); let high = weekindex_max_iter.unsafe_get_(i);
let low = weekindex_min_iter.unwrap_get_inner(i); let low = weekindex_min_iter.unsafe_get_(i);
self.weekindex_to_price_ohlc.forced_push_at( self.weekindex_to_price_ohlc.forced_push_at(
i, WeekIndex::from(i),
OHLCDollars { OHLCDollars {
open, open,
high, high,
@@ -577,11 +589,13 @@ impl Vecs {
self.chainindexes_to_price_close self.chainindexes_to_price_close
.difficultyepoch .difficultyepoch
.unwrap_last() .unwrap_last()
.iter_at(index) .iter()
.skip(index)
.enumerate()
.try_for_each(|(i, close)| -> Result<()> { .try_for_each(|(i, close)| -> Result<()> {
let open = difficultyepoch_first_iter.unwrap_get_inner(i); let open = difficultyepoch_first_iter.unsafe_get(i);
let high = difficultyepoch_max_iter.unwrap_get_inner(i); let high = difficultyepoch_max_iter.unsafe_get(i);
let low = difficultyepoch_min_iter.unwrap_get_inner(i); let low = difficultyepoch_min_iter.unsafe_get(i);
self.difficultyepoch_to_price_ohlc.forced_push_at( self.difficultyepoch_to_price_ohlc.forced_push_at(
i, i,
OHLCDollars { OHLCDollars {
@@ -613,11 +627,13 @@ impl Vecs {
self.timeindexes_to_price_close self.timeindexes_to_price_close
.monthindex .monthindex
.unwrap_last() .unwrap_last()
.iter_at(index) .iter()
.skip(index)
.enumerate()
.try_for_each(|(i, close)| -> Result<()> { .try_for_each(|(i, close)| -> Result<()> {
let open = monthindex_first_iter.unwrap_get_inner(i); let open = monthindex_first_iter.unsafe_get(i);
let high = monthindex_max_iter.unwrap_get_inner(i); let high = monthindex_max_iter.unsafe_get(i);
let low = monthindex_min_iter.unwrap_get_inner(i); let low = monthindex_min_iter.unsafe_get(i);
self.monthindex_to_price_ohlc.forced_push_at( self.monthindex_to_price_ohlc.forced_push_at(
i, i,
OHLCDollars { OHLCDollars {
@@ -653,11 +669,13 @@ impl Vecs {
self.timeindexes_to_price_close self.timeindexes_to_price_close
.quarterindex .quarterindex
.unwrap_last() .unwrap_last()
.iter_at(index) .iter()
.skip(index)
.enumerate()
.try_for_each(|(i, close)| -> Result<()> { .try_for_each(|(i, close)| -> Result<()> {
let open = quarterindex_first_iter.unwrap_get_inner(i); let open = quarterindex_first_iter.unsafe_get(i);
let high = quarterindex_max_iter.unwrap_get_inner(i); let high = quarterindex_max_iter.unsafe_get(i);
let low = quarterindex_min_iter.unwrap_get_inner(i); let low = quarterindex_min_iter.unsafe_get(i);
self.quarterindex_to_price_ohlc.forced_push_at( self.quarterindex_to_price_ohlc.forced_push_at(
i, i,
OHLCDollars { OHLCDollars {
@@ -693,11 +711,13 @@ impl Vecs {
self.timeindexes_to_price_close self.timeindexes_to_price_close
.semesterindex .semesterindex
.unwrap_last() .unwrap_last()
.iter_at(index) .iter()
.skip(index)
.enumerate()
.try_for_each(|(i, close)| -> Result<()> { .try_for_each(|(i, close)| -> Result<()> {
let open = semesterindex_first_iter.unwrap_get_inner(i); let open = semesterindex_first_iter.unsafe_get(i);
let high = semesterindex_max_iter.unwrap_get_inner(i); let high = semesterindex_max_iter.unsafe_get(i);
let low = semesterindex_min_iter.unwrap_get_inner(i); let low = semesterindex_min_iter.unsafe_get(i);
self.semesterindex_to_price_ohlc.forced_push_at( self.semesterindex_to_price_ohlc.forced_push_at(
i, i,
OHLCDollars { OHLCDollars {
@@ -725,11 +745,13 @@ impl Vecs {
self.timeindexes_to_price_close self.timeindexes_to_price_close
.yearindex .yearindex
.unwrap_last() .unwrap_last()
.iter_at(index) .iter()
.skip(index)
.enumerate()
.try_for_each(|(i, close)| -> Result<()> { .try_for_each(|(i, close)| -> Result<()> {
let open = yearindex_first_iter.unwrap_get_inner(i); let open = yearindex_first_iter.unsafe_get(i);
let high = yearindex_max_iter.unwrap_get_inner(i); let high = yearindex_max_iter.unsafe_get(i);
let low = yearindex_min_iter.unwrap_get_inner(i); let low = yearindex_min_iter.unsafe_get(i);
self.yearindex_to_price_ohlc.forced_push_at( self.yearindex_to_price_ohlc.forced_push_at(
i, i,
OHLCDollars { OHLCDollars {
@@ -768,11 +790,13 @@ impl Vecs {
self.timeindexes_to_price_close self.timeindexes_to_price_close
.decadeindex .decadeindex
.unwrap_last() .unwrap_last()
.iter_at(index) .iter()
.skip(index)
.enumerate()
.try_for_each(|(i, close)| -> Result<()> { .try_for_each(|(i, close)| -> Result<()> {
let open = decadeindex_first_iter.unwrap_get_inner(i); let open = decadeindex_first_iter.unsafe_get(i);
let high = decadeindex_max_iter.unwrap_get_inner(i); let high = decadeindex_max_iter.unsafe_get(i);
let low = decadeindex_min_iter.unwrap_get_inner(i); let low = decadeindex_min_iter.unsafe_get(i);
self.decadeindex_to_price_ohlc.forced_push_at( self.decadeindex_to_price_ohlc.forced_push_at(
i, i,
OHLCDollars { OHLCDollars {
@@ -883,14 +907,16 @@ impl Vecs {
.min(Height::from(self.height_to_price_ohlc_in_sats.len())); .min(Height::from(self.height_to_price_ohlc_in_sats.len()));
self.chainindexes_to_price_close_in_sats self.chainindexes_to_price_close_in_sats
.height .height
.iter_at(index) .iter()
.skip(index)
.enumerate()
.try_for_each(|(i, close)| -> Result<()> { .try_for_each(|(i, close)| -> Result<()> {
self.height_to_price_ohlc_in_sats.forced_push_at( self.height_to_price_ohlc_in_sats.forced_push_at(
i, i,
OHLCSats { OHLCSats {
open: height_first_iter.unwrap_get_inner(i), open: height_first_iter.unsafe_get(i),
high: height_max_iter.unwrap_get_inner(i), high: height_max_iter.unsafe_get(i),
low: height_min_iter.unwrap_get_inner(i), low: height_min_iter.unsafe_get(i),
close, close,
}, },
exit, exit,
@@ -924,14 +950,16 @@ impl Vecs {
.dateindex .dateindex
.as_ref() .as_ref()
.unwrap() .unwrap()
.iter_at(index) .iter()
.skip(index)
.enumerate()
.try_for_each(|(i, close)| -> Result<()> { .try_for_each(|(i, close)| -> Result<()> {
self.dateindex_to_price_ohlc_in_sats.forced_push_at( self.dateindex_to_price_ohlc_in_sats.forced_push_at(
i, i,
OHLCSats { OHLCSats {
open: dateindex_first_iter.unwrap_get_inner(i), open: dateindex_first_iter.unsafe_get(i),
high: dateindex_max_iter.unwrap_get_inner(i), high: dateindex_max_iter.unsafe_get(i),
low: dateindex_min_iter.unwrap_get_inner(i), low: dateindex_min_iter.unsafe_get(i),
close, close,
}, },
exit, exit,
@@ -961,14 +989,16 @@ impl Vecs {
self.timeindexes_to_price_close_in_sats self.timeindexes_to_price_close_in_sats
.weekindex .weekindex
.unwrap_last() .unwrap_last()
.iter_at(index) .iter()
.skip(index)
.enumerate()
.try_for_each(|(i, close)| -> Result<()> { .try_for_each(|(i, close)| -> Result<()> {
self.weekindex_to_price_ohlc_in_sats.forced_push_at( self.weekindex_to_price_ohlc_in_sats.forced_push_at(
i, i,
OHLCSats { OHLCSats {
open: weekindex_first_iter.unwrap_get_inner(i), open: weekindex_first_iter.unsafe_get(i),
high: weekindex_max_iter.unwrap_get_inner(i), high: weekindex_max_iter.unsafe_get(i),
low: weekindex_min_iter.unwrap_get_inner(i), low: weekindex_min_iter.unsafe_get(i),
close, close,
}, },
exit, exit,
@@ -998,14 +1028,16 @@ impl Vecs {
self.chainindexes_to_price_close_in_sats self.chainindexes_to_price_close_in_sats
.difficultyepoch .difficultyepoch
.unwrap_last() .unwrap_last()
.iter_at(index) .iter()
.skip(index)
.enumerate()
.try_for_each(|(i, close)| -> Result<()> { .try_for_each(|(i, close)| -> Result<()> {
self.difficultyepoch_to_price_ohlc_in_sats.forced_push_at( self.difficultyepoch_to_price_ohlc_in_sats.forced_push_at(
i, i,
OHLCSats { OHLCSats {
open: difficultyepoch_first_iter.unwrap_get_inner(i), open: difficultyepoch_first_iter.unsafe_get(i),
high: difficultyepoch_max_iter.unwrap_get_inner(i), high: difficultyepoch_max_iter.unsafe_get(i),
low: difficultyepoch_min_iter.unwrap_get_inner(i), low: difficultyepoch_min_iter.unsafe_get(i),
close, close,
}, },
exit, exit,
@@ -1036,14 +1068,16 @@ impl Vecs {
self.timeindexes_to_price_close_in_sats self.timeindexes_to_price_close_in_sats
.monthindex .monthindex
.unwrap_last() .unwrap_last()
.iter_at(index) .iter()
.skip(index)
.enumerate()
.try_for_each(|(i, close)| -> Result<()> { .try_for_each(|(i, close)| -> Result<()> {
self.monthindex_to_price_ohlc_in_sats.forced_push_at( self.monthindex_to_price_ohlc_in_sats.forced_push_at(
i, i,
OHLCSats { OHLCSats {
open: monthindex_first_iter.unwrap_get_inner(i), open: monthindex_first_iter.unsafe_get(i),
high: monthindex_max_iter.unwrap_get_inner(i), high: monthindex_max_iter.unsafe_get(i),
low: monthindex_min_iter.unwrap_get_inner(i), low: monthindex_min_iter.unsafe_get(i),
close, close,
}, },
exit, exit,
@@ -1073,14 +1107,16 @@ impl Vecs {
self.timeindexes_to_price_close_in_sats self.timeindexes_to_price_close_in_sats
.quarterindex .quarterindex
.unwrap_last() .unwrap_last()
.iter_at(index) .iter()
.skip(index)
.enumerate()
.try_for_each(|(i, close)| -> Result<()> { .try_for_each(|(i, close)| -> Result<()> {
self.quarterindex_to_price_ohlc_in_sats.forced_push_at( self.quarterindex_to_price_ohlc_in_sats.forced_push_at(
i, i,
OHLCSats { OHLCSats {
open: quarterindex_first_iter.unwrap_get_inner(i), open: quarterindex_first_iter.unsafe_get(i),
high: quarterindex_max_iter.unwrap_get_inner(i), high: quarterindex_max_iter.unsafe_get(i),
low: quarterindex_min_iter.unwrap_get_inner(i), low: quarterindex_min_iter.unsafe_get(i),
close, close,
}, },
exit, exit,
@@ -1110,14 +1146,16 @@ impl Vecs {
self.timeindexes_to_price_close_in_sats self.timeindexes_to_price_close_in_sats
.semesterindex .semesterindex
.unwrap_last() .unwrap_last()
.iter_at(index) .iter()
.skip(index)
.enumerate()
.try_for_each(|(i, close)| -> Result<()> { .try_for_each(|(i, close)| -> Result<()> {
self.semesterindex_to_price_ohlc_in_sats.forced_push_at( self.semesterindex_to_price_ohlc_in_sats.forced_push_at(
i, i,
OHLCSats { OHLCSats {
open: semesterindex_first_iter.unwrap_get_inner(i), open: semesterindex_first_iter.unsafe_get(i),
high: semesterindex_max_iter.unwrap_get_inner(i), high: semesterindex_max_iter.unsafe_get(i),
low: semesterindex_min_iter.unwrap_get_inner(i), low: semesterindex_min_iter.unsafe_get(i),
close, close,
}, },
exit, exit,
@@ -1147,14 +1185,16 @@ impl Vecs {
self.timeindexes_to_price_close_in_sats self.timeindexes_to_price_close_in_sats
.yearindex .yearindex
.unwrap_last() .unwrap_last()
.iter_at(index) .iter()
.skip(index)
.enumerate()
.try_for_each(|(i, close)| -> Result<()> { .try_for_each(|(i, close)| -> Result<()> {
self.yearindex_to_price_ohlc_in_sats.forced_push_at( self.yearindex_to_price_ohlc_in_sats.forced_push_at(
i, i,
OHLCSats { OHLCSats {
open: yearindex_first_iter.unwrap_get_inner(i), open: yearindex_first_iter.unsafe_get(i),
high: yearindex_max_iter.unwrap_get_inner(i), high: yearindex_max_iter.unsafe_get(i),
low: yearindex_min_iter.unwrap_get_inner(i), low: yearindex_min_iter.unsafe_get(i),
close, close,
}, },
exit, exit,
@@ -1187,14 +1227,16 @@ impl Vecs {
self.timeindexes_to_price_close_in_sats self.timeindexes_to_price_close_in_sats
.decadeindex .decadeindex
.unwrap_last() .unwrap_last()
.iter_at(index) .iter()
.skip(index)
.enumerate()
.try_for_each(|(i, close)| -> Result<()> { .try_for_each(|(i, close)| -> Result<()> {
self.decadeindex_to_price_ohlc_in_sats.forced_push_at( self.decadeindex_to_price_ohlc_in_sats.forced_push_at(
i, i,
OHLCSats { OHLCSats {
open: decadeindex_first_iter.unwrap_get_inner(i), open: decadeindex_first_iter.unsafe_get(i),
high: decadeindex_max_iter.unwrap_get_inner(i), high: decadeindex_max_iter.unsafe_get(i),
low: decadeindex_min_iter.unwrap_get_inner(i), low: decadeindex_min_iter.unsafe_get(i),
close, close,
}, },
exit, exit,

View File

@@ -112,7 +112,7 @@ impl DynCohortVecs for Vecs {
self.state.as_mut().unwrap().addr_count = *self self.state.as_mut().unwrap().addr_count = *self
.height_to_addr_count .height_to_addr_count
.into_iter() .into_iter()
.unwrap_get_inner(prev_height); .unsafe_get(prev_height);
} }
Ok(starting_height) Ok(starting_height)

View File

@@ -13,34 +13,14 @@ impl From<(&AddressTypeToHeightToAddressCount, Height)> for AddressTypeToAddress
fn from((groups, starting_height): (&AddressTypeToHeightToAddressCount, Height)) -> Self { fn from((groups, starting_height): (&AddressTypeToHeightToAddressCount, Height)) -> Self {
if let Some(prev_height) = starting_height.decremented() { if let Some(prev_height) = starting_height.decremented() {
Self(ByAddressType { Self(ByAddressType {
p2pk65: groups p2pk65: groups.p2pk65.into_iter().unsafe_get(prev_height).into(),
.p2pk65 p2pk33: groups.p2pk33.into_iter().unsafe_get(prev_height).into(),
.into_iter() p2pkh: groups.p2pkh.into_iter().unsafe_get(prev_height).into(),
.unwrap_get_inner(prev_height) p2sh: groups.p2sh.into_iter().unsafe_get(prev_height).into(),
.into(), p2wpkh: groups.p2wpkh.into_iter().unsafe_get(prev_height).into(),
p2pk33: groups p2wsh: groups.p2wsh.into_iter().unsafe_get(prev_height).into(),
.p2pk33 p2tr: groups.p2tr.into_iter().unsafe_get(prev_height).into(),
.into_iter() p2a: groups.p2a.into_iter().unsafe_get(prev_height).into(),
.unwrap_get_inner(prev_height)
.into(),
p2pkh: groups
.p2pkh
.into_iter()
.unwrap_get_inner(prev_height)
.into(),
p2sh: groups.p2sh.into_iter().unwrap_get_inner(prev_height).into(),
p2wpkh: groups
.p2wpkh
.into_iter()
.unwrap_get_inner(prev_height)
.into(),
p2wsh: groups
.p2wsh
.into_iter()
.unwrap_get_inner(prev_height)
.into(),
p2tr: groups.p2tr.into_iter().unwrap_get_inner(prev_height).into(),
p2a: groups.p2a.into_iter().unwrap_get_inner(prev_height).into(),
}) })
} else { } else {
Default::default() Default::default()

View File

@@ -1338,19 +1338,15 @@ impl Vecs {
prev_height = state.import_at_or_before(prev_height)?; prev_height = state.import_at_or_before(prev_height)?;
} }
state.supply.value = self state.supply.value = self.height_to_supply.into_iter().unsafe_get(prev_height);
.height_to_supply
.into_iter()
.unwrap_get_inner(prev_height);
state.supply.utxo_count = *self state.supply.utxo_count = *self
.height_to_utxo_count .height_to_utxo_count
.into_iter() .into_iter()
.unwrap_get_inner(prev_height); .unsafe_get(prev_height);
if let Some(height_to_realized_cap) = self.height_to_realized_cap.as_mut() { if let Some(height_to_realized_cap) = self.height_to_realized_cap.as_mut() {
state.realized.as_mut().unwrap().cap = height_to_realized_cap state.realized.as_mut().unwrap().cap =
.into_iter() height_to_realized_cap.into_iter().unsafe_get(prev_height);
.unwrap_get_inner(prev_height);
} }
Ok(prev_height.incremented()) Ok(prev_height.incremented())
@@ -2106,11 +2102,11 @@ impl Vecs {
starting_indexes.dateindex, starting_indexes.dateindex,
&indexes.dateindex_to_first_height, &indexes.dateindex_to_first_height,
|(i, height, ..)| { |(i, height, ..)| {
let count = dateindex_to_height_count_iter.unwrap_get_inner(i); let count = dateindex_to_height_count_iter.unsafe_get(i);
if count == StoredU64::default() { if count == StoredU64::default() {
unreachable!() unreachable!()
} }
let supply = height_to_supply_iter.unwrap_get_inner(height + (*count - 1)); let supply = height_to_supply_iter.unsafe_get(height + (*count - 1));
(i, supply) (i, supply)
}, },
exit, exit,

View File

@@ -502,7 +502,7 @@ impl Vecs {
exit: &Exit, exit: &Exit,
) -> Result<()> { ) -> Result<()> {
self.compute_(indexer, indexes, chain, price, starting_indexes, exit)?; self.compute_(indexer, indexes, chain, price, starting_indexes, exit)?;
self.db.flush_then_punch()?; self.db.compact()?;
Ok(()) Ok(())
} }
@@ -712,10 +712,10 @@ impl Vecs {
.enumerate() .enumerate()
.map(|(height, supply)| { .map(|(height, supply)| {
let height = Height::from(height); let height = Height::from(height);
let timestamp = height_to_timestamp_fixed_iter.unwrap_get_inner(height); let timestamp = height_to_timestamp_fixed_iter.unsafe_get(height);
let price = height_to_price_close_iter let price = height_to_price_close_iter
.as_mut() .as_mut()
.map(|i| *i.unwrap_get_inner(height)); .map(|i| *i.unsafe_get(height));
BlockState { BlockState {
timestamp, timestamp,
price, price,
@@ -794,14 +794,14 @@ impl Vecs {
let mut unspendable_supply = if let Some(prev_height) = starting_height.decremented() { let mut unspendable_supply = if let Some(prev_height) = starting_height.decremented() {
self.height_to_unspendable_supply self.height_to_unspendable_supply
.into_iter() .into_iter()
.unwrap_get_inner(prev_height) .unsafe_get(prev_height)
} else { } else {
Sats::ZERO Sats::ZERO
}; };
let mut opreturn_supply = if let Some(prev_height) = starting_height.decremented() { let mut opreturn_supply = if let Some(prev_height) = starting_height.decremented() {
self.height_to_opreturn_supply self.height_to_opreturn_supply
.into_iter() .into_iter()
.unwrap_get_inner(prev_height) .unsafe_get(prev_height)
} else { } else {
Sats::ZERO Sats::ZERO
}; };
@@ -847,20 +847,18 @@ impl Vecs {
v.state.as_mut().unwrap().reset_single_iteration_values() v.state.as_mut().unwrap().reset_single_iteration_values()
}); });
let timestamp = height_to_timestamp_fixed_iter.unwrap_get_inner(height); let timestamp = height_to_timestamp_fixed_iter.unsafe_get(height);
let price = height_to_price_close_iter let price = height_to_price_close_iter
.as_mut() .as_mut()
.map(|i| *i.unwrap_get_inner(height)); .map(|i| *i.unsafe_get(height));
let first_txindex = height_to_first_txindex_iter.unwrap_get_inner(height); let first_txindex = height_to_first_txindex_iter.unsafe_get(height);
let first_txoutindex = height_to_first_txoutindex_iter let first_txoutindex = height_to_first_txoutindex_iter
.unwrap_get_inner(height) .unsafe_get(height)
.to_usize(); .to_usize();
let first_txinindex = height_to_first_txinindex_iter let first_txinindex = height_to_first_txinindex_iter.unsafe_get(height).to_usize();
.unwrap_get_inner(height) let tx_count = height_to_tx_count_iter.unsafe_get(height);
.to_usize(); let output_count = height_to_output_count_iter.unsafe_get(height);
let tx_count = height_to_tx_count_iter.unwrap_get_inner(height); let input_count = height_to_input_count_iter.unsafe_get(height);
let output_count = height_to_output_count_iter.unwrap_get_inner(height);
let input_count = height_to_input_count_iter.unwrap_get_inner(height);
let txoutindex_to_txindex = build_txoutindex_to_txindex( let txoutindex_to_txindex = build_txoutindex_to_txindex(
first_txindex, first_txindex,
@@ -876,28 +874,28 @@ impl Vecs {
let first_addressindexes: ByAddressType<TypeIndex> = ByAddressType { let first_addressindexes: ByAddressType<TypeIndex> = ByAddressType {
p2a: height_to_first_p2aaddressindex_iter p2a: height_to_first_p2aaddressindex_iter
.unwrap_get_inner(height) .unsafe_get(height)
.into(), .into(),
p2pk33: height_to_first_p2pk33addressindex_iter p2pk33: height_to_first_p2pk33addressindex_iter
.unwrap_get_inner(height) .unsafe_get(height)
.into(), .into(),
p2pk65: height_to_first_p2pk65addressindex_iter p2pk65: height_to_first_p2pk65addressindex_iter
.unwrap_get_inner(height) .unsafe_get(height)
.into(), .into(),
p2pkh: height_to_first_p2pkhaddressindex_iter p2pkh: height_to_first_p2pkhaddressindex_iter
.unwrap_get_inner(height) .unsafe_get(height)
.into(), .into(),
p2sh: height_to_first_p2shaddressindex_iter p2sh: height_to_first_p2shaddressindex_iter
.unwrap_get_inner(height) .unsafe_get(height)
.into(), .into(),
p2tr: height_to_first_p2traddressindex_iter p2tr: height_to_first_p2traddressindex_iter
.unwrap_get_inner(height) .unsafe_get(height)
.into(), .into(),
p2wpkh: height_to_first_p2wpkhaddressindex_iter p2wpkh: height_to_first_p2wpkhaddressindex_iter
.unwrap_get_inner(height) .unsafe_get(height)
.into(), .into(),
p2wsh: height_to_first_p2wshaddressindex_iter p2wsh: height_to_first_p2wshaddressindex_iter
.unwrap_get_inner(height) .unsafe_get(height)
.into(), .into(),
}; };
@@ -1211,7 +1209,7 @@ impl Vecs {
.into_iter() .into_iter()
.map(|state| state.value) .map(|state| state.value)
.sum::<Sats>() .sum::<Sats>()
+ height_to_unclaimed_rewards_iter.unwrap_get_inner(height); + height_to_unclaimed_rewards_iter.unsafe_get(height);
opreturn_supply += transacted.by_type.unspendable.opreturn.value; opreturn_supply += transacted.by_type.unspendable.opreturn.value;
@@ -1258,16 +1256,16 @@ impl Vecs {
self.addresstype_to_height_to_empty_addr_count self.addresstype_to_height_to_empty_addr_count
.forced_push_at(height, &addresstype_to_empty_addr_count, exit)?; .forced_push_at(height, &addresstype_to_empty_addr_count, exit)?;
let date = height_to_date_fixed_iter.unwrap_get_inner(height); let date = height_to_date_fixed_iter.unsafe_get(height);
let dateindex = DateIndex::try_from(date).unwrap(); let dateindex = DateIndex::try_from(date).unwrap();
let date_first_height = dateindex_to_first_height_iter.unwrap_get_inner(dateindex); let date_first_height = dateindex_to_first_height_iter.unsafe_get(dateindex);
let date_height_count = dateindex_to_height_count_iter.unwrap_get_inner(dateindex); let date_height_count = dateindex_to_height_count_iter.unsafe_get(dateindex);
let is_date_last_height = date_first_height let is_date_last_height = date_first_height
+ Height::from(date_height_count).decremented().unwrap() + Height::from(date_height_count).decremented().unwrap()
== height; == height;
let date_price = dateindex_to_price_close_iter let date_price = dateindex_to_price_close_iter
.as_mut() .as_mut()
.map(|v| is_date_last_height.then(|| *v.unwrap_get_inner(dateindex))); .map(|v| is_date_last_height.then(|| *v.unsafe_get(dateindex)));
let dateindex = is_date_last_height.then_some(dateindex); let dateindex = is_date_last_height.then_some(dateindex);
@@ -2137,7 +2135,7 @@ fn build_txoutindex_to_txindex<'a>(
let block_first_txindex = block_first_txindex.to_usize(); let block_first_txindex = block_first_txindex.to_usize();
for tx_offset in 0..block_tx_count as usize { for tx_offset in 0..block_tx_count as usize {
let txindex = TxIndex::from(block_first_txindex + tx_offset); let txindex = TxIndex::from(block_first_txindex + tx_offset);
let output_count = u64::from(txindex_to_output_count.unwrap_get_inner(txindex)); let output_count = u64::from(txindex_to_output_count.unsafe_get(txindex));
for _ in 0..output_count { for _ in 0..output_count {
vec.push(txindex); vec.push(txindex);
@@ -2157,7 +2155,7 @@ fn build_txinindex_to_txindex<'a>(
let block_first_txindex = block_first_txindex.to_usize(); let block_first_txindex = block_first_txindex.to_usize();
for tx_offset in 0..block_tx_count as usize { for tx_offset in 0..block_tx_count as usize {
let txindex = TxIndex::from(block_first_txindex + tx_offset); let txindex = TxIndex::from(block_first_txindex + tx_offset);
let input_count = u64::from(txindex_to_input_count.unwrap_get_inner(txindex)); let input_count = u64::from(txindex_to_input_count.unsafe_get(txindex));
for _ in 0..input_count { for _ in 0..input_count {
vec.push(txindex); vec.push(txindex);

View File

@@ -39,15 +39,19 @@ impl ComputeDCAStackViaLen for EagerVec<DateIndex, Sats> {
let mut other_iter = closes.iter(); let mut other_iter = closes.iter();
let mut prev = None; let mut prev = None;
let index = max_from.min(DateIndex::from(self.len())); let index = max_from.to_usize().min(self.len());
closes.iter_at(index).try_for_each(|(i, closes)| { closes
.iter()
.skip(index)
.enumerate()
.try_for_each(|(i, closes)| {
let price = *closes; let price = *closes;
let i_usize = i.to_usize(); let i_usize = i.to_usize();
if prev.is_none() { if prev.is_none() {
if i_usize == 0 { if i_usize == 0 {
prev.replace(Sats::ZERO); prev.replace(Sats::ZERO);
} else { } else {
prev.replace(self.into_iter().unwrap_get_inner_(i_usize - 1)); prev.replace(self.one_shot_get_any_or_read_(i_usize - 1));
} }
} }
@@ -57,7 +61,7 @@ impl ComputeDCAStackViaLen for EagerVec<DateIndex, Sats> {
stack = prev.unwrap() + Sats::from(Bitcoin::from(DCA_AMOUNT / price)); stack = prev.unwrap() + Sats::from(Bitcoin::from(DCA_AMOUNT / price));
if i_usize >= len { if i_usize >= len {
let prev_price = *other_iter.unwrap_get_inner_(i_usize - len); let prev_price = *other_iter.get_(i_usize - len);
if prev_price != Dollars::ZERO { if prev_price != Dollars::ZERO {
stack = stack stack = stack
.checked_sub(Sats::from(Bitcoin::from(DCA_AMOUNT / prev_price))) .checked_sub(Sats::from(Bitcoin::from(DCA_AMOUNT / prev_price)))
@@ -90,14 +94,18 @@ impl ComputeDCAStackViaLen for EagerVec<DateIndex, Sats> {
let mut prev = None; let mut prev = None;
let index = max_from.min(DateIndex::from(self.len())); let index = max_from.min(DateIndex::from(self.len()));
closes.iter_at(index).try_for_each(|(i, closes)| { closes
.iter()
.skip(index)
.enumerate()
.try_for_each(|(i, closes)| {
let price = *closes; let price = *closes;
let i_usize = i.to_usize(); let i_usize = i.to_usize();
if prev.is_none() { if prev.is_none() {
if i_usize == 0 { if i_usize == 0 {
prev.replace(Sats::ZERO); prev.replace(Sats::ZERO);
} else { } else {
prev.replace(self.into_iter().unwrap_get_inner_(i_usize - 1)); prev.replace(self.one_shot_get_any_or_read_(i_usize - 1));
} }
} }
@@ -151,7 +159,11 @@ impl ComputeDCAAveragePriceViaLen for EagerVec<DateIndex, Dollars> {
let first_price_date = DateIndex::try_from(Date::new(2010, 7, 12)).unwrap(); let first_price_date = DateIndex::try_from(Date::new(2010, 7, 12)).unwrap();
stacks.iter_at(index).try_for_each(|(i, stack)| { stacks
.iter()
.skip(index.to_usize())
.enumerate()
.try_for_each(|(i, stack)| {
let mut avg_price = Dollars::from(f64::NAN); let mut avg_price = Dollars::from(f64::NAN);
if i > first_price_date { if i > first_price_date {
avg_price = DCA_AMOUNT avg_price = DCA_AMOUNT
@@ -183,7 +195,11 @@ impl ComputeDCAAveragePriceViaLen for EagerVec<DateIndex, Dollars> {
let from_usize = from.to_usize(); let from_usize = from.to_usize();
stacks.iter_at(index).try_for_each(|(i, stack)| { stacks
.iter()
.skip(index)
.enumerate()
.try_for_each(|(i, stack)| {
let mut avg_price = Dollars::from(f64::NAN); let mut avg_price = Dollars::from(f64::NAN);
if i >= from { if i >= from {
avg_price = DCA_AMOUNT * (i.to_usize() + 1 - from_usize) / Bitcoin::from(stack); avg_price = DCA_AMOUNT * (i.to_usize() + 1 - from_usize) / Bitcoin::from(stack);
@@ -220,7 +236,10 @@ where
)?; )?;
let index = max_from.min(I::from(self.len())); let index = max_from.min(I::from(self.len()));
sats.iter_at(index).try_for_each(|(i, sats)| { sats.iter()
.skip(index)
.enumerate()
.try_for_each(|(i, sats)| {
let (i, v) = (i, Bitcoin::from(sats)); let (i, v) = (i, Bitcoin::from(sats));
self.forced_push_at(i, v, exit) self.forced_push_at(i, v, exit)
})?; })?;
@@ -257,8 +276,12 @@ where
let mut price_iter = price.iter(); let mut price_iter = price.iter();
let index = max_from.min(I::from(self.len())); let index = max_from.min(I::from(self.len()));
bitcoin.iter_at(index).try_for_each(|(i, bitcoin)| { bitcoin
let dollars = price_iter.unwrap_get_inner(i); .iter()
.skip(index)
.enumerate()
.try_for_each(|(i, bitcoin)| {
let dollars = price_iter.unsafe_get(i);
let (i, v) = (i, *dollars * bitcoin); let (i, v) = (i, *dollars * bitcoin);
self.forced_push_at(i, v, exit) self.forced_push_at(i, v, exit)
})?; })?;
@@ -295,11 +318,14 @@ where
let index = max_from.min(I::from(self.len())); let index = max_from.min(I::from(self.len()));
let mut close_iter = close.iter(); let mut close_iter = close.iter();
ath.iter_at(index).try_for_each(|(i, ath)| { ath.iter()
.skip(index)
.enumerate()
.try_for_each(|(i, ath)| {
if ath == Dollars::ZERO { if ath == Dollars::ZERO {
self.forced_push_at(i, StoredF32::default(), exit) self.forced_push_at(i, StoredF32::default(), exit)
} else { } else {
let close = *close_iter.unwrap_get_inner(i); let close = *close_iter.unsafe_get(i);
let drawdown = StoredF32::from((*ath - *close) / *ath * -100.0); let drawdown = StoredF32::from((*ath - *close) / *ath * -100.0);
self.forced_push_at(i, drawdown, exit) self.forced_push_at(i, drawdown, exit)
} }

View File

@@ -24,6 +24,6 @@ fjall2 = { workspace = true }
fjall3 = { workspace = true } fjall3 = { workspace = true }
log = { workspace = true } log = { workspace = true }
rayon = { workspace = true } rayon = { workspace = true }
redb = { workspace = true }
rustc-hash = { workspace = true } rustc-hash = { workspace = true }
vecdb = { workspace = true } vecdb = { workspace = true }
rand = "0.9.2"

View File

@@ -1,5 +1,5 @@
use std::{ use std::{
fs, env, fs,
path::Path, path::Path,
thread::sleep, thread::sleep,
time::{Duration, Instant}, time::{Duration, Instant},
@@ -15,18 +15,15 @@ use vecdb::Exit;
fn main() -> Result<()> { fn main() -> Result<()> {
brk_logger::init(Some(Path::new(".log")))?; brk_logger::init(Some(Path::new(".log")))?;
let bitcoin_dir = Path::new(&std::env::var("HOME").unwrap()) let bitcoin_dir = Client::default_bitcoin_path();
.join("Library")
.join("Application Support")
.join("Bitcoin");
// let bitcoin_dir = Path::new("/Volumes/WD_BLACK1/bitcoin"); // let bitcoin_dir = Path::new("/Volumes/WD_BLACK1/bitcoin");
let outputs_dir = Path::new(&std::env::var("HOME").unwrap()).join(".brk"); let outputs_dir = Path::new(&env::var("HOME").unwrap()).join(".brk");
fs::create_dir_all(&outputs_dir)?; fs::create_dir_all(&outputs_dir)?;
// let outputs_dir = Path::new("/Volumes/WD_BLACK1/brk"); // let outputs_dir = Path::new("/Volumes/WD_BLACK1/brk");
let client = Client::new( let client = Client::new(
"http://localhost:8332", Client::default_url(),
Auth::CookieFile(bitcoin_dir.join(".cookie")), Auth::CookieFile(bitcoin_dir.join(".cookie")),
)?; )?;

View File

@@ -9,7 +9,7 @@ use brk_error::Result;
use brk_indexer::Indexer; use brk_indexer::Indexer;
use brk_types::TxInIndex; use brk_types::TxInIndex;
use rayon::prelude::*; use rayon::prelude::*;
use vecdb::{GenericStoredVec, StoredIndex}; use vecdb::{AnyVec, GenericStoredVec, StoredIndex};
fn main() -> Result<()> { fn main() -> Result<()> {
brk_logger::init(Some(Path::new(".log")))?; brk_logger::init(Some(Path::new(".log")))?;
@@ -20,8 +20,8 @@ fn main() -> Result<()> {
let indexer = Indexer::forced_import(&outputs_dir)?; let indexer = Indexer::forced_import(&outputs_dir)?;
let vecs = indexer.vecs; let vecs = indexer.vecs;
let output_len = vecs.txoutindex_to_value.len_(); let output_len = vecs.txoutindex_to_value.len();
let input_len = vecs.txinindex_to_outpoint.len_(); let input_len = vecs.txinindex_to_outpoint.len();
dbg!(output_len, input_len); dbg!(output_len, input_len);
// Simulate processing blocks // Simulate processing blocks
@@ -244,11 +244,11 @@ fn run_method1(
.map(|i| { .map(|i| {
( (
vecs.txoutindex_to_value vecs.txoutindex_to_value
.unwrap_read_(i, &txoutindex_to_value_reader), .read_at_unwrap(i, &txoutindex_to_value_reader),
vecs.txoutindex_to_outputtype vecs.txoutindex_to_outputtype
.unwrap_read_(i, &txoutindex_to_outputtype_reader), .read_at_unwrap(i, &txoutindex_to_outputtype_reader),
vecs.txoutindex_to_typeindex vecs.txoutindex_to_typeindex
.unwrap_read_(i, &txoutindex_to_typeindex_reader), .read_at_unwrap(i, &txoutindex_to_typeindex_reader),
) )
}) })
.collect(); .collect();
@@ -261,20 +261,20 @@ fn run_method1(
.filter_map(|i| { .filter_map(|i| {
let outpoint = vecs let outpoint = vecs
.txinindex_to_outpoint .txinindex_to_outpoint
.unwrap_read_(i, &txinindex_to_outpoint_reader); .read_at_unwrap(i, &txinindex_to_outpoint_reader);
if outpoint.is_coinbase() { if outpoint.is_coinbase() {
return None; return None;
} }
let first_txoutindex = vecs.txindex_to_first_txoutindex.unwrap_read_( let first_txoutindex = vecs.txindex_to_first_txoutindex.read_at_unwrap(
outpoint.txindex().to_usize(), outpoint.txindex().to_usize(),
&txindex_to_first_txoutindex_reader, &txindex_to_first_txoutindex_reader,
); );
let txoutindex = first_txoutindex.to_usize() + usize::from(outpoint.vout()); let txoutindex = first_txoutindex.to_usize() + usize::from(outpoint.vout());
let value = vecs let value = vecs
.txoutindex_to_value .txoutindex_to_value
.unwrap_read_(txoutindex, &txoutindex_to_value_reader); .read_at_unwrap(txoutindex, &txoutindex_to_value_reader);
Some(u64::from(value)) Some(u64::from(value))
}) })
.sum(); .sum();
@@ -350,14 +350,14 @@ fn run_method2(
return None; return None;
} }
let first_txoutindex = vecs.txindex_to_first_txoutindex.unwrap_read_( let first_txoutindex = vecs.txindex_to_first_txoutindex.read_at_unwrap(
outpoint.txindex().to_usize(), outpoint.txindex().to_usize(),
&txindex_to_first_txoutindex_reader, &txindex_to_first_txoutindex_reader,
); );
let txoutindex = first_txoutindex.to_usize() + usize::from(outpoint.vout()); let txoutindex = first_txoutindex.to_usize() + usize::from(outpoint.vout());
let value = vecs let value = vecs
.txoutindex_to_value .txoutindex_to_value
.unwrap_read_(txoutindex, &txoutindex_to_value_reader); .read_at_unwrap(txoutindex, &txoutindex_to_value_reader);
Some(u64::from(value)) Some(u64::from(value))
}) })
.sum(); .sum();
@@ -447,14 +447,14 @@ fn run_method4(
return None; return None;
} }
let first_txoutindex = vecs.txindex_to_first_txoutindex.unwrap_read_( let first_txoutindex = vecs.txindex_to_first_txoutindex.read_at_unwrap(
outpoint.txindex().to_usize(), outpoint.txindex().to_usize(),
&txindex_to_first_txoutindex_reader, &txindex_to_first_txoutindex_reader,
); );
let txoutindex = first_txoutindex.to_usize() + usize::from(outpoint.vout()); let txoutindex = first_txoutindex.to_usize() + usize::from(outpoint.vout());
let value = vecs let value = vecs
.txoutindex_to_value .txoutindex_to_value
.unwrap_read_(txoutindex, &txoutindex_to_value_reader); .read_at_unwrap(txoutindex, &txoutindex_to_value_reader);
Some(u64::from(value)) Some(u64::from(value))
}) })
.sum(); .sum();
@@ -491,11 +491,11 @@ fn run_method5(
.map(|i| { .map(|i| {
( (
vecs.txoutindex_to_value vecs.txoutindex_to_value
.unwrap_read_(i, &txoutindex_to_value_reader), .read_at_unwrap(i, &txoutindex_to_value_reader),
vecs.txoutindex_to_outputtype vecs.txoutindex_to_outputtype
.unwrap_read_(i, &txoutindex_to_outputtype_reader), .read_at_unwrap(i, &txoutindex_to_outputtype_reader),
vecs.txoutindex_to_typeindex vecs.txoutindex_to_typeindex
.unwrap_read_(i, &txoutindex_to_typeindex_reader), .read_at_unwrap(i, &txoutindex_to_typeindex_reader),
) )
}) })
.collect(); .collect();
@@ -509,20 +509,20 @@ fn run_method5(
.filter_map(|i| { .filter_map(|i| {
let outpoint = vecs let outpoint = vecs
.txinindex_to_outpoint .txinindex_to_outpoint
.unwrap_read_(i, &txinindex_to_outpoint_reader); .read_at_unwrap(i, &txinindex_to_outpoint_reader);
if outpoint.is_coinbase() { if outpoint.is_coinbase() {
return None; return None;
} }
let first_txoutindex = vecs.txindex_to_first_txoutindex.unwrap_read_( let first_txoutindex = vecs.txindex_to_first_txoutindex.read_at_unwrap(
outpoint.txindex().to_usize(), outpoint.txindex().to_usize(),
&txindex_to_first_txoutindex_reader, &txindex_to_first_txoutindex_reader,
); );
let txoutindex = first_txoutindex.to_usize() + usize::from(outpoint.vout()); let txoutindex = first_txoutindex.to_usize() + usize::from(outpoint.vout());
let value = vecs let value = vecs
.txoutindex_to_value .txoutindex_to_value
.unwrap_read_(txoutindex, &txoutindex_to_value_reader); .read_at_unwrap(txoutindex, &txoutindex_to_value_reader);
Some(u64::from(value)) Some(u64::from(value))
}) })
.sum(); .sum();
@@ -585,16 +585,17 @@ fn run_method6(
let txoutindex_to_value_reader = vecs.txoutindex_to_value.create_reader(); let txoutindex_to_value_reader = vecs.txoutindex_to_value.create_reader();
// Prefetch all first_txoutindexes in parallel // Prefetch all first_txoutindexes in parallel
let first_txoutindexes: Vec<Option<_>> = outpoints let first_txoutindexes: Vec<Option<_>> =
outpoints
.par_iter() .par_iter()
.map(|op| { .map(|op| {
if op.is_coinbase() { if op.is_coinbase() {
return None; return None;
} }
Some( Some(vecs.txindex_to_first_txoutindex.read_at_unwrap(
vecs.txindex_to_first_txoutindex op.txindex().to_usize(),
.unwrap_read_(op.txindex().to_usize(), &txindex_to_first_txoutindex_reader), &txindex_to_first_txoutindex_reader,
) ))
}) })
.collect(); .collect();
@@ -607,7 +608,7 @@ fn run_method6(
let txoutindex = first_txoutindex.to_usize() + usize::from(op.vout()); let txoutindex = first_txoutindex.to_usize() + usize::from(op.vout());
let value = vecs let value = vecs
.txoutindex_to_value .txoutindex_to_value
.unwrap_read_(txoutindex, &txoutindex_to_value_reader); .read_at_unwrap(txoutindex, &txoutindex_to_value_reader);
Some(u64::from(value)) Some(u64::from(value))
}) })
.sum(); .sum();
@@ -648,11 +649,11 @@ fn run_method7(
.map(|i| { .map(|i| {
( (
vecs.txoutindex_to_value vecs.txoutindex_to_value
.unwrap_read_(i, &txoutindex_to_value_reader), .read_at_unwrap(i, &txoutindex_to_value_reader),
vecs.txoutindex_to_outputtype vecs.txoutindex_to_outputtype
.unwrap_read_(i, &txoutindex_to_outputtype_reader), .read_at_unwrap(i, &txoutindex_to_outputtype_reader),
vecs.txoutindex_to_typeindex vecs.txoutindex_to_typeindex
.unwrap_read_(i, &txoutindex_to_typeindex_reader), .read_at_unwrap(i, &txoutindex_to_typeindex_reader),
) )
}) })
.collect(); .collect();
@@ -664,20 +665,20 @@ fn run_method7(
.filter_map(|i| { .filter_map(|i| {
let outpoint = vecs let outpoint = vecs
.txinindex_to_outpoint .txinindex_to_outpoint
.unwrap_read_(i, &txinindex_to_outpoint_reader); .read_at_unwrap(i, &txinindex_to_outpoint_reader);
if outpoint.is_coinbase() { if outpoint.is_coinbase() {
return None; return None;
} }
let first_txoutindex = vecs.txindex_to_first_txoutindex.unwrap_read_( let first_txoutindex = vecs.txindex_to_first_txoutindex.read_at_unwrap(
outpoint.txindex().to_usize(), outpoint.txindex().to_usize(),
&txindex_to_first_txoutindex_reader, &txindex_to_first_txoutindex_reader,
); );
let txoutindex = first_txoutindex.to_usize() + usize::from(outpoint.vout()); let txoutindex = first_txoutindex.to_usize() + usize::from(outpoint.vout());
let value = vecs let value = vecs
.txoutindex_to_value .txoutindex_to_value
.unwrap_read_(txoutindex, &txoutindex_to_value_reader); .read_at_unwrap(txoutindex, &txoutindex_to_value_reader);
Some(u64::from(value)) Some(u64::from(value))
}) })
.sum(); .sum();
@@ -720,11 +721,11 @@ fn run_method8(
let i = block_start + offset; let i = block_start + offset;
( (
vecs.txoutindex_to_value vecs.txoutindex_to_value
.unwrap_read_(i, &txoutindex_to_value_reader), .read_at_unwrap(i, &txoutindex_to_value_reader),
vecs.txoutindex_to_outputtype vecs.txoutindex_to_outputtype
.unwrap_read_(i, &txoutindex_to_outputtype_reader), .read_at_unwrap(i, &txoutindex_to_outputtype_reader),
vecs.txoutindex_to_typeindex vecs.txoutindex_to_typeindex
.unwrap_read_(i, &txoutindex_to_typeindex_reader), .read_at_unwrap(i, &txoutindex_to_typeindex_reader),
) )
}) })
.collect::<Vec<_>>() .collect::<Vec<_>>()
@@ -744,20 +745,20 @@ fn run_method8(
let i = input_block_start + offset; let i = input_block_start + offset;
let outpoint = vecs let outpoint = vecs
.txinindex_to_outpoint .txinindex_to_outpoint
.unwrap_read_(i, &txinindex_to_outpoint_reader); .read_at_unwrap(i, &txinindex_to_outpoint_reader);
if outpoint.is_coinbase() { if outpoint.is_coinbase() {
return None; return None;
} }
let first_txoutindex = vecs.txindex_to_first_txoutindex.unwrap_read_( let first_txoutindex = vecs.txindex_to_first_txoutindex.read_at_unwrap(
outpoint.txindex().to_usize(), outpoint.txindex().to_usize(),
&txindex_to_first_txoutindex_reader, &txindex_to_first_txoutindex_reader,
); );
let txoutindex = first_txoutindex.to_usize() + usize::from(outpoint.vout()); let txoutindex = first_txoutindex.to_usize() + usize::from(outpoint.vout());
let value = vecs let value = vecs
.txoutindex_to_value .txoutindex_to_value
.unwrap_read_(txoutindex, &txoutindex_to_value_reader); .read_at_unwrap(txoutindex, &txoutindex_to_value_reader);
Some(u64::from(value)) Some(u64::from(value))
}) })
.collect::<Vec<_>>() .collect::<Vec<_>>()

View File

@@ -8,31 +8,30 @@ use brk_iterator::Blocks;
use brk_rpc::Client; use brk_rpc::Client;
use brk_store::AnyStore; use brk_store::AnyStore;
use brk_types::{ use brk_types::{
AddressBytes, AddressBytesHash, BlockHashPrefix, Height, OutPoint, OutputType, Sats, AddressBytes, AddressBytesHash, AddressTypeAddressIndexOutPoint,
StoredBool, Timestamp, TxInIndex, TxIndex, TxOutIndex, Txid, TxidPrefix, TypeIndex, AddressTypeAddressIndexTxIndex, BlockHashPrefix, Height, OutPoint, OutputType, Sats,
TypeIndexAndOutPoint, TypeIndexAndTxIndex, Unit, Version, Vin, Vout, StoredBool, Timestamp, TxInIndex, TxIndex, TxOutIndex, Txid, TxidPrefix, TypeIndex, Unit,
Version, Vin, Vout,
}; };
use log::{error, info}; use log::{error, info};
use rayon::prelude::*; use rayon::prelude::*;
use rustc_hash::{FxHashMap, FxHashSet}; use rustc_hash::{FxHashMap, FxHashSet};
use vecdb::{AnyVec, Exit, GenericStoredVec, Reader, VecIteratorExtended}; use vecdb::{AnyVec, Exit, GenericStoredVec, Reader, VecIteratorExtended};
mod indexes; mod indexes;
// mod stores_redb; mod stores_v2;
// mod stores_v2; // mod stores_v3;
mod stores_v3;
mod vecs; mod vecs;
pub use indexes::*; pub use indexes::*;
// pub use stores_redb::*; pub use stores_v2::*;
// pub use stores_v2::*; // pub use stores_v3::*;
pub use stores_v3::*;
pub use vecs::*; pub use vecs::*;
// One version for all data sources // One version for all data sources
// Increment on **change _OR_ addition** // Increment on **change _OR_ addition**
const VERSION: Version = Version::new(23); const VERSION: Version = Version::new(23);
const SNAPSHOT_BLOCK_RANGE: usize = 1_000; const SNAPSHOT_BLOCK_RANGE: usize = 1_000;
const COLLISIONS_CHECKED_UP_TO: Height = Height::new(0); const COLLISIONS_CHECKED_UP_TO: Height = Height::new(920_000);
#[derive(Clone)] #[derive(Clone)]
pub struct Indexer { pub struct Indexer {
@@ -262,7 +261,7 @@ impl Indexer {
let vout = Vout::from(outpoint.vout); let vout = Vout::from(outpoint.vout);
let txoutindex = vecs.txindex_to_first_txoutindex.get_pushed_or_read(prev_txindex, &readers.txindex_to_first_txoutindex)? let txoutindex = vecs.txindex_to_first_txoutindex.get_pushed_or_read_with(prev_txindex, &readers.txindex_to_first_txoutindex)?
.ok_or(Error::Str("Expect txoutindex to not be none")) .ok_or(Error::Str("Expect txoutindex to not be none"))
.inspect_err(|_| { .inspect_err(|_| {
dbg!(outpoint.txid, prev_txindex, vout); dbg!(outpoint.txid, prev_txindex, vout);
@@ -271,7 +270,7 @@ impl Indexer {
let outpoint = OutPoint::new(prev_txindex, vout); let outpoint = OutPoint::new(prev_txindex, vout);
let outputtype = vecs.txoutindex_to_outputtype.get_pushed_or_read(txoutindex, &readers.txoutindex_to_outputtype)? let outputtype = vecs.txoutindex_to_outputtype.get_pushed_or_read_with(txoutindex, &readers.txoutindex_to_outputtype)?
.ok_or(Error::Str("Expect outputtype to not be none"))?; .ok_or(Error::Str("Expect outputtype to not be none"))?;
let mut tuple = ( let mut tuple = (
@@ -286,7 +285,7 @@ impl Indexer {
if outputtype.is_address() { if outputtype.is_address() {
let typeindex = vecs let typeindex = vecs
.txoutindex_to_typeindex .txoutindex_to_typeindex
.get_pushed_or_read(txoutindex, &readers.txoutindex_to_typeindex)? .get_pushed_or_read_with(txoutindex, &readers.txoutindex_to_typeindex)?
.ok_or(Error::Str("Expect typeindex to not be none"))?; .ok_or(Error::Str("Expect typeindex to not be none"))?;
tuple.3 = Some((outputtype, typeindex)); tuple.3 = Some((outputtype, typeindex));
} }
@@ -374,56 +373,56 @@ impl Indexer {
let prev_addressbytes_opt = match outputtype { let prev_addressbytes_opt = match outputtype {
OutputType::P2PK65 => vecs OutputType::P2PK65 => vecs
.p2pk65addressindex_to_p2pk65bytes .p2pk65addressindex_to_p2pk65bytes
.get_pushed_or_read( .get_pushed_or_read_with(
typeindex.into(), typeindex.into(),
&readers.p2pk65addressindex_to_p2pk65bytes, &readers.p2pk65addressindex_to_p2pk65bytes,
)? )?
.map(AddressBytes::from), .map(AddressBytes::from),
OutputType::P2PK33 => vecs OutputType::P2PK33 => vecs
.p2pk33addressindex_to_p2pk33bytes .p2pk33addressindex_to_p2pk33bytes
.get_pushed_or_read( .get_pushed_or_read_with(
typeindex.into(), typeindex.into(),
&readers.p2pk33addressindex_to_p2pk33bytes, &readers.p2pk33addressindex_to_p2pk33bytes,
)? )?
.map(AddressBytes::from), .map(AddressBytes::from),
OutputType::P2PKH => vecs OutputType::P2PKH => vecs
.p2pkhaddressindex_to_p2pkhbytes .p2pkhaddressindex_to_p2pkhbytes
.get_pushed_or_read( .get_pushed_or_read_with(
typeindex.into(), typeindex.into(),
&readers.p2pkhaddressindex_to_p2pkhbytes, &readers.p2pkhaddressindex_to_p2pkhbytes,
)? )?
.map(AddressBytes::from), .map(AddressBytes::from),
OutputType::P2SH => vecs OutputType::P2SH => vecs
.p2shaddressindex_to_p2shbytes .p2shaddressindex_to_p2shbytes
.get_pushed_or_read( .get_pushed_or_read_with(
typeindex.into(), typeindex.into(),
&readers.p2shaddressindex_to_p2shbytes, &readers.p2shaddressindex_to_p2shbytes,
)? )?
.map(AddressBytes::from), .map(AddressBytes::from),
OutputType::P2WPKH => vecs OutputType::P2WPKH => vecs
.p2wpkhaddressindex_to_p2wpkhbytes .p2wpkhaddressindex_to_p2wpkhbytes
.get_pushed_or_read( .get_pushed_or_read_with(
typeindex.into(), typeindex.into(),
&readers.p2wpkhaddressindex_to_p2wpkhbytes, &readers.p2wpkhaddressindex_to_p2wpkhbytes,
)? )?
.map(AddressBytes::from), .map(AddressBytes::from),
OutputType::P2WSH => vecs OutputType::P2WSH => vecs
.p2wshaddressindex_to_p2wshbytes .p2wshaddressindex_to_p2wshbytes
.get_pushed_or_read( .get_pushed_or_read_with(
typeindex.into(), typeindex.into(),
&readers.p2wshaddressindex_to_p2wshbytes, &readers.p2wshaddressindex_to_p2wshbytes,
)? )?
.map(AddressBytes::from), .map(AddressBytes::from),
OutputType::P2TR => vecs OutputType::P2TR => vecs
.p2traddressindex_to_p2trbytes .p2traddressindex_to_p2trbytes
.get_pushed_or_read( .get_pushed_or_read_with(
typeindex.into(), typeindex.into(),
&readers.p2traddressindex_to_p2trbytes, &readers.p2traddressindex_to_p2trbytes,
)? )?
.map(AddressBytes::from), .map(AddressBytes::from),
OutputType::P2A => vecs OutputType::P2A => vecs
.p2aaddressindex_to_p2abytes .p2aaddressindex_to_p2abytes
.get_pushed_or_read( .get_pushed_or_read_with(
typeindex.into(), typeindex.into(),
&readers.p2aaddressindex_to_p2abytes, &readers.p2aaddressindex_to_p2abytes,
)? )?
@@ -573,12 +572,17 @@ impl Indexer {
if outputtype.is_unspendable() { if outputtype.is_unspendable() {
continue; continue;
} else if outputtype.is_address() { } else if outputtype.is_address() {
let addresstype = outputtype;
let addressindex = typeindex;
stores stores
.addresstype_to_typeindex_and_txindex .addresstype_to_addressindex_and_txindex
.get_mut(outputtype)
.unwrap()
.insert_if_needed( .insert_if_needed(
TypeIndexAndTxIndex::from((typeindex, txindex)), AddressTypeAddressIndexTxIndex::from((
addresstype,
addressindex,
txindex,
)),
Unit, Unit,
height, height,
); );
@@ -589,12 +593,17 @@ impl Indexer {
if same_block_spent_outpoints.contains(&outpoint) { if same_block_spent_outpoints.contains(&outpoint) {
same_block_output_info.insert(outpoint, (outputtype, typeindex)); same_block_output_info.insert(outpoint, (outputtype, typeindex));
} else if outputtype.is_address() { } else if outputtype.is_address() {
let addresstype = outputtype;
let addressindex = typeindex;
stores stores
.addresstype_to_typeindex_and_unspentoutpoint .addresstype_to_addressindex_and_unspentoutpoint
.get_mut(outputtype)
.unwrap()
.insert_if_needed( .insert_if_needed(
TypeIndexAndOutPoint::from((typeindex, outpoint)), AddressTypeAddressIndexOutPoint::from((
addresstype,
addressindex,
outpoint,
)),
Unit, Unit,
height, height,
); );
@@ -607,7 +616,7 @@ impl Indexer {
// let i = Instant::now(); // let i = Instant::now();
for (txinindex, input_source) in txins { for (txinindex, input_source) in txins {
let (vin, txindex, outpoint, outputtype_typeindex_opt) = match input_source { let (vin, txindex, outpoint, addresstype_addressindex_opt) = match input_source {
InputSource::PreviousBlock(tuple) => tuple, InputSource::PreviousBlock(tuple) => tuple,
InputSource::SameBlock((txindex, txin, vin, outpoint)) => { InputSource::SameBlock((txindex, txin, vin, outpoint)) => {
let mut tuple = (vin, txindex, outpoint, None); let mut tuple = (vin, txindex, outpoint, None);
@@ -636,23 +645,28 @@ impl Indexer {
vecs.txinindex_to_outpoint vecs.txinindex_to_outpoint
.push_if_needed(txinindex, outpoint)?; .push_if_needed(txinindex, outpoint)?;
let Some((outputtype, typeindex)) = outputtype_typeindex_opt else { let Some((addresstype, addressindex)) = addresstype_addressindex_opt else {
continue; continue;
}; };
stores stores
.addresstype_to_typeindex_and_txindex .addresstype_to_addressindex_and_txindex
.get_mut_unwrap(outputtype)
.insert_if_needed( .insert_if_needed(
TypeIndexAndTxIndex::from((typeindex, txindex)), AddressTypeAddressIndexTxIndex::from((addresstype, addressindex, txindex)),
Unit, Unit,
height, height,
); );
stores stores
.addresstype_to_typeindex_and_unspentoutpoint .addresstype_to_addressindex_and_unspentoutpoint
.get_mut_unwrap(outputtype) .remove_if_needed(
.remove_if_needed(TypeIndexAndOutPoint::from((typeindex, outpoint)), height); AddressTypeAddressIndexOutPoint::from((
addresstype,
addressindex,
outpoint,
)),
height,
);
} }
// println!("txins.into_iter(): {:?}", i.elapsed()); // println!("txins.into_iter(): {:?}", i.elapsed());

View File

@@ -1,408 +0,0 @@
use std::{fs, path::Path, sync::Arc};
use brk_error::Result;
use brk_grouper::ByAddressType;
use brk_store::{AnyStore, StoreRedb as Store};
use brk_types::{
AddressBytes, AddressBytesHash, BlockHashPrefix, Height, OutPoint, StoredString, TxIndex,
TxOutIndex, TxidPrefix, TypeIndex, TypeIndexAndOutPoint, TypeIndexAndTxIndex, Unit, Version,
Vout,
};
use rayon::prelude::*;
use redb::Database;
use vecdb::{AnyVec, GenericStoredVec, StoredIndex, VecIterator, VecIteratorExtended};
use crate::Indexes;
use super::Vecs;
#[derive(Clone)]
pub struct Stores {
pub database: Arc<Database>,
pub addressbyteshash_to_typeindex: Store<AddressBytesHash, TypeIndex>,
pub blockhashprefix_to_height: Store<BlockHashPrefix, Height>,
pub height_to_coinbase_tag: Store<Height, StoredString>,
pub txidprefix_to_txindex: Store<TxidPrefix, TxIndex>,
pub addresstype_to_typeindex_and_txindex: ByAddressType<Store<TypeIndexAndTxIndex, Unit>>,
pub addresstype_to_typeindex_and_unspentoutpoint:
ByAddressType<Store<TypeIndexAndOutPoint, Unit>>,
}
impl Stores {
pub fn forced_import(parent: &Path, version: Version) -> Result<Self> {
let pathbuf = parent.join("stores");
let path = pathbuf.as_path();
fs::create_dir_all(&pathbuf)?;
let database = Arc::new(match brk_store::open_redb_database(path) {
Ok(database) => database,
Err(_) => {
fs::remove_dir_all(path)?;
return Self::forced_import(path, version);
}
});
let database_ref = &database;
let create_addressindex_and_txindex_store = |index| {
Store::import(
database_ref,
path,
&format!("a2t{}", index),
version,
Some(false),
)
};
let create_addressindex_and_unspentoutpoint_store =
|index| Store::import(database_ref, path, &format!("a2u{}", index), version, None);
Ok(Self {
database: database.clone(),
height_to_coinbase_tag: Store::import(database_ref, path, "h2c", version, None)?,
addressbyteshash_to_typeindex: Store::import(database_ref, path, "a2t", version, None)?,
blockhashprefix_to_height: Store::import(database_ref, path, "b2h", version, None)?,
txidprefix_to_txindex: Store::import(database_ref, path, "t2t", version, None)?,
addresstype_to_typeindex_and_txindex: ByAddressType::new_with_index(
create_addressindex_and_txindex_store,
)?,
addresstype_to_typeindex_and_unspentoutpoint: ByAddressType::new_with_index(
create_addressindex_and_unspentoutpoint_store,
)?,
})
}
pub fn starting_height(&self) -> Height {
self.iter_any_store()
.map(|store| {
// let height =
store.height().map(Height::incremented).unwrap_or_default()
// dbg!((height, store.name()));
})
.min()
.unwrap()
}
pub fn commit(&mut self, height: Height) -> Result<()> {
[
&mut self.addressbyteshash_to_typeindex as &mut dyn AnyStore,
&mut self.blockhashprefix_to_height,
&mut self.height_to_coinbase_tag,
&mut self.txidprefix_to_txindex,
]
// .into_iter() // Changed from par_iter_mut()
.into_par_iter() // Changed from par_iter_mut()
.chain(
self.addresstype_to_typeindex_and_txindex
// .iter_mut()
.par_iter_mut()
.map(|s| s as &mut dyn AnyStore),
)
.chain(
self.addresstype_to_typeindex_and_unspentoutpoint
// .iter_mut()
.par_iter_mut()
.map(|s| s as &mut dyn AnyStore),
)
.try_for_each(|store| store.commit(height))?;
Ok(())
// self.database
// .persist(PersistMode::SyncAll)
// .map_err(|e| e.into())
}
fn iter_any_store(&self) -> impl Iterator<Item = &dyn AnyStore> {
[
&self.addressbyteshash_to_typeindex as &dyn AnyStore,
&self.blockhashprefix_to_height,
&self.height_to_coinbase_tag,
&self.txidprefix_to_txindex,
]
.into_iter()
.chain(
self.addresstype_to_typeindex_and_txindex
.iter()
.map(|s| s as &dyn AnyStore),
)
.chain(
self.addresstype_to_typeindex_and_unspentoutpoint
.iter()
.map(|s| s as &dyn AnyStore),
)
}
pub fn rollback_if_needed(
&mut self,
vecs: &mut Vecs,
starting_indexes: &Indexes,
) -> Result<()> {
if self.addressbyteshash_to_typeindex.is_empty()?
&& self.blockhashprefix_to_height.is_empty()?
&& self.txidprefix_to_txindex.is_empty()?
&& self.height_to_coinbase_tag.is_empty()?
&& self
.addresstype_to_typeindex_and_txindex
.iter()
.map(|s| s.is_empty())
.collect::<Result<Vec<_>>>()?
.into_iter()
.all(|empty| empty)
&& self
.addresstype_to_typeindex_and_unspentoutpoint
.iter()
.map(|s| s.is_empty())
.collect::<Result<Vec<_>>>()?
.into_iter()
.all(|empty| empty)
{
return Ok(());
}
if starting_indexes.height != Height::ZERO {
vecs.height_to_blockhash
.iter()?
.skip(starting_indexes.height.to_usize())
.map(BlockHashPrefix::from)
.for_each(|prefix| {
self.blockhashprefix_to_height.remove(prefix);
});
(starting_indexes.height.to_usize()..vecs.height_to_blockhash.len())
.map(Height::from)
.for_each(|h| {
self.height_to_coinbase_tag.remove(h);
});
if let Ok(mut index) = vecs
.height_to_first_p2pk65addressindex
.one_shot_read(starting_indexes.height)
{
let mut p2pk65addressindex_to_p2pk65bytes_iter =
vecs.p2pk65addressindex_to_p2pk65bytes.iter()?;
while let Some(typedbytes) = p2pk65addressindex_to_p2pk65bytes_iter.get(index) {
let bytes = AddressBytes::from(typedbytes);
let hash = AddressBytesHash::from(&bytes);
self.addressbyteshash_to_typeindex.remove(hash);
index.increment();
}
}
if let Ok(mut index) = vecs
.height_to_first_p2pk33addressindex
.one_shot_read(starting_indexes.height)
{
let mut p2pk33addressindex_to_p2pk33bytes_iter =
vecs.p2pk33addressindex_to_p2pk33bytes.iter()?;
while let Some(typedbytes) = p2pk33addressindex_to_p2pk33bytes_iter.get(index) {
let bytes = AddressBytes::from(typedbytes);
let hash = AddressBytesHash::from(&bytes);
self.addressbyteshash_to_typeindex.remove(hash);
index.increment();
}
}
if let Ok(mut index) = vecs
.height_to_first_p2pkhaddressindex
.one_shot_read(starting_indexes.height)
{
let mut p2pkhaddressindex_to_p2pkhbytes_iter =
vecs.p2pkhaddressindex_to_p2pkhbytes.iter()?;
while let Some(typedbytes) = p2pkhaddressindex_to_p2pkhbytes_iter.get(index) {
let bytes = AddressBytes::from(typedbytes);
let hash = AddressBytesHash::from(&bytes);
self.addressbyteshash_to_typeindex.remove(hash);
index.increment();
}
}
if let Ok(mut index) = vecs
.height_to_first_p2shaddressindex
.one_shot_read(starting_indexes.height)
{
let mut p2shaddressindex_to_p2shbytes_iter =
vecs.p2shaddressindex_to_p2shbytes.iter()?;
while let Some(typedbytes) = p2shaddressindex_to_p2shbytes_iter.get(index) {
let bytes = AddressBytes::from(typedbytes);
let hash = AddressBytesHash::from(&bytes);
self.addressbyteshash_to_typeindex.remove(hash);
index.increment();
}
}
if let Ok(mut index) = vecs
.height_to_first_p2traddressindex
.one_shot_read(starting_indexes.height)
{
let mut p2traddressindex_to_p2trbytes_iter =
vecs.p2traddressindex_to_p2trbytes.iter()?;
while let Some(typedbytes) = p2traddressindex_to_p2trbytes_iter.get(index) {
let bytes = AddressBytes::from(typedbytes);
let hash = AddressBytesHash::from(&bytes);
self.addressbyteshash_to_typeindex.remove(hash);
index.increment();
}
}
if let Ok(mut index) = vecs
.height_to_first_p2wpkhaddressindex
.one_shot_read(starting_indexes.height)
{
let mut p2wpkhaddressindex_to_p2wpkhbytes_iter =
vecs.p2wpkhaddressindex_to_p2wpkhbytes.iter()?;
while let Some(typedbytes) = p2wpkhaddressindex_to_p2wpkhbytes_iter.get(index) {
let bytes = AddressBytes::from(typedbytes);
let hash = AddressBytesHash::from(&bytes);
self.addressbyteshash_to_typeindex.remove(hash);
index.increment();
}
}
if let Ok(mut index) = vecs
.height_to_first_p2wshaddressindex
.one_shot_read(starting_indexes.height)
{
let mut p2wshaddressindex_to_p2wshbytes_iter =
vecs.p2wshaddressindex_to_p2wshbytes.iter()?;
while let Some(typedbytes) = p2wshaddressindex_to_p2wshbytes_iter.get(index) {
let bytes = AddressBytes::from(typedbytes);
let hash = AddressBytesHash::from(&bytes);
self.addressbyteshash_to_typeindex.remove(hash);
index.increment();
}
}
if let Ok(mut index) = vecs
.height_to_first_p2aaddressindex
.one_shot_read(starting_indexes.height)
{
let mut p2aaddressindex_to_p2abytes_iter =
vecs.p2aaddressindex_to_p2abytes.iter()?;
while let Some(typedbytes) = p2aaddressindex_to_p2abytes_iter.get(index) {
let bytes = AddressBytes::from(typedbytes);
let hash = AddressBytesHash::from(&bytes);
self.addressbyteshash_to_typeindex.remove(hash);
index.increment();
}
}
} else {
unreachable!();
// self.blockhashprefix_to_height.reset()?;
// self.addressbyteshash_to_typeindex.reset()?;
}
if starting_indexes.txindex != TxIndex::ZERO {
vecs.txindex_to_txid
.iter()?
.enumerate()
.skip(starting_indexes.txindex.to_usize())
.for_each(|(txindex, txid)| {
let txindex = TxIndex::from(txindex);
let txidprefix = TxidPrefix::from(&txid);
// "d5d27987d2a3dfc724e359870c6644b40e497bdc0589a033220fe15429d88599"
let is_not_first_dup = txindex != TxIndex::new(142783)
|| txidprefix != TxidPrefix::from([153, 133, 216, 41, 84, 225, 15, 34]);
// "e3bf3d07d4b0375638d5f1db5255fe07ba2c4cb067cd81b84ee974b6585fb468"
let is_not_second_dup = txindex != TxIndex::new(142841)
|| txidprefix != TxidPrefix::from([104, 180, 95, 88, 182, 116, 233, 78]);
if is_not_first_dup && is_not_second_dup {
self.txidprefix_to_txindex.remove(txidprefix);
}
});
} else {
unreachable!();
// self.txidprefix_to_txindex.reset()?;
}
if starting_indexes.txoutindex != TxOutIndex::ZERO {
let mut txoutindex_to_txindex_iter = vecs.txoutindex_to_txindex.iter()?;
let mut txindex_to_first_txoutindex_iter = vecs.txindex_to_first_txoutindex.iter()?;
vecs.txoutindex_to_outputtype
.iter()?
.enumerate()
.skip(starting_indexes.txoutindex.to_usize())
.zip(
vecs.txoutindex_to_typeindex
.iter()?
.skip(starting_indexes.txoutindex.to_usize()),
)
.filter(|((_, outputtype), _)| outputtype.is_address())
.for_each(|((txoutindex, outputtype), typeindex)| {
let txindex = txoutindex_to_txindex_iter.unsafe_get_(txoutindex);
let vout = Vout::from(
txoutindex.to_usize()
- txindex_to_first_txoutindex_iter
.unsafe_get(txindex)
.to_usize(),
);
let outpoint = OutPoint::new(txindex, vout);
self.addresstype_to_typeindex_and_unspentoutpoint
.get_mut(outputtype)
.unwrap()
.remove(TypeIndexAndOutPoint::from((typeindex, outpoint)));
});
// Add back outputs that were spent after the rollback point
let mut txindex_to_first_txoutindex_iter = vecs.txindex_to_first_txoutindex.iter()?;
let mut txoutindex_to_outputtype_iter = vecs.txoutindex_to_outputtype.iter()?;
let mut txoutindex_to_typeindex_iter = vecs.txoutindex_to_typeindex.iter()?;
vecs.txinindex_to_outpoint
.iter()?
.skip(starting_indexes.txinindex.to_usize())
.for_each(|outpoint| {
if outpoint.is_coinbase() {
return;
}
let txindex = outpoint.txindex();
let vout = outpoint.vout();
// Calculate txoutindex from txindex and vout
let txoutindex = txindex_to_first_txoutindex_iter.unsafe_get(txindex) + vout;
// Only process if this output was created before the rollback point
if txoutindex < starting_indexes.txoutindex {
let outputtype = txoutindex_to_outputtype_iter.unsafe_get(txoutindex);
if outputtype.is_address() {
let typeindex = txoutindex_to_typeindex_iter.unsafe_get(txoutindex);
self.addresstype_to_typeindex_and_unspentoutpoint
.get_mut(outputtype)
.unwrap()
.insert(TypeIndexAndOutPoint::from((typeindex, outpoint)), Unit);
}
}
});
} else {
unreachable!();
// self.addresstype_to_typeindex_and_txindex
// .iter_mut()
// .try_for_each(|s| s.reset())?;
// self.addresstype_to_typeindex_and_unspentoutpoint
// .iter_mut()
// .try_for_each(|s| s.reset())?;
}
self.commit(starting_indexes.height.decremented().unwrap_or_default())?;
Ok(())
}
}

View File

@@ -1,12 +1,11 @@
use std::{fs, path::Path}; use std::{fs, path::Path};
use brk_error::Result; use brk_error::Result;
use brk_grouper::ByAddressType;
use brk_store::{AnyStore, StoreFjallV2 as Store}; use brk_store::{AnyStore, StoreFjallV2 as Store};
use brk_types::{ use brk_types::{
AddressBytes, AddressBytesHash, BlockHashPrefix, Height, OutPoint, StoredString, TxIndex, AddressBytes, AddressBytesHash, AddressTypeAddressIndexOutPoint,
TxOutIndex, TxidPrefix, TypeIndex, TypeIndexAndOutPoint, TypeIndexAndTxIndex, Unit, Version, AddressTypeAddressIndexTxIndex, BlockHashPrefix, Height, OutPoint, StoredString, TxIndex,
Vout, TxOutIndex, TxidPrefix, TypeIndex, Unit, Version, Vout,
}; };
use fjall2::{PersistMode, TransactionalKeyspace}; use fjall2::{PersistMode, TransactionalKeyspace};
use rayon::prelude::*; use rayon::prelude::*;
@@ -24,9 +23,9 @@ pub struct Stores {
pub blockhashprefix_to_height: Store<BlockHashPrefix, Height>, pub blockhashprefix_to_height: Store<BlockHashPrefix, Height>,
pub height_to_coinbase_tag: Store<Height, StoredString>, pub height_to_coinbase_tag: Store<Height, StoredString>,
pub txidprefix_to_txindex: Store<TxidPrefix, TxIndex>, pub txidprefix_to_txindex: Store<TxidPrefix, TxIndex>,
pub addresstype_to_typeindex_and_txindex: ByAddressType<Store<TypeIndexAndTxIndex, Unit>>, pub addresstype_to_addressindex_and_txindex: Store<AddressTypeAddressIndexTxIndex, Unit>,
pub addresstype_to_typeindex_and_unspentoutpoint: pub addresstype_to_addressindex_and_unspentoutpoint:
ByAddressType<Store<TypeIndexAndOutPoint, Unit>>, Store<AddressTypeAddressIndexOutPoint, Unit>,
} }
impl Stores { impl Stores {
@@ -46,31 +45,26 @@ impl Stores {
let keyspace_ref = &keyspace; let keyspace_ref = &keyspace;
let create_addressindex_and_txindex_store = |index| {
Store::import(
keyspace_ref,
path,
&format!("a2t{}", index),
version,
Some(false),
)
};
let create_addressindex_and_unspentoutpoint_store =
|index| Store::import(keyspace_ref, path, &format!("a2u{}", index), version, None);
Ok(Self { Ok(Self {
keyspace: keyspace.clone(), keyspace: keyspace.clone(),
height_to_coinbase_tag: Store::import(keyspace_ref, path, "h2c", version, None)?, height_to_coinbase_tag: Store::import(keyspace_ref, path, "h2c", version, true)?,
addressbyteshash_to_typeindex: Store::import(keyspace_ref, path, "a2t", version, None)?, addressbyteshash_to_typeindex: Store::import(keyspace_ref, path, "a2t", version, true)?,
blockhashprefix_to_height: Store::import(keyspace_ref, path, "b2h", version, None)?, blockhashprefix_to_height: Store::import(keyspace_ref, path, "b2h", version, true)?,
txidprefix_to_txindex: Store::import(keyspace_ref, path, "t2t", version, None)?, txidprefix_to_txindex: Store::import(keyspace_ref, path, "t2t", version, true)?,
addresstype_to_typeindex_and_txindex: ByAddressType::new_with_index( addresstype_to_addressindex_and_txindex: Store::import(
create_addressindex_and_txindex_store, keyspace_ref,
path,
"aat",
version,
false,
)?, )?,
addresstype_to_typeindex_and_unspentoutpoint: ByAddressType::new_with_index( addresstype_to_addressindex_and_unspentoutpoint: Store::import(
create_addressindex_and_unspentoutpoint_store, keyspace_ref,
path,
"aau",
version,
false,
)?, )?,
}) })
} }
@@ -92,18 +86,10 @@ impl Stores {
&mut self.blockhashprefix_to_height, &mut self.blockhashprefix_to_height,
&mut self.height_to_coinbase_tag, &mut self.height_to_coinbase_tag,
&mut self.txidprefix_to_txindex, &mut self.txidprefix_to_txindex,
&mut self.addresstype_to_addressindex_and_txindex,
&mut self.addresstype_to_addressindex_and_unspentoutpoint,
] ]
.into_par_iter() // Changed from par_iter_mut() .into_par_iter() // Changed from par_iter_mut()
.chain(
self.addresstype_to_typeindex_and_txindex
.par_iter_mut()
.map(|s| s as &mut dyn AnyStore),
)
.chain(
self.addresstype_to_typeindex_and_unspentoutpoint
.par_iter_mut()
.map(|s| s as &mut dyn AnyStore),
)
.try_for_each(|store| store.commit(height))?; .try_for_each(|store| store.commit(height))?;
self.keyspace self.keyspace
@@ -117,18 +103,10 @@ impl Stores {
&self.blockhashprefix_to_height, &self.blockhashprefix_to_height,
&self.height_to_coinbase_tag, &self.height_to_coinbase_tag,
&self.txidprefix_to_txindex, &self.txidprefix_to_txindex,
&self.addresstype_to_addressindex_and_txindex,
&self.addresstype_to_addressindex_and_unspentoutpoint,
] ]
.into_iter() .into_iter()
.chain(
self.addresstype_to_typeindex_and_txindex
.iter()
.map(|s| s as &dyn AnyStore),
)
.chain(
self.addresstype_to_typeindex_and_unspentoutpoint
.iter()
.map(|s| s as &dyn AnyStore),
)
} }
pub fn rollback_if_needed( pub fn rollback_if_needed(
@@ -140,20 +118,10 @@ impl Stores {
&& self.blockhashprefix_to_height.is_empty()? && self.blockhashprefix_to_height.is_empty()?
&& self.txidprefix_to_txindex.is_empty()? && self.txidprefix_to_txindex.is_empty()?
&& self.height_to_coinbase_tag.is_empty()? && self.height_to_coinbase_tag.is_empty()?
&& self.addresstype_to_addressindex_and_txindex.is_empty()?
&& self && self
.addresstype_to_typeindex_and_txindex .addresstype_to_addressindex_and_unspentoutpoint
.iter() .is_empty()?
.map(|s| s.is_empty())
.collect::<Result<Vec<_>>>()?
.into_iter()
.all(|empty| empty)
&& self
.addresstype_to_typeindex_and_unspentoutpoint
.iter()
.map(|s| s.is_empty())
.collect::<Result<Vec<_>>>()?
.into_iter()
.all(|empty| empty)
{ {
return Ok(()); return Ok(());
} }
@@ -175,7 +143,7 @@ impl Stores {
if let Ok(mut index) = vecs if let Ok(mut index) = vecs
.height_to_first_p2pk65addressindex .height_to_first_p2pk65addressindex
.one_shot_read(starting_indexes.height) .read(starting_indexes.height)
{ {
let mut p2pk65addressindex_to_p2pk65bytes_iter = let mut p2pk65addressindex_to_p2pk65bytes_iter =
vecs.p2pk65addressindex_to_p2pk65bytes.iter()?; vecs.p2pk65addressindex_to_p2pk65bytes.iter()?;
@@ -190,7 +158,7 @@ impl Stores {
if let Ok(mut index) = vecs if let Ok(mut index) = vecs
.height_to_first_p2pk33addressindex .height_to_first_p2pk33addressindex
.one_shot_read(starting_indexes.height) .read(starting_indexes.height)
{ {
let mut p2pk33addressindex_to_p2pk33bytes_iter = let mut p2pk33addressindex_to_p2pk33bytes_iter =
vecs.p2pk33addressindex_to_p2pk33bytes.iter()?; vecs.p2pk33addressindex_to_p2pk33bytes.iter()?;
@@ -205,7 +173,7 @@ impl Stores {
if let Ok(mut index) = vecs if let Ok(mut index) = vecs
.height_to_first_p2pkhaddressindex .height_to_first_p2pkhaddressindex
.one_shot_read(starting_indexes.height) .read(starting_indexes.height)
{ {
let mut p2pkhaddressindex_to_p2pkhbytes_iter = let mut p2pkhaddressindex_to_p2pkhbytes_iter =
vecs.p2pkhaddressindex_to_p2pkhbytes.iter()?; vecs.p2pkhaddressindex_to_p2pkhbytes.iter()?;
@@ -220,7 +188,7 @@ impl Stores {
if let Ok(mut index) = vecs if let Ok(mut index) = vecs
.height_to_first_p2shaddressindex .height_to_first_p2shaddressindex
.one_shot_read(starting_indexes.height) .read(starting_indexes.height)
{ {
let mut p2shaddressindex_to_p2shbytes_iter = let mut p2shaddressindex_to_p2shbytes_iter =
vecs.p2shaddressindex_to_p2shbytes.iter()?; vecs.p2shaddressindex_to_p2shbytes.iter()?;
@@ -235,7 +203,7 @@ impl Stores {
if let Ok(mut index) = vecs if let Ok(mut index) = vecs
.height_to_first_p2traddressindex .height_to_first_p2traddressindex
.one_shot_read(starting_indexes.height) .read(starting_indexes.height)
{ {
let mut p2traddressindex_to_p2trbytes_iter = let mut p2traddressindex_to_p2trbytes_iter =
vecs.p2traddressindex_to_p2trbytes.iter()?; vecs.p2traddressindex_to_p2trbytes.iter()?;
@@ -250,7 +218,7 @@ impl Stores {
if let Ok(mut index) = vecs if let Ok(mut index) = vecs
.height_to_first_p2wpkhaddressindex .height_to_first_p2wpkhaddressindex
.one_shot_read(starting_indexes.height) .read(starting_indexes.height)
{ {
let mut p2wpkhaddressindex_to_p2wpkhbytes_iter = let mut p2wpkhaddressindex_to_p2wpkhbytes_iter =
vecs.p2wpkhaddressindex_to_p2wpkhbytes.iter()?; vecs.p2wpkhaddressindex_to_p2wpkhbytes.iter()?;
@@ -265,7 +233,7 @@ impl Stores {
if let Ok(mut index) = vecs if let Ok(mut index) = vecs
.height_to_first_p2wshaddressindex .height_to_first_p2wshaddressindex
.one_shot_read(starting_indexes.height) .read(starting_indexes.height)
{ {
let mut p2wshaddressindex_to_p2wshbytes_iter = let mut p2wshaddressindex_to_p2wshbytes_iter =
vecs.p2wshaddressindex_to_p2wshbytes.iter()?; vecs.p2wshaddressindex_to_p2wshbytes.iter()?;
@@ -280,7 +248,7 @@ impl Stores {
if let Ok(mut index) = vecs if let Ok(mut index) = vecs
.height_to_first_p2aaddressindex .height_to_first_p2aaddressindex
.one_shot_read(starting_indexes.height) .read(starting_indexes.height)
{ {
let mut p2aaddressindex_to_p2abytes_iter = let mut p2aaddressindex_to_p2abytes_iter =
vecs.p2aaddressindex_to_p2abytes.iter()?; vecs.p2aaddressindex_to_p2abytes.iter()?;
@@ -338,21 +306,28 @@ impl Stores {
.skip(starting_indexes.txoutindex.to_usize()), .skip(starting_indexes.txoutindex.to_usize()),
) )
.filter(|((_, outputtype), _)| outputtype.is_address()) .filter(|((_, outputtype), _)| outputtype.is_address())
.for_each(|((txoutindex, outputtype), typeindex)| { .for_each(|((txoutindex, addresstype), addressindex)| {
let txindex = txoutindex_to_txindex_iter.unsafe_get_(txoutindex); let txindex = txoutindex_to_txindex_iter.get_unwrap_at(txoutindex);
self.addresstype_to_addressindex_and_txindex.remove(
AddressTypeAddressIndexTxIndex::from((addresstype, addressindex, txindex)),
);
let vout = Vout::from( let vout = Vout::from(
txoutindex.to_usize() txoutindex.to_usize()
- txindex_to_first_txoutindex_iter - txindex_to_first_txoutindex_iter
.unsafe_get(txindex) .get_unwrap(txindex)
.to_usize(), .to_usize(),
); );
let outpoint = OutPoint::new(txindex, vout); let outpoint = OutPoint::new(txindex, vout);
self.addresstype_to_typeindex_and_unspentoutpoint self.addresstype_to_addressindex_and_unspentoutpoint.remove(
.get_mut(outputtype) AddressTypeAddressIndexOutPoint::from((
.unwrap() addresstype,
.remove(TypeIndexAndOutPoint::from((typeindex, outpoint))); addressindex,
outpoint,
)),
);
}); });
// Add back outputs that were spent after the rollback point // Add back outputs that were spent after the rollback point
@@ -371,19 +346,32 @@ impl Stores {
let vout = outpoint.vout(); let vout = outpoint.vout();
// Calculate txoutindex from txindex and vout // Calculate txoutindex from txindex and vout
let txoutindex = txindex_to_first_txoutindex_iter.unsafe_get(txindex) + vout; let txoutindex = txindex_to_first_txoutindex_iter.get_unwrap(txindex) + vout;
// Only process if this output was created before the rollback point // Only process if this output was created before the rollback point
if txoutindex < starting_indexes.txoutindex { if txoutindex < starting_indexes.txoutindex {
let outputtype = txoutindex_to_outputtype_iter.unsafe_get(txoutindex); let outputtype = txoutindex_to_outputtype_iter.get_unwrap(txoutindex);
if outputtype.is_address() { if outputtype.is_address() {
let typeindex = txoutindex_to_typeindex_iter.unsafe_get(txoutindex); let addresstype = outputtype;
let addressindex = txoutindex_to_typeindex_iter.get_unwrap(txoutindex);
self.addresstype_to_typeindex_and_unspentoutpoint self.addresstype_to_addressindex_and_txindex.remove(
.get_mut(outputtype) AddressTypeAddressIndexTxIndex::from((
.unwrap() addresstype,
.insert(TypeIndexAndOutPoint::from((typeindex, outpoint)), Unit); addressindex,
txindex,
)),
);
self.addresstype_to_addressindex_and_unspentoutpoint.insert(
AddressTypeAddressIndexOutPoint::from((
addresstype,
addressindex,
outpoint,
)),
Unit,
);
} }
} }
}); });

View File

@@ -1,15 +1,13 @@
use std::{fs, path::Path}; use std::{fs, path::Path};
use brk_error::Result; use brk_error::Result;
use brk_grouper::ByAddressType;
use brk_store::{AnyStore, StoreFjallV3 as Store}; use brk_store::{AnyStore, StoreFjallV3 as Store};
use brk_types::{ use brk_types::{
AddressBytes, AddressBytesHash, BlockHashPrefix, Height, OutPoint, StoredString, TxIndex, AddressBytes, AddressBytesHash, AddressTypeAddressIndexOutPoint,
TxOutIndex, TxidPrefix, TypeIndex, TypeIndexAndOutPoint, TypeIndexAndTxIndex, Unit, Version, AddressTypeAddressIndexTxIndex, BlockHashPrefix, Height, OutPoint, StoredString, TxIndex,
Vout, TxOutIndex, TxidPrefix, TypeIndex, Unit, Version, Vout,
}; };
use fjall3::{Database, PersistMode}; use fjall3::{Database, PersistMode};
use log::info;
use rayon::prelude::*; use rayon::prelude::*;
use vecdb::{AnyVec, GenericStoredVec, StoredIndex, VecIterator, VecIteratorExtended}; use vecdb::{AnyVec, GenericStoredVec, StoredIndex, VecIterator, VecIteratorExtended};
@@ -25,9 +23,9 @@ pub struct Stores {
pub blockhashprefix_to_height: Store<BlockHashPrefix, Height>, pub blockhashprefix_to_height: Store<BlockHashPrefix, Height>,
pub height_to_coinbase_tag: Store<Height, StoredString>, pub height_to_coinbase_tag: Store<Height, StoredString>,
pub txidprefix_to_txindex: Store<TxidPrefix, TxIndex>, pub txidprefix_to_txindex: Store<TxidPrefix, TxIndex>,
pub addresstype_to_typeindex_and_txindex: ByAddressType<Store<TypeIndexAndTxIndex, Unit>>, pub addresstype_to_addressindex_and_txindex: Store<AddressTypeAddressIndexTxIndex, Unit>,
pub addresstype_to_typeindex_and_unspentoutpoint: pub addresstype_to_addressindex_and_unspentoutpoint:
ByAddressType<Store<TypeIndexAndOutPoint, Unit>>, Store<AddressTypeAddressIndexOutPoint, Unit>,
} }
impl Stores { impl Stores {
@@ -47,31 +45,56 @@ impl Stores {
let database_ref = &database; let database_ref = &database;
let create_addressindex_and_txindex_store = |index| {
Store::import(
database_ref,
path,
&format!("a2t{}", index),
version,
Some(false),
)
};
let create_addressindex_and_unspentoutpoint_store =
|index| Store::import(database_ref, path, &format!("a2u{}", index), version, None);
Ok(Self { Ok(Self {
database: database.clone(), database: database.clone(),
height_to_coinbase_tag: Store::import(database_ref, path, "h2c", version, None)?, height_to_coinbase_tag: Store::import(
addressbyteshash_to_typeindex: Store::import(database_ref, path, "a2t", version, None)?, database_ref,
blockhashprefix_to_height: Store::import(database_ref, path, "b2h", version, None)?, path,
txidprefix_to_txindex: Store::import(database_ref, path, "t2t", version, None)?, "height_to_coinbase_tag",
addresstype_to_typeindex_and_txindex: ByAddressType::new_with_index( version,
create_addressindex_and_txindex_store, true,
true,
)?, )?,
addresstype_to_typeindex_and_unspentoutpoint: ByAddressType::new_with_index( addressbyteshash_to_typeindex: Store::import(
create_addressindex_and_unspentoutpoint_store, database_ref,
path,
"addressbyteshash_to_typeindex",
version,
true,
false,
)?,
blockhashprefix_to_height: Store::import(
database_ref,
path,
"blockhashprefix_to_height",
version,
true,
false,
)?,
txidprefix_to_txindex: Store::import(
database_ref,
path,
"txidprefix_to_txindex",
version,
true,
false,
)?,
addresstype_to_addressindex_and_txindex: Store::import(
database_ref,
path,
"addresstype_to_addressindex_and_txindex",
version,
false,
false,
)?,
addresstype_to_addressindex_and_unspentoutpoint: Store::import(
database_ref,
path,
"addresstype_to_addressindex_and_unspentoutpoint",
version,
false,
false,
)?, )?,
}) })
} }
@@ -88,37 +111,17 @@ impl Stores {
} }
pub fn commit(&mut self, height: Height) -> Result<()> { pub fn commit(&mut self, height: Height) -> Result<()> {
info!(
"database.write_buffer_size = {}",
self.database.write_buffer_size()
);
info!("database.journal_count = {}", self.database.journal_count());
[ [
&mut self.addressbyteshash_to_typeindex as &mut dyn AnyStore, &mut self.addressbyteshash_to_typeindex as &mut dyn AnyStore,
&mut self.blockhashprefix_to_height, &mut self.blockhashprefix_to_height,
&mut self.height_to_coinbase_tag, &mut self.height_to_coinbase_tag,
&mut self.txidprefix_to_txindex, &mut self.txidprefix_to_txindex,
&mut self.addresstype_to_addressindex_and_txindex,
&mut self.addresstype_to_addressindex_and_unspentoutpoint,
] ]
.into_par_iter() // Changed from par_iter_mut() .into_par_iter() // Changed from par_iter_mut()
.chain(
self.addresstype_to_typeindex_and_txindex
.par_iter_mut()
.map(|s| s as &mut dyn AnyStore),
)
.chain(
self.addresstype_to_typeindex_and_unspentoutpoint
.par_iter_mut()
.map(|s| s as &mut dyn AnyStore),
)
.try_for_each(|store| store.commit(height))?; .try_for_each(|store| store.commit(height))?;
info!(
"database.write_buffer_size = {}",
self.database.write_buffer_size()
);
info!("database.journal_count = {}", self.database.journal_count());
self.database self.database
.persist(PersistMode::SyncAll) .persist(PersistMode::SyncAll)
.map_err(|e| e.into()) .map_err(|e| e.into())
@@ -130,18 +133,10 @@ impl Stores {
&self.blockhashprefix_to_height, &self.blockhashprefix_to_height,
&self.height_to_coinbase_tag, &self.height_to_coinbase_tag,
&self.txidprefix_to_txindex, &self.txidprefix_to_txindex,
&self.addresstype_to_addressindex_and_txindex,
&self.addresstype_to_addressindex_and_unspentoutpoint,
] ]
.into_iter() .into_iter()
.chain(
self.addresstype_to_typeindex_and_txindex
.iter()
.map(|s| s as &dyn AnyStore),
)
.chain(
self.addresstype_to_typeindex_and_unspentoutpoint
.iter()
.map(|s| s as &dyn AnyStore),
)
} }
pub fn rollback_if_needed( pub fn rollback_if_needed(
@@ -153,20 +148,10 @@ impl Stores {
&& self.blockhashprefix_to_height.is_empty()? && self.blockhashprefix_to_height.is_empty()?
&& self.txidprefix_to_txindex.is_empty()? && self.txidprefix_to_txindex.is_empty()?
&& self.height_to_coinbase_tag.is_empty()? && self.height_to_coinbase_tag.is_empty()?
&& self.addresstype_to_addressindex_and_txindex.is_empty()?
&& self && self
.addresstype_to_typeindex_and_txindex .addresstype_to_addressindex_and_unspentoutpoint
.iter() .is_empty()?
.map(|s| s.is_empty())
.collect::<Result<Vec<_>>>()?
.into_iter()
.all(|empty| empty)
&& self
.addresstype_to_typeindex_and_unspentoutpoint
.iter()
.map(|s| s.is_empty())
.collect::<Result<Vec<_>>>()?
.into_iter()
.all(|empty| empty)
{ {
return Ok(()); return Ok(());
} }
@@ -188,7 +173,7 @@ impl Stores {
if let Ok(mut index) = vecs if let Ok(mut index) = vecs
.height_to_first_p2pk65addressindex .height_to_first_p2pk65addressindex
.one_shot_read(starting_indexes.height) .read(starting_indexes.height)
{ {
let mut p2pk65addressindex_to_p2pk65bytes_iter = let mut p2pk65addressindex_to_p2pk65bytes_iter =
vecs.p2pk65addressindex_to_p2pk65bytes.iter()?; vecs.p2pk65addressindex_to_p2pk65bytes.iter()?;
@@ -203,7 +188,7 @@ impl Stores {
if let Ok(mut index) = vecs if let Ok(mut index) = vecs
.height_to_first_p2pk33addressindex .height_to_first_p2pk33addressindex
.one_shot_read(starting_indexes.height) .read(starting_indexes.height)
{ {
let mut p2pk33addressindex_to_p2pk33bytes_iter = let mut p2pk33addressindex_to_p2pk33bytes_iter =
vecs.p2pk33addressindex_to_p2pk33bytes.iter()?; vecs.p2pk33addressindex_to_p2pk33bytes.iter()?;
@@ -218,7 +203,7 @@ impl Stores {
if let Ok(mut index) = vecs if let Ok(mut index) = vecs
.height_to_first_p2pkhaddressindex .height_to_first_p2pkhaddressindex
.one_shot_read(starting_indexes.height) .read(starting_indexes.height)
{ {
let mut p2pkhaddressindex_to_p2pkhbytes_iter = let mut p2pkhaddressindex_to_p2pkhbytes_iter =
vecs.p2pkhaddressindex_to_p2pkhbytes.iter()?; vecs.p2pkhaddressindex_to_p2pkhbytes.iter()?;
@@ -233,7 +218,7 @@ impl Stores {
if let Ok(mut index) = vecs if let Ok(mut index) = vecs
.height_to_first_p2shaddressindex .height_to_first_p2shaddressindex
.one_shot_read(starting_indexes.height) .read(starting_indexes.height)
{ {
let mut p2shaddressindex_to_p2shbytes_iter = let mut p2shaddressindex_to_p2shbytes_iter =
vecs.p2shaddressindex_to_p2shbytes.iter()?; vecs.p2shaddressindex_to_p2shbytes.iter()?;
@@ -248,7 +233,7 @@ impl Stores {
if let Ok(mut index) = vecs if let Ok(mut index) = vecs
.height_to_first_p2traddressindex .height_to_first_p2traddressindex
.one_shot_read(starting_indexes.height) .read(starting_indexes.height)
{ {
let mut p2traddressindex_to_p2trbytes_iter = let mut p2traddressindex_to_p2trbytes_iter =
vecs.p2traddressindex_to_p2trbytes.iter()?; vecs.p2traddressindex_to_p2trbytes.iter()?;
@@ -263,7 +248,7 @@ impl Stores {
if let Ok(mut index) = vecs if let Ok(mut index) = vecs
.height_to_first_p2wpkhaddressindex .height_to_first_p2wpkhaddressindex
.one_shot_read(starting_indexes.height) .read(starting_indexes.height)
{ {
let mut p2wpkhaddressindex_to_p2wpkhbytes_iter = let mut p2wpkhaddressindex_to_p2wpkhbytes_iter =
vecs.p2wpkhaddressindex_to_p2wpkhbytes.iter()?; vecs.p2wpkhaddressindex_to_p2wpkhbytes.iter()?;
@@ -278,7 +263,7 @@ impl Stores {
if let Ok(mut index) = vecs if let Ok(mut index) = vecs
.height_to_first_p2wshaddressindex .height_to_first_p2wshaddressindex
.one_shot_read(starting_indexes.height) .read(starting_indexes.height)
{ {
let mut p2wshaddressindex_to_p2wshbytes_iter = let mut p2wshaddressindex_to_p2wshbytes_iter =
vecs.p2wshaddressindex_to_p2wshbytes.iter()?; vecs.p2wshaddressindex_to_p2wshbytes.iter()?;
@@ -293,7 +278,7 @@ impl Stores {
if let Ok(mut index) = vecs if let Ok(mut index) = vecs
.height_to_first_p2aaddressindex .height_to_first_p2aaddressindex
.one_shot_read(starting_indexes.height) .read(starting_indexes.height)
{ {
let mut p2aaddressindex_to_p2abytes_iter = let mut p2aaddressindex_to_p2abytes_iter =
vecs.p2aaddressindex_to_p2abytes.iter()?; vecs.p2aaddressindex_to_p2abytes.iter()?;
@@ -351,21 +336,28 @@ impl Stores {
.skip(starting_indexes.txoutindex.to_usize()), .skip(starting_indexes.txoutindex.to_usize()),
) )
.filter(|((_, outputtype), _)| outputtype.is_address()) .filter(|((_, outputtype), _)| outputtype.is_address())
.for_each(|((txoutindex, outputtype), typeindex)| { .for_each(|((txoutindex, addresstype), addressindex)| {
let txindex = txoutindex_to_txindex_iter.unsafe_get_(txoutindex); let txindex = txoutindex_to_txindex_iter.get_unwrap_at(txoutindex);
self.addresstype_to_addressindex_and_txindex.remove(
AddressTypeAddressIndexTxIndex::from((addresstype, addressindex, txindex)),
);
let vout = Vout::from( let vout = Vout::from(
txoutindex.to_usize() txoutindex.to_usize()
- txindex_to_first_txoutindex_iter - txindex_to_first_txoutindex_iter
.unsafe_get(txindex) .get_unwrap(txindex)
.to_usize(), .to_usize(),
); );
let outpoint = OutPoint::new(txindex, vout); let outpoint = OutPoint::new(txindex, vout);
self.addresstype_to_typeindex_and_unspentoutpoint self.addresstype_to_addressindex_and_unspentoutpoint.remove(
.get_mut(outputtype) AddressTypeAddressIndexOutPoint::from((
.unwrap() addresstype,
.remove(TypeIndexAndOutPoint::from((typeindex, outpoint))); addressindex,
outpoint,
)),
);
}); });
// Add back outputs that were spent after the rollback point // Add back outputs that were spent after the rollback point
@@ -384,19 +376,32 @@ impl Stores {
let vout = outpoint.vout(); let vout = outpoint.vout();
// Calculate txoutindex from txindex and vout // Calculate txoutindex from txindex and vout
let txoutindex = txindex_to_first_txoutindex_iter.unsafe_get(txindex) + vout; let txoutindex = txindex_to_first_txoutindex_iter.get_unwrap(txindex) + vout;
// Only process if this output was created before the rollback point // Only process if this output was created before the rollback point
if txoutindex < starting_indexes.txoutindex { if txoutindex < starting_indexes.txoutindex {
let outputtype = txoutindex_to_outputtype_iter.unsafe_get(txoutindex); let outputtype = txoutindex_to_outputtype_iter.get_unwrap(txoutindex);
if outputtype.is_address() { if outputtype.is_address() {
let typeindex = txoutindex_to_typeindex_iter.unsafe_get(txoutindex); let addresstype = outputtype;
let addressindex = txoutindex_to_typeindex_iter.get_unwrap(txoutindex);
self.addresstype_to_typeindex_and_unspentoutpoint self.addresstype_to_addressindex_and_txindex.remove(
.get_mut(outputtype) AddressTypeAddressIndexTxIndex::from((
.unwrap() addresstype,
.insert(TypeIndexAndOutPoint::from((typeindex, outpoint)), Unit); addressindex,
txindex,
)),
);
self.addresstype_to_addressindex_and_unspentoutpoint.insert(
AddressTypeAddressIndexOutPoint::from((
addresstype,
addressindex,
outpoint,
)),
Unit,
);
} }
} }
}); });

View File

@@ -1,4 +1,4 @@
use std::{path::Path, time::Instant}; use std::time::Instant;
use brk_error::Result; use brk_error::Result;
use brk_iterator::Blocks; use brk_iterator::Blocks;
@@ -7,13 +7,10 @@ use brk_rpc::{Auth, Client};
use brk_types::Height; use brk_types::Height;
fn main() -> Result<()> { fn main() -> Result<()> {
let bitcoin_dir = Path::new(&std::env::var("HOME").unwrap()) let bitcoin_dir = Client::default_bitcoin_path();
.join("Library")
.join("Application Support")
.join("Bitcoin");
let client = Client::new( let client = Client::new(
"http://localhost:8332", Client::default_url(),
Auth::CookieFile(bitcoin_dir.join(".cookie")), Auth::CookieFile(bitcoin_dir.join(".cookie")),
)?; )?;

View File

@@ -23,7 +23,8 @@ pub fn init(path: Option<&Path>) -> io::Result<()> {
}); });
Builder::from_env(Env::default().default_filter_or( Builder::from_env(Env::default().default_filter_or(
"debug,bitcoin=off,bitcoincore-rpc=off,rolldown=off,rolldown=off,rmcp=off,brk_rmcp=off,tracing=off,aide=off,brk_aide=off", "info,bitcoin=off,bitcoincore-rpc=off,fjall=off,lsm-tree=off,rolldown=off,rolldown=off,rmcp=off,brk_rmcp=off,tracing=off,aide=off,brk_aide=off",
// "debug,bitcoin=off,bitcoincore-rpc=off,rolldown=off,rolldown=off,rmcp=off,brk_rmcp=off,tracing=off,aide=off,brk_aide=off",
)) ))
.format(move |buf, record| { .format(move |buf, record| {
let date_time = Timestamp::now() let date_time = Timestamp::now()

View File

@@ -1,4 +1,4 @@
use std::{path::Path, thread, time::Duration}; use std::{thread, time::Duration};
use brk_error::Result; use brk_error::Result;
use brk_monitor::Mempool; use brk_monitor::Mempool;
@@ -6,14 +6,11 @@ use brk_rpc::{Auth, Client};
fn main() -> Result<()> { fn main() -> Result<()> {
// Connect to Bitcoin Core // Connect to Bitcoin Core
let bitcoin_dir = Path::new(&std::env::var("HOME").unwrap()) let bitcoin_dir = Client::default_bitcoin_path();
.join("Library")
.join("Application Support")
.join("Bitcoin");
// let bitcoin_dir = Path::new("/Volumes/WD_BLACK/bitcoin"); // let bitcoin_dir = Path::new("/Volumes/WD_BLACK/bitcoin");
let client = Client::new( let client = Client::new(
"http://localhost:8332", Client::default_url(),
Auth::CookieFile(bitcoin_dir.join(".cookie")), Auth::CookieFile(bitcoin_dir.join(".cookie")),
)?; )?;

View File

@@ -1,4 +1,4 @@
use std::{fs, path::Path}; use std::{env, fs, path::Path};
use brk_computer::Computer; use brk_computer::Computer;
use brk_error::Result; use brk_error::Result;
@@ -10,24 +10,21 @@ use brk_types::Index;
use vecdb::Exit; use vecdb::Exit;
pub fn main() -> Result<()> { pub fn main() -> Result<()> {
let bitcoin_dir = Path::new(&std::env::var("HOME").unwrap()) let bitcoin_dir = Client::default_bitcoin_path();
.join("Library")
.join("Application Support")
.join("Bitcoin");
// let bitcoin_dir = Path::new("/Volumes/WD_BLACK1/bitcoin"); // let bitcoin_dir = Path::new("/Volumes/WD_BLACK1/bitcoin");
let blocks_dir = bitcoin_dir.join("blocks"); let blocks_dir = bitcoin_dir.join("blocks");
let outputs_dir = Path::new(&std::env::var("HOME").unwrap()).join(".brk"); let outputs_dir = Path::new(&env::var("HOME").unwrap()).join(".brk");
fs::create_dir_all(&outputs_dir)?; fs::create_dir_all(&outputs_dir)?;
// let outputs_dir = Path::new("/Volumes/WD_BLACK1/brk"); // let outputs_dir = Path::new("/Volumes/WD_BLACK1/brk");
let client = Client::new( let client = Client::new(
"http://localhost:8332", Client::default_url(),
Auth::CookieFile(bitcoin_dir.join(".cookie")), Auth::CookieFile(bitcoin_dir.join(".cookie")),
)?; )?;
let outputs_dir = Path::new(&std::env::var("HOME").unwrap()).join(".brk"); let outputs_dir = Path::new(&env::var("HOME").unwrap()).join(".brk");
// let outputs_dir = Path::new("../../_outputs"); // let outputs_dir = Path::new("../../_outputs");
let exit = Exit::new(); let exit = Exit::new();

View File

@@ -1,5 +1,3 @@
use std::path::Path;
use brk_error::Result; use brk_error::Result;
use brk_reader::Reader; use brk_reader::Reader;
use brk_rpc::{Auth, Client}; use brk_rpc::{Auth, Client};
@@ -8,13 +6,10 @@ use brk_rpc::{Auth, Client};
fn main() -> Result<()> { fn main() -> Result<()> {
let i = std::time::Instant::now(); let i = std::time::Instant::now();
let bitcoin_dir = Path::new(&std::env::var("HOME").unwrap()) let bitcoin_dir = Client::default_bitcoin_path();
.join("Library")
.join("Application Support")
.join("Bitcoin");
let client = Client::new( let client = Client::new(
"http://localhost:8332", Client::default_url(),
Auth::CookieFile(bitcoin_dir.join(".cookie")), Auth::CookieFile(bitcoin_dir.join(".cookie")),
)?; )?;

View File

@@ -1,19 +1,14 @@
use std::path::Path;
use bitcoincore_rpc::RpcApi; use bitcoincore_rpc::RpcApi;
use brk_rpc::{Auth, Client}; use brk_rpc::{Auth, Client};
fn main() { fn main() {
brk_logger::init(None).unwrap(); brk_logger::init(None).unwrap();
let bitcoin_dir = Path::new(&std::env::var("HOME").unwrap()) let bitcoin_dir = Client::default_bitcoin_path();
.join("Library")
.join("Application Support")
.join("Bitcoin");
let auth = Auth::CookieFile(bitcoin_dir.join(".cookie")); let auth = Auth::CookieFile(bitcoin_dir.join(".cookie"));
let client = Client::new("http://localhost:8332", auth).unwrap(); let client = Client::new(Client::default_url(), auth).unwrap();
loop { loop {
println!("{:?}", client.call(|c| c.get_block_count()).unwrap()); println!("{:?}", client.call(|c| c.get_block_count()).unwrap());

View File

@@ -1,3 +1,5 @@
use std::env;
use std::path::{Path, PathBuf};
use std::thread::sleep; use std::thread::sleep;
use std::{mem, sync::Arc, time::Duration}; use std::{mem, sync::Arc, time::Duration};
@@ -270,4 +272,27 @@ impl Client {
{ {
self.0.call_once(f) self.0.call_once(f)
} }
pub fn default_url() -> &'static str {
"http://localhost:8332"
}
pub fn default_bitcoin_path() -> PathBuf {
if env::consts::OS == "macos" {
Self::default_mac_bitcoin_path()
} else {
Self::default_linux_bitcoin_path()
}
}
pub fn default_linux_bitcoin_path() -> PathBuf {
Path::new(&env::var("HOME").unwrap()).join(".bitcoin")
}
pub fn default_mac_bitcoin_path() -> PathBuf {
Path::new(&env::var("HOME").unwrap())
.join("Library")
.join("Application Support")
.join("Bitcoin")
}
} }

View File

@@ -29,17 +29,14 @@ pub fn main() -> Result<()> {
fn run() -> Result<()> { fn run() -> Result<()> {
brk_logger::init(Some(Path::new(".log")))?; brk_logger::init(Some(Path::new(".log")))?;
let bitcoin_dir = Path::new(&std::env::var("HOME").unwrap()) let bitcoin_dir = Client::default_bitcoin_path();
.join("Library")
.join("Application Support")
.join("Bitcoin");
// let bitcoin_dir = Path::new("/Volumes/WD_BLACK1/bitcoin"); // let bitcoin_dir = Path::new("/Volumes/WD_BLACK1/bitcoin");
let outputs_dir = Path::new(&std::env::var("HOME").unwrap()).join(".brk"); let outputs_dir = Path::new(&std::env::var("HOME").unwrap()).join(".brk");
// let outputs_dir = Path::new("../../_outputs"); // let outputs_dir = Path::new("../../_outputs");
let client = Client::new( let client = Client::new(
"http://localhost:8332", Client::default_url(),
Auth::CookieFile(bitcoin_dir.join(".cookie")), Auth::CookieFile(bitcoin_dir.join(".cookie")),
)?; )?;

View File

@@ -21,5 +21,4 @@ fjall2 = { workspace = true }
fjall3 = { workspace = true } fjall3 = { workspace = true }
log = { workspace = true } log = { workspace = true }
parking_lot = { workspace = true } parking_lot = { workspace = true }
redb = { workspace = true }
rustc-hash = { workspace = true } rustc-hash = { workspace = true }

View File

@@ -28,9 +28,7 @@ pub struct StoreFjallV2<Key, Value> {
const MAJOR_FJALL_VERSION: Version = Version::TWO; const MAJOR_FJALL_VERSION: Version = Version::TWO;
pub fn open_keyspace(path: &Path) -> fjall2::Result<TransactionalKeyspace> { pub fn open_keyspace(path: &Path) -> fjall2::Result<TransactionalKeyspace> {
fjall2::Config::new(path.join("fjall")) fjall2::Config::new(path.join("fjall")).open_transactional()
.max_write_buffer_size(32 * 1024 * 1024)
.open_transactional()
} }
impl<K, V> StoreFjallV2<K, V> impl<K, V> StoreFjallV2<K, V>
@@ -42,16 +40,14 @@ where
fn open_partition_handle( fn open_partition_handle(
keyspace: &TransactionalKeyspace, keyspace: &TransactionalKeyspace,
name: &str, name: &str,
bloom_filters: Option<bool>, bloom_filters: bool,
) -> Result<TransactionalPartitionHandle> { ) -> Result<TransactionalPartitionHandle> {
let mut options = PartitionCreateOptions::default() let mut options = PartitionCreateOptions::default().manual_journal_persist(true);
.max_memtable_size(8 * 1024 * 1024)
.manual_journal_persist(true);
if bloom_filters.is_some_and(|b| !b) { if bloom_filters {
options = options.bloom_filter_bits(None);
} else {
options = options.bloom_filter_bits(Some(5)); options = options.bloom_filter_bits(Some(5));
} else {
options = options.bloom_filter_bits(None);
} }
keyspace.open_partition(name, options).map_err(|e| e.into()) keyspace.open_partition(name, options).map_err(|e| e.into())
@@ -62,7 +58,7 @@ where
path: &Path, path: &Path,
name: &str, name: &str,
version: Version, version: Version,
bloom_filters: Option<bool>, bloom_filters: bool,
) -> Result<Self> { ) -> Result<Self> {
fs::create_dir_all(path)?; fs::create_dir_all(path)?;

View File

@@ -29,7 +29,6 @@ const MAJOR_FJALL_VERSION: Version = Version::new(3);
pub fn open_fjall3_database(path: &Path) -> fjall3::Result<Database> { pub fn open_fjall3_database(path: &Path) -> fjall3::Result<Database> {
Database::builder(path.join("fjall")) Database::builder(path.join("fjall"))
.max_write_buffer_size(32 * 1024 * 1024)
.cache_size(1024 * 1024 * 1024) .cache_size(1024 * 1024 * 1024)
.open() .open()
} }
@@ -40,34 +39,13 @@ where
V: Debug + Clone + From<ByteView>, V: Debug + Clone + From<ByteView>,
ByteView: From<K> + From<V>, ByteView: From<K> + From<V>,
{ {
fn open_keyspace(
database: &Database,
name: &str,
bloom_filters: Option<bool>,
) -> Result<Keyspace> {
let mut options = KeyspaceCreateOptions::default()
.manual_journal_persist(true)
.filter_block_pinning_policy(PinningPolicy::all(false))
.index_block_pinning_policy(PinningPolicy::all(false))
.max_memtable_size(8 * 1024 * 1024);
if bloom_filters.is_some_and(|b| !b) {
options = options.filter_policy(FilterPolicy::disabled());
} else {
options = options.filter_policy(FilterPolicy::all(FilterPolicyEntry::Bloom(
BloomConstructionPolicy::BitsPerKey(5.0),
)));
}
database.keyspace(name, options).map_err(|e| e.into())
}
pub fn import( pub fn import(
database: &Database, database: &Database,
path: &Path, path: &Path,
name: &str, name: &str,
version: Version, version: Version,
bloom_filters: Option<bool>, bloom_filters: bool,
sequential: bool,
) -> Result<Self> { ) -> Result<Self> {
fs::create_dir_all(path)?; fs::create_dir_all(path)?;
@@ -76,7 +54,7 @@ where
&path.join(format!("meta/{name}")), &path.join(format!("meta/{name}")),
MAJOR_FJALL_VERSION + version, MAJOR_FJALL_VERSION + version,
|| { || {
Self::open_keyspace(database, name, bloom_filters).inspect_err(|e| { Self::open_keyspace(database, name, bloom_filters, sequential).inspect_err(|e| {
eprintln!("{e}"); eprintln!("{e}");
eprintln!("Delete {path:?} and try again"); eprintln!("Delete {path:?} and try again");
}) })
@@ -93,6 +71,34 @@ where
}) })
} }
fn open_keyspace(
database: &Database,
name: &str,
bloom_filters: bool,
sequential: bool,
) -> Result<Keyspace> {
let mut options = KeyspaceCreateOptions::default().manual_journal_persist(true);
if bloom_filters {
options = options.filter_policy(FilterPolicy::new(&[
FilterPolicyEntry::Bloom(BloomConstructionPolicy::BitsPerKey(10.0)),
FilterPolicyEntry::Bloom(BloomConstructionPolicy::BitsPerKey(7.5)),
FilterPolicyEntry::Bloom(BloomConstructionPolicy::BitsPerKey(5.0)),
FilterPolicyEntry::Bloom(BloomConstructionPolicy::BitsPerKey(2.5)),
]));
} else {
options = options.filter_policy(FilterPolicy::disabled());
}
if sequential {
options = options
.filter_block_pinning_policy(PinningPolicy::all(false))
.index_block_pinning_policy(PinningPolicy::all(false));
}
database.keyspace(name, options).map_err(|e| e.into())
}
#[inline] #[inline]
pub fn get<'a>(&'a self, key: &'a K) -> Result<Option<Cow<'a, V>>> pub fn get<'a>(&'a self, key: &'a K) -> Result<Option<Cow<'a, V>>>
where where
@@ -114,6 +120,9 @@ where
#[inline] #[inline]
pub fn is_empty(&self) -> Result<bool> { pub fn is_empty(&self) -> Result<bool> {
// self.database
// .read_tx()
// .is_empty(&self.keyspace)
self.keyspace.is_empty().map_err(|e| e.into()) self.keyspace.is_empty().map_err(|e| e.into())
} }
@@ -178,6 +187,7 @@ where
} }
let mut batch = self.database.batch(); let mut batch = self.database.batch();
// let mut batch = self.database.inner().batch();
let mut items = mem::take(&mut self.puts) let mut items = mem::take(&mut self.puts)
.into_iter() .into_iter()
.map(|(key, value)| Item::Value { key, value }) .map(|(key, value)| Item::Value { key, value })
@@ -188,26 +198,14 @@ where
) )
.collect::<Vec<_>>(); .collect::<Vec<_>>();
items.sort_unstable(); items.sort_unstable();
batch.data = items batch.ingest(
items
.into_iter() .into_iter()
.map(|i| i.fjall(&self.keyspace)) .map(|i| i.fjalled(&self.keyspace))
.collect::<Vec<_>>(); .collect::<Vec<_>>(),
);
batch.commit_keyspace(&self.keyspace)?; batch.commit_keyspace(&self.keyspace)?;
// batch.commit_keyspace(self.keyspace.inner())?;
// let mut wtx = self.database.write_tx();
// let mut dels = self.dels.drain().collect::<Vec<_>>();
// dels.sort_unstable();
// dels.into_iter()
// .for_each(|key| wtx.remove(&self.keyspace, ByteView::from(key)));
// let mut puts = self.puts.drain().collect::<Vec<_>>();
// puts.sort_unstable_by(|(a, _), (b, _)| a.cmp(b));
// puts.into_iter().for_each(|(key, value)| {
// wtx.insert(&self.keyspace, ByteView::from(key), ByteView::from(value))
// });
// wtx.commit()?;
Ok(()) Ok(())
} }
@@ -265,20 +263,22 @@ impl<K, V> Item<K, V> {
} }
} }
pub fn fjall(self, keyspace: &Keyspace) -> fjall3::Item pub fn fjalled(self, keyspace: &Keyspace) -> fjall3::Item
where where
K: Into<ByteView>, K: Into<ByteView>,
V: Into<ByteView>, V: Into<ByteView>,
{ {
let keyspace_id = keyspace.id;
// let keyspace_id = keyspace.inner().id;
match self { match self {
Item::Value { key, value } => fjall3::Item { Item::Value { key, value } => fjall3::Item {
keyspace_id: keyspace.id, keyspace_id,
key: key.into().into(), key: key.into().into(),
value: value.into().into(), value: value.into().into(),
value_type: ValueType::Value, value_type: ValueType::Value,
}, },
Item::Tomb(key) => fjall3::Item { Item::Tomb(key) => fjall3::Item {
keyspace_id: keyspace.id, keyspace_id,
key: key.into().into(), key: key.into().into(),
value: [].into(), value: [].into(),
value_type: ValueType::WeakTombstone, value_type: ValueType::WeakTombstone,

View File

@@ -3,9 +3,7 @@
mod any; mod any;
mod fjall_v2; mod fjall_v2;
mod fjall_v3; mod fjall_v3;
mod redb;
pub use any::*; pub use any::*;
pub use fjall_v2::*; pub use fjall_v2::*;
pub use fjall_v3::*; pub use fjall_v3::*;
pub use redb::*;

View File

@@ -1,77 +0,0 @@
use std::{
fs, io,
path::{Path, PathBuf},
};
use brk_error::Result;
use brk_types::Version;
use super::Height;
#[derive(Debug, Clone)]
pub struct StoreMeta {
pathbuf: PathBuf,
version: Version,
height: Option<Height>,
}
impl StoreMeta {
pub fn checked_open(path: &Path, version: Version) -> Result<Self> {
fs::create_dir_all(path)?;
if Version::try_from(Self::path_version_(path).as_path())
.is_ok_and(|prev_version| version != prev_version)
{
todo!();
}
let slf = Self {
pathbuf: path.to_owned(),
version,
height: Height::try_from(Self::path_height_(path).as_path()).ok(),
};
slf.version.write(&slf.path_version())?;
Ok(slf)
}
pub fn version(&self) -> Version {
self.version
}
pub fn export(&mut self, height: Height) -> io::Result<()> {
self.height = Some(height);
height.write(&self.path_height())
}
pub fn path(&self) -> &Path {
&self.pathbuf
}
fn path_version(&self) -> PathBuf {
Self::path_version_(&self.pathbuf)
}
fn path_version_(path: &Path) -> PathBuf {
path.join("version")
}
#[inline]
pub fn height(&self) -> Option<Height> {
self.height
}
#[inline]
pub fn needs(&self, height: Height) -> bool {
self.height.is_none_or(|self_height| height > self_height)
}
#[inline]
pub fn has(&self, height: Height) -> bool {
!self.needs(height)
}
fn path_height(&self) -> PathBuf {
Self::path_height_(&self.pathbuf)
}
fn path_height_(path: &Path) -> PathBuf {
path.join("height")
}
}

View File

@@ -1,232 +0,0 @@
use std::{
borrow::{Borrow, Cow},
fmt::Debug,
fs,
hash::Hash,
mem::{self, transmute},
path::Path,
sync::Arc,
};
use brk_error::Result;
use brk_types::{Height, Version};
use parking_lot::RwLock;
use redb::{
Builder, Database, Durability, Key, ReadOnlyTable, ReadableDatabase, ReadableTableMetadata,
TableDefinition, Value,
};
mod meta;
use meta::*;
use rustc_hash::{FxHashMap, FxHashSet};
use crate::any::AnyStore;
/// A redb-backed key/value store with an in-memory write buffer.
///
/// Reads consult `puts`/`dels` first, then fall back to a cached read-only
/// table snapshot; `commit` flushes the buffers in one write transaction.
#[derive(Clone)]
pub struct StoreRedb<K, V>
where
    K: Key + 'static,
    V: Value + 'static,
{
    meta: StoreMeta,
    // Table name inside the shared database (leaked once to get &'static).
    name: &'static str,
    db: Arc<Database>,
    // Cached read-only snapshot; taken out and replaced around commits.
    table: Arc<RwLock<Option<ReadOnlyTable<K, V>>>>,
    // Pending inserts not yet committed.
    puts: FxHashMap<K, V>,
    // Pending deletions not yet committed.
    dels: FxHashSet<K>,
}

// NOTE(review): named "FJALL" but used by the redb backend — presumably kept
// in lockstep with the fjall stores' major version; confirm intent.
const MAJOR_FJALL_VERSION: Version = Version::new(3);

/// Opens (or creates) the shared `store.redb` database under `path`, with a
/// 4 GiB cache.
///
/// NOTE(review): `create`'s error is unwrapped rather than propagated, so the
/// returned `redb::Result` is always `Ok`; `create`'s error type likely does
/// not convert into this alias' error type — consider widening the return.
pub fn open_redb_database(path: &Path) -> redb::Result<Database> {
    let db = Builder::new()
        .set_cache_size(4 * 1024 * 1024 * 1024)
        .create(path.join("store.redb"))
        .unwrap();
    Ok(db)
}
impl<K, V> StoreRedb<K, V>
where
    K: Key + Ord + Eq + Hash + 'static,
    V: Value + Clone + 'static,
{
    /// Opens (or creates) table `name` in the shared database and loads its
    /// metadata from `path/meta/<name>`.
    ///
    /// `_bloom_filters` is accepted for signature parity with other backends
    /// and ignored here.
    pub fn import(
        db: &Arc<Database>,
        path: &Path,
        name: &str,
        version: Version,
        _bloom_filters: Option<bool>,
    ) -> Result<Self> {
        fs::create_dir_all(path)?;
        let meta = StoreMeta::checked_open(
            &path.join(format!("meta/{name}")),
            MAJOR_FJALL_VERSION + version,
        )?;
        // Create the table up-front inside a write txn so the read-only open
        // below cannot fail on a missing table.
        {
            let mut wtx = db.begin_write().unwrap();
            wtx.set_durability(Durability::Immediate).unwrap();
            let definition: TableDefinition<K, V> = TableDefinition::new(name);
            let table = wtx.open_table(definition).unwrap();
            drop(table);
            wtx.commit().unwrap();
        }
        let definition: TableDefinition<K, V> = TableDefinition::new(name);
        let table = db.begin_read().unwrap().open_table(definition).unwrap();
        Ok(Self {
            db: db.clone(),
            meta,
            // Leaked to satisfy the &'static str field; one small leak per
            // store open, living for the rest of the process.
            name: Box::leak(Box::new(name.to_string())),
            table: Arc::new(RwLock::new(Some(table))),
            puts: FxHashMap::default(),
            dels: FxHashSet::default(),
        })
    }

    // In case my hack doesn't work:
    // https://github.com/cberner/redb/issues/869
    /// Reads `key`, preferring the uncommitted write buffer over the cached
    /// read-only table.
    ///
    /// NOTE(review): the `transmute` extends the borrowed `SelfType` lifetime
    /// to 'static before converting into an owned `V`; this relies on the
    /// conversion copying the data while the access guard is still alive —
    /// see the issue linked above. Confirm soundness against redb's API.
    #[inline]
    pub fn get<'a>(&'a self, key: &'a K) -> Result<Option<Cow<'a, V>>>
    where
        &'a K: Borrow<K::SelfType<'a>>,
        V: From<V::SelfType<'static>>,
    {
        if let Some(v) = self.puts.get(key) {
            Ok(Some(Cow::Borrowed(v)))
        } else if let Some(value) = self.table.read().as_ref().unwrap().get(key).unwrap() {
            let selftype: <V as Value>::SelfType<'static> = unsafe { transmute(value.value()) };
            let owned: V = selftype.into();
            Ok(Some(Cow::Owned(owned)))
        } else {
            Ok(None)
        }
    }

    /// True when the committed table has no entries (pending puts ignored).
    #[inline]
    pub fn is_empty(&self) -> Result<bool> {
        Ok(self.table.read().as_ref().unwrap().len().unwrap() == 0)
    }

    /// Buffers an insert only when `height` is newer than the last export.
    #[inline]
    pub fn insert_if_needed(&mut self, key: K, value: V, height: Height) {
        if self.needs(height) {
            self.insert(key, value);
        }
    }

    /// Buffers an insert, cancelling any pending deletion of the same key.
    #[inline]
    pub fn insert(&mut self, key: K, value: V) {
        // Short-circuits the (hash) lookup when no deletions are pending.
        let _ = self.dels.is_empty() || self.dels.remove(&key);
        self.puts.insert(key, value);
    }

    /// Buffers a deletion; an insert-then-remove of the same key cancels out
    /// without ever touching the database.
    #[inline]
    pub fn remove(&mut self, key: K) {
        // Hot path: key was recently inserted
        if self.puts.remove(&key).is_some() {
            return;
        }
        let newly_inserted = self.dels.insert(key);
        debug_assert!(newly_inserted, "Double deletion at {:?}", self.meta.path());
    }

    /// Buffers a deletion only when `height` is newer than the last export.
    #[inline]
    pub fn remove_if_needed(&mut self, key: K, height: Height) {
        if self.needs(height) {
            self.remove(key)
        }
    }

    /// Whether `height` has already been exported (delegates to metadata).
    #[inline]
    fn has(&self, height: Height) -> bool {
        self.meta.has(height)
    }

    /// Whether `height` still needs to be exported (delegates to metadata).
    #[inline]
    fn needs(&self, height: Height) -> bool {
        self.meta.needs(height)
    }
}
impl<K, V> AnyStore for StoreRedb<K, V>
where
    // K: Debug + Clone + From<ByteView> + Ord + Eq + Hash,
    // V: Debug + Clone + From<ByteView>,
    K: Debug + Clone + Key + Ord + Eq + Hash + 'static + Borrow<K::SelfType<'static>>,
    V: Debug + Clone + Value + 'static + Borrow<V::SelfType<'static>>,
    // ByteView: From<K> + From<V>,
    Self: Send + Sync,
{
    /// Flushes buffered puts/dels into one durable write transaction, then
    /// refreshes the cached read-only table snapshot.
    ///
    /// Note: the metadata height is advanced even when there is nothing to
    /// flush, so an empty block still marks the store as up to date.
    fn commit(&mut self, height: Height) -> Result<()> {
        if self.has(height) {
            return Ok(());
        }
        self.meta.export(height)?;
        if self.puts.is_empty() && self.dels.is_empty() {
            return Ok(());
        }
        // let mut _rtx_lock = self._rtx.write();
        // drop(_rtx_lock.take());
        // Drop the read-only table before opening the write txn, and keep the
        // write lock held so readers block until the new snapshot is in place.
        let mut table_lock = self.table.write();
        drop(table_lock.take());
        let mut wtx = self.db.begin_write().unwrap();
        wtx.set_durability(Durability::Immediate).unwrap();
        let definition: TableDefinition<K, V> = TableDefinition::new(self.name);
        let mut table = wtx.open_table(definition).unwrap();
        // Apply inserts first, then deletions; the two sets are disjoint by
        // construction (insert/remove maintain that invariant).
        mem::take(&mut self.puts)
            .into_iter()
            .for_each(|(key, value)| {
                table.insert(key, value).unwrap();
            });
        mem::take(&mut self.dels).into_iter().for_each(|key| {
            table.remove(key).unwrap();
        });
        drop(table);
        wtx.commit().unwrap();
        // Re-open a fresh read-only snapshot for subsequent gets.
        table_lock.replace(
            self.db
                .begin_read()
                .unwrap()
                .open_table(definition)
                .unwrap(),
        );
        Ok(())
    }

    fn name(&self) -> &'static str {
        self.name
    }

    fn height(&self) -> Option<Height> {
        self.meta.height()
    }

    // Inherent methods take precedence over trait methods in resolution, so
    // these dispatch to the private metadata-backed impls (no recursion).
    fn has(&self, height: Height) -> bool {
        self.has(height)
    }

    fn needs(&self, height: Height) -> bool {
        self.needs(height)
    }
}

View File

@@ -19,7 +19,6 @@ itoa = "1.0.15"
jiff = { workspace = true } jiff = { workspace = true }
num_enum = "0.7.5" num_enum = "0.7.5"
rapidhash = "4.1.1" rapidhash = "4.1.1"
redb = { workspace = true }
ryu = "1.0.20" ryu = "1.0.20"
schemars = { workspace = true } schemars = { workspace = true }
serde = { workspace = true } serde = { workspace = true }

View File

@@ -1,8 +1,5 @@
use std::{cmp::Ordering, mem};
use byteview::ByteView; use byteview::ByteView;
use derive_deref::Deref; use derive_deref::Deref;
use redb::{Key, TypeName, Value};
use zerocopy::{FromBytes, Immutable, IntoBytes, KnownLayout}; use zerocopy::{FromBytes, Immutable, IntoBytes, KnownLayout};
use super::AddressBytes; use super::AddressBytes;
@@ -51,40 +48,3 @@ impl From<&AddressBytesHash> for ByteView {
Self::new(value.as_bytes()) Self::new(value.as_bytes())
} }
} }
// redb serialization: fixed 8-byte little-endian encoding of the inner u64.
impl Value for AddressBytesHash {
    type SelfType<'a> = AddressBytesHash;
    type AsBytes<'a>
        = [u8; mem::size_of::<u64>()]
    where
        Self: 'a;

    fn fixed_width() -> Option<usize> {
        Some(mem::size_of::<u64>())
    }

    fn from_bytes<'a>(data: &'a [u8]) -> AddressBytesHash
    where
        Self: 'a,
    {
        AddressBytesHash(u64::from_le_bytes(data.try_into().unwrap()))
    }

    fn as_bytes<'a, 'b: 'a>(value: &'a Self::SelfType<'b>) -> [u8; mem::size_of::<u64>()]
    where
        Self: 'a,
        Self: 'b,
    {
        value.0.to_le_bytes()
    }

    fn type_name() -> TypeName {
        TypeName::new("AddressBytesHash")
    }
}

// redb ordering: decode both sides and compare numerically (the LE bytes
// would not sort correctly lexicographically).
impl Key for AddressBytesHash {
    fn compare(data1: &[u8], data2: &[u8]) -> Ordering {
        Self::from_bytes(data1).cmp(&Self::from_bytes(data2))
    }
}

View File

@@ -0,0 +1,73 @@
use std::hash::{Hash, Hasher};
use byteview::ByteView;
use serde::Serialize;
use zerocopy::IntoBytes;
use crate::{AddressTypeAddressIndexTxIndex, OutputType, Vout};
use super::{OutPoint, TypeIndex};
/// Composite key: the packed (address type, address index, txindex) triple
/// together with the output's `vout`.
///
/// `Hash` is implemented manually (below in this file) to hash exactly the
/// significant bytes.
#[derive(Debug, PartialEq, Eq, PartialOrd, Ord, Clone, Copy, Serialize)]
#[repr(C)]
pub struct AddressTypeAddressIndexOutPoint {
    addresstypeaddressindextxindex: AddressTypeAddressIndexTxIndex, // (u8; u64)
    vout: Vout, // u16
}
/// Manual `Hash`: feeds all 11 significant bytes (1 type byte + 8 packed
/// bytes + 2 vout bytes) in a single `write` call, skipping any `repr(C)`
/// padding and staying consistent with the derived `Eq`.
impl Hash for AddressTypeAddressIndexOutPoint {
    fn hash<H: Hasher>(&self, state: &mut H) {
        let mut buf = [0u8; 11];
        // Native-byte-order views via zerocopy's IntoBytes.
        buf[..1].copy_from_slice(self.addresstypeaddressindextxindex.addresstype().as_bytes());
        buf[1..9].copy_from_slice(
            self.addresstypeaddressindextxindex
                .addressindextxindex()
                .as_bytes(),
        );
        buf[9..].copy_from_slice(self.vout.as_bytes());
        state.write(&buf);
    }
}
impl From<(OutputType, TypeIndex, OutPoint)> for AddressTypeAddressIndexOutPoint {
    /// Packs the address type/index with the outpoint's txindex and keeps the
    /// outpoint's vout alongside.
    #[inline]
    fn from((output_type, type_index, outpoint): (OutputType, TypeIndex, OutPoint)) -> Self {
        let packed =
            AddressTypeAddressIndexTxIndex::from((output_type, type_index, outpoint.txindex()));
        Self {
            addresstypeaddressindextxindex: packed,
            vout: outpoint.vout(),
        }
    }
}
impl From<ByteView> for AddressTypeAddressIndexOutPoint {
    /// Decodes the 11-byte layout produced by the `ByteView` conversion:
    /// 9 bytes of packed key followed by the vout bytes.
    /// Panics if `view` is shorter than expected.
    #[inline]
    fn from(view: ByteView) -> Self {
        let packed_bytes = &view[0..9];
        let vout_bytes = &view[9..];
        Self {
            addresstypeaddressindextxindex: AddressTypeAddressIndexTxIndex::from(packed_bytes),
            vout: Vout::from(vout_bytes),
        }
    }
}
impl From<AddressTypeAddressIndexOutPoint> for ByteView {
    /// Delegates to the borrowing conversion.
    #[inline]
    fn from(value: AddressTypeAddressIndexOutPoint) -> Self {
        ByteView::from(&value)
    }
}

impl From<&AddressTypeAddressIndexOutPoint> for ByteView {
    /// Serializes as 11 bytes: 1 type byte, 8-byte big-endian packed word,
    /// 2-byte big-endian vout — the layout the `From<ByteView>` impl decodes.
    ///
    /// Uses a single fixed stack buffer instead of the previous
    /// `[ByteView, slice].concat()`, which heap-allocated an intermediate
    /// `ByteView` and a `Vec` per call.
    #[inline]
    fn from(value: &AddressTypeAddressIndexOutPoint) -> Self {
        let mut buf = [0u8; 11];
        buf[..1].copy_from_slice(value.addresstypeaddressindextxindex.addresstype().as_bytes());
        buf[1..9].copy_from_slice(
            &value
                .addresstypeaddressindextxindex
                .addressindextxindex()
                .to_be_bytes(),
        );
        buf[9..].copy_from_slice(&value.vout.to_be_bytes());
        ByteView::new(&buf)
    }
}

View File

@@ -0,0 +1,88 @@
use std::hash::{Hash, Hasher};
use byteview::ByteView;
use serde::Serialize;
use zerocopy::IntoBytes;
use crate::OutputType;
use super::{TxIndex, TypeIndex};
/// An address' output type together with `(addressindex << 32) | txindex`
/// packed into a single u64.
///
/// `Hash` is implemented manually (below in this file) to hash exactly the
/// significant bytes.
#[derive(Debug, PartialEq, Eq, PartialOrd, Ord, Clone, Copy, Serialize)]
pub struct AddressTypeAddressIndexTxIndex {
    // Type tag (one byte on disk).
    addresstype: OutputType,
    // addressindex in the high 32 bits, txindex in the low 32 bits.
    addressindextxindex: u64,
}
/// Manual `Hash`: feeds all 9 significant bytes (1 type byte + 8 bytes of the
/// packed word, native order via zerocopy) in one `write` call, skipping
/// struct padding and staying consistent with the derived `Eq`.
impl Hash for AddressTypeAddressIndexTxIndex {
    fn hash<H: Hasher>(&self, state: &mut H) {
        let mut buf = [0u8; 9];
        buf[..1].copy_from_slice(self.addresstype.as_bytes());
        buf[1..].copy_from_slice(self.addressindextxindex.as_bytes());
        state.write(&buf);
    }
}
impl AddressTypeAddressIndexTxIndex {
    // Number of low bits occupied by the txindex inside the packed word.
    const TXINDEX_BITS: u32 = 32;

    /// The address/output type tag.
    pub fn addresstype(&self) -> OutputType {
        self.addresstype
    }

    /// High 32 bits of the packed word: the per-type address index.
    pub fn addressindex(&self) -> u32 {
        (self.addressindextxindex >> Self::TXINDEX_BITS) as u32
    }

    /// Low 32 bits of the packed word: the transaction index.
    pub fn txindex(&self) -> u32 {
        (self.addressindextxindex & 0xFFFF_FFFF) as u32
    }

    /// The raw packed `(addressindex << 32) | txindex` word.
    pub fn addressindextxindex(&self) -> u64 {
        self.addressindextxindex
    }
}
impl From<(OutputType, TypeIndex, TxIndex)> for AddressTypeAddressIndexTxIndex {
    /// Packs `addressindex` into the high 32 bits and `txindex` into the low
    /// 32 bits of one u64, keeping the type tag as a separate field.
    #[inline]
    fn from((addresstype, addressindex, txindex): (OutputType, TypeIndex, TxIndex)) -> Self {
        let high = u64::from(addressindex) << 32;
        let low = u64::from(txindex);
        Self {
            addresstype,
            addressindextxindex: high | low,
        }
    }
}
impl From<ByteView> for AddressTypeAddressIndexTxIndex {
    /// Delegates to the `&[u8]` decoder.
    #[inline]
    fn from(value: ByteView) -> Self {
        Self::from(&value[..])
    }
}

impl From<&[u8]> for AddressTypeAddressIndexTxIndex {
    /// Decodes the 9-byte layout: 1 type byte, then 4 address-index bytes and
    /// 4 txindex bytes (each via its own slice decoder).
    /// Panics if `value` is shorter than 9 bytes.
    #[inline]
    fn from(value: &[u8]) -> Self {
        let (type_bytes, rest) = value.split_at(1);
        let (index_bytes, tx_bytes) = rest.split_at(4);
        Self::from((
            OutputType::from(type_bytes),
            TypeIndex::from(index_bytes),
            TxIndex::from(&tx_bytes[..4]),
        ))
    }
}
impl From<AddressTypeAddressIndexTxIndex> for ByteView {
    /// Delegates to the borrowing conversion.
    #[inline]
    fn from(value: AddressTypeAddressIndexTxIndex) -> Self {
        ByteView::from(&value)
    }
}

impl From<&AddressTypeAddressIndexTxIndex> for ByteView {
    /// Serializes as 9 bytes: 1 type byte followed by the packed word in
    /// big-endian — the layout the `From<&[u8]>` impl decodes.
    ///
    /// Uses a single fixed stack buffer instead of the previous
    /// `[..].concat()`, which heap-allocated an intermediate `Vec` per call.
    #[inline]
    fn from(value: &AddressTypeAddressIndexTxIndex) -> Self {
        let mut buf = [0u8; 9];
        buf[..1].copy_from_slice(value.addresstype.as_bytes());
        buf[1..].copy_from_slice(&value.addressindextxindex.to_be_bytes());
        ByteView::new(&buf)
    }
}

View File

@@ -1,8 +1,5 @@
use std::{cmp::Ordering, mem};
use byteview::ByteView; use byteview::ByteView;
use derive_deref::Deref; use derive_deref::Deref;
use redb::{Key, TypeName, Value};
use zerocopy::{FromBytes, Immutable, IntoBytes, KnownLayout}; use zerocopy::{FromBytes, Immutable, IntoBytes, KnownLayout};
use crate::copy_first_8bytes; use crate::copy_first_8bytes;
@@ -60,40 +57,3 @@ impl From<BlockHashPrefix> for ByteView {
Self::from(&value) Self::from(&value)
} }
} }
// redb serialization: fixed 8-byte little-endian encoding of the inner u64.
impl Value for BlockHashPrefix {
    type SelfType<'a> = BlockHashPrefix;
    type AsBytes<'a>
        = [u8; mem::size_of::<u64>()]
    where
        Self: 'a;

    fn fixed_width() -> Option<usize> {
        Some(mem::size_of::<u64>())
    }

    fn from_bytes<'a>(data: &'a [u8]) -> BlockHashPrefix
    where
        Self: 'a,
    {
        BlockHashPrefix(u64::from_le_bytes(data.try_into().unwrap()))
    }

    fn as_bytes<'a, 'b: 'a>(value: &'a Self::SelfType<'b>) -> [u8; mem::size_of::<u64>()]
    where
        Self: 'a,
        Self: 'b,
    {
        value.0.to_le_bytes()
    }

    fn type_name() -> TypeName {
        TypeName::new("BlockHashPrefix")
    }
}

// redb ordering: decode both sides and compare numerically (the LE bytes
// would not sort correctly lexicographically).
impl Key for BlockHashPrefix {
    fn compare(data1: &[u8], data2: &[u8]) -> Ordering {
        Self::from_bytes(data1).cmp(&Self::from_bytes(data2))
    }
}

View File

@@ -1,14 +1,11 @@
use std::{ use std::{
cmp::Ordering,
fmt::Debug, fmt::Debug,
mem,
ops::{Add, AddAssign, Rem}, ops::{Add, AddAssign, Rem},
}; };
use allocative::Allocative; use allocative::Allocative;
use byteview::ByteView; use byteview::ByteView;
use derive_deref::Deref; use derive_deref::Deref;
use redb::{Key, TypeName, Value};
use schemars::JsonSchema; use schemars::JsonSchema;
use serde::{Deserialize, Serialize}; use serde::{Deserialize, Serialize};
use vecdb::{CheckedSub, PrintableIndex, Stamp, StoredCompressed}; use vecdb::{CheckedSub, PrintableIndex, Stamp, StoredCompressed};
@@ -283,40 +280,3 @@ impl std::fmt::Display for Height {
f.write_str(str) f.write_str(str)
} }
} }
// redb serialization: fixed 4-byte little-endian encoding of the inner u32.
impl Value for Height {
    type SelfType<'a> = Height;
    type AsBytes<'a>
        = [u8; mem::size_of::<u32>()]
    where
        Self: 'a;

    fn fixed_width() -> Option<usize> {
        Some(mem::size_of::<u32>())
    }

    fn from_bytes<'a>(data: &'a [u8]) -> Height
    where
        Self: 'a,
    {
        Height(u32::from_le_bytes(data.try_into().unwrap()))
    }

    fn as_bytes<'a, 'b: 'a>(value: &'a Self::SelfType<'b>) -> [u8; mem::size_of::<u32>()]
    where
        Self: 'a,
        Self: 'b,
    {
        value.0.to_le_bytes()
    }

    fn type_name() -> TypeName {
        TypeName::new("Height")
    }
}

// redb ordering: numeric, by decoding both keys first (the LE bytes would
// not sort correctly lexicographically).
impl Key for Height {
    fn compare(data1: &[u8], data2: &[u8]) -> Ordering {
        Self::from_bytes(data1).cmp(&Self::from_bytes(data2))
    }
}

View File

@@ -10,6 +10,8 @@ mod addressbyteshash;
mod addresschainstats; mod addresschainstats;
mod addressmempoolstats; mod addressmempoolstats;
mod addressstats; mod addressstats;
mod addresstypeaddressindexoutpoint;
mod addresstypeaddressindextxindex;
mod anyaddressindex; mod anyaddressindex;
mod bitcoin; mod bitcoin;
mod blkmetadata; mod blkmetadata;
@@ -92,8 +94,6 @@ mod txoutindex;
mod txstatus; mod txstatus;
mod txversion; mod txversion;
mod typeindex; mod typeindex;
mod typeindexandoutpoint;
mod typeindexandtxindex;
mod unit; mod unit;
mod unknownoutputindex; mod unknownoutputindex;
mod vin; mod vin;
@@ -108,6 +108,8 @@ pub use addressbyteshash::*;
pub use addresschainstats::*; pub use addresschainstats::*;
pub use addressmempoolstats::*; pub use addressmempoolstats::*;
pub use addressstats::*; pub use addressstats::*;
pub use addresstypeaddressindexoutpoint::*;
pub use addresstypeaddressindextxindex::*;
pub use anyaddressindex::*; pub use anyaddressindex::*;
pub use bitcoin::*; pub use bitcoin::*;
pub use blkmetadata::*; pub use blkmetadata::*;
@@ -190,8 +192,6 @@ pub use txoutindex::*;
pub use txstatus::*; pub use txstatus::*;
pub use txversion::*; pub use txversion::*;
pub use typeindex::*; pub use typeindex::*;
pub use typeindexandoutpoint::*;
pub use typeindexandtxindex::*;
pub use unit::*; pub use unit::*;
pub use unknownoutputindex::*; pub use unknownoutputindex::*;
pub use vin::*; pub use vin::*;

View File

@@ -20,6 +20,7 @@ use zerocopy::{FromBytes, Immutable, IntoBytes, KnownLayout};
KnownLayout, KnownLayout,
Serialize, Serialize,
JsonSchema, JsonSchema,
Hash,
)] )]
#[serde(rename_all = "lowercase")] #[serde(rename_all = "lowercase")]
#[strum(serialize_all = "lowercase")] #[strum(serialize_all = "lowercase")]
@@ -906,3 +907,10 @@ impl TryFrom<OutputType> for AddressType {
}) })
} }
} }
impl From<&[u8]> for OutputType {
    /// Decodes the one-byte on-disk tag.
    ///
    /// Panics (via `unwrap`) when `value` has the wrong length or is not a
    /// valid `OutputType` encoding — callers are expected to pass bytes
    /// previously produced by this type.
    #[inline]
    fn from(value: &[u8]) -> Self {
        Self::read_from_bytes(value).unwrap()
    }
}

View File

@@ -2,7 +2,6 @@ use std::{borrow::Cow, str};
use byteview::ByteView; use byteview::ByteView;
use derive_deref::Deref; use derive_deref::Deref;
use redb::{TypeName, Value};
use serde::Serialize; use serde::Serialize;
use vecdb::PrintableIndex; use vecdb::PrintableIndex;
@@ -68,36 +67,3 @@ impl PrintableIndex for StoredString {
&["string"] &["string"]
} }
} }
// redb serialization: variable-width UTF-8 bytes; decoding panics on invalid
// UTF-8 (stored data is assumed to have been written by `as_bytes`).
impl Value for StoredString {
    type SelfType<'a>
        = StoredString
    where
        Self: 'a;
    type AsBytes<'a>
        = &'a str
    where
        Self: 'a;

    fn fixed_width() -> Option<usize> {
        None
    }

    fn from_bytes<'a>(data: &'a [u8]) -> StoredString
    where
        Self: 'a,
    {
        StoredString(str::from_utf8(data).unwrap().to_string())
    }

    fn as_bytes<'a, 'b: 'a>(value: &'a Self::SelfType<'b>) -> &'a str
    where
        Self: 'b,
    {
        value.as_str()
    }

    fn type_name() -> TypeName {
        TypeName::new("StoredString")
    }
}

View File

@@ -1,8 +1,5 @@
use std::{cmp::Ordering, mem};
use byteview::ByteView; use byteview::ByteView;
use derive_deref::Deref; use derive_deref::Deref;
use redb::{Key, TypeName, Value};
use zerocopy::{FromBytes, Immutable, IntoBytes, KnownLayout}; use zerocopy::{FromBytes, Immutable, IntoBytes, KnownLayout};
use crate::copy_first_8bytes; use crate::copy_first_8bytes;
@@ -67,40 +64,3 @@ impl From<[u8; 8]> for TxidPrefix {
Self(u64::from_ne_bytes(value)) Self(u64::from_ne_bytes(value))
} }
} }
// redb serialization: fixed 8-byte little-endian encoding of the inner u64.
impl Value for TxidPrefix {
    type SelfType<'a> = TxidPrefix;
    type AsBytes<'a>
        = [u8; mem::size_of::<u64>()]
    where
        Self: 'a;

    fn fixed_width() -> Option<usize> {
        Some(mem::size_of::<u64>())
    }

    fn from_bytes<'a>(data: &'a [u8]) -> TxidPrefix
    where
        Self: 'a,
    {
        TxidPrefix(u64::from_le_bytes(data.try_into().unwrap()))
    }

    fn as_bytes<'a, 'b: 'a>(value: &'a Self::SelfType<'b>) -> [u8; mem::size_of::<u64>()]
    where
        Self: 'a,
        Self: 'b,
    {
        value.0.to_le_bytes()
    }

    fn type_name() -> TypeName {
        TypeName::new("TxidPrefix")
    }
}

// redb ordering: decode both sides and compare numerically (the LE bytes
// would not sort correctly lexicographically).
impl Key for TxidPrefix {
    fn compare(data1: &[u8], data2: &[u8]) -> Ordering {
        Self::from_bytes(data1).cmp(&Self::from_bytes(data2))
    }
}

View File

@@ -1,12 +1,8 @@
use std::{ use std::ops::{Add, AddAssign};
mem,
ops::{Add, AddAssign},
};
use allocative::Allocative; use allocative::Allocative;
use byteview::ByteView; use byteview::ByteView;
use derive_deref::{Deref, DerefMut}; use derive_deref::{Deref, DerefMut};
use redb::{TypeName, Value};
use schemars::JsonSchema; use schemars::JsonSchema;
use serde::Serialize; use serde::Serialize;
use vecdb::{CheckedSub, PrintableIndex, StoredCompressed}; use vecdb::{CheckedSub, PrintableIndex, StoredCompressed};
@@ -170,34 +166,3 @@ impl std::fmt::Display for TxIndex {
f.write_str(str) f.write_str(str)
} }
} }
// redb serialization: fixed 4-byte little-endian encoding of the inner u32.
impl Value for TxIndex {
    type SelfType<'a> = TxIndex;
    type AsBytes<'a>
        = [u8; mem::size_of::<u32>()]
    where
        Self: 'a;

    fn fixed_width() -> Option<usize> {
        Some(mem::size_of::<u32>())
    }

    fn from_bytes<'a>(data: &'a [u8]) -> TxIndex
    where
        Self: 'a,
    {
        TxIndex(u32::from_le_bytes(data.try_into().unwrap()))
    }

    fn as_bytes<'a, 'b: 'a>(value: &'a Self::SelfType<'b>) -> [u8; mem::size_of::<u32>()]
    where
        Self: 'a,
        Self: 'b,
    {
        value.0.to_le_bytes()
    }

    fn type_name() -> TypeName {
        TypeName::new("TxIndex")
    }
}

View File

@@ -1,7 +1,6 @@
use std::{mem, ops::Add}; use std::ops::Add;
use byteview::ByteView; use byteview::ByteView;
use redb::{TypeName, Value};
use schemars::JsonSchema; use schemars::JsonSchema;
use serde::{Deserialize, Serialize}; use serde::{Deserialize, Serialize};
use vecdb::{CheckedSub, StoredCompressed}; use vecdb::{CheckedSub, StoredCompressed};
@@ -151,34 +150,3 @@ impl std::fmt::Display for TypeIndex {
f.write_str(str) f.write_str(str)
} }
} }
// redb serialization: fixed 4-byte little-endian encoding of the inner u32.
impl Value for TypeIndex {
    type SelfType<'a> = TypeIndex;
    type AsBytes<'a>
        = [u8; mem::size_of::<u32>()]
    where
        Self: 'a;

    fn fixed_width() -> Option<usize> {
        Some(mem::size_of::<u32>())
    }

    fn from_bytes<'a>(data: &'a [u8]) -> TypeIndex
    where
        Self: 'a,
    {
        TypeIndex(u32::from_le_bytes(data.try_into().unwrap()))
    }

    fn as_bytes<'a, 'b: 'a>(value: &'a Self::SelfType<'b>) -> [u8; mem::size_of::<u32>()]
    where
        Self: 'a,
        Self: 'b,
    {
        value.0.to_le_bytes()
    }

    fn type_name() -> TypeName {
        TypeName::new("TypeIndex")
    }
}

View File

@@ -1,114 +0,0 @@
use std::{
cmp::Ordering,
hash::{Hash, Hasher},
};
use byteview::ByteView;
use redb::{Key, TypeName, Value};
use serde::Serialize;
use zerocopy::IntoBytes;
use crate::{TypeIndexAndTxIndex, Vout};
use super::{OutPoint, TypeIndex};
/// Composite key: the packed `(typeindex << 32) | txindex` word plus the
/// outpoint's `vout`.
#[derive(Debug, PartialEq, Eq, PartialOrd, Ord, Clone, Copy, Default, Serialize)]
#[repr(C)]
pub struct TypeIndexAndOutPoint {
    typeindexandtxindex: TypeIndexAndTxIndex, // u64
    vout: Vout, // u16
}

/// Manual `Hash`: feeds all 10 significant bytes in one `write` call,
/// skipping `repr(C)` padding (consistent with the derived `Eq`).
impl Hash for TypeIndexAndOutPoint {
    fn hash<H: Hasher>(&self, state: &mut H) {
        let mut buf = [0u8; 10];
        buf[..8].copy_from_slice(self.typeindexandtxindex.as_bytes());
        buf[8..].copy_from_slice(self.vout.as_bytes());
        state.write(&buf);
    }
}
impl From<(TypeIndex, OutPoint)> for TypeIndexAndOutPoint {
    /// Packs the typeindex with the outpoint's txindex; keeps the vout as-is.
    #[inline]
    fn from(value: (TypeIndex, OutPoint)) -> Self {
        Self {
            typeindexandtxindex: TypeIndexAndTxIndex::from((value.0, value.1.txindex())),
            vout: value.1.vout(),
        }
    }
}

impl From<ByteView> for TypeIndexAndOutPoint {
    /// Decodes the 10-byte layout (8-byte packed word, then vout bytes).
    /// Panics on short input.
    #[inline]
    fn from(value: ByteView) -> Self {
        Self {
            typeindexandtxindex: TypeIndexAndTxIndex::from(&value[0..8]),
            vout: Vout::from(&value[8..]),
        }
    }
}

impl From<TypeIndexAndOutPoint> for ByteView {
    /// Delegates to the borrowing conversion.
    #[inline]
    fn from(value: TypeIndexAndOutPoint) -> Self {
        ByteView::from(&value)
    }
}

impl From<&TypeIndexAndOutPoint> for ByteView {
    /// Encodes as 10 big-endian bytes: packed word then vout.
    #[inline]
    fn from(value: &TypeIndexAndOutPoint) -> Self {
        ByteView::from(
            [
                value.typeindexandtxindex.to_be_bytes().as_slice(),
                value.vout.to_be_bytes().as_slice(),
            ]
            .concat(),
        )
    }
}
// redb serialization: fixed 10-byte layout via the fields' own (little-endian)
// redb encodings — note this differs from the big-endian ByteView encoding.
impl Value for TypeIndexAndOutPoint {
    type SelfType<'a> = TypeIndexAndOutPoint;
    type AsBytes<'a>
        = [u8; 10]
    // 8 bytes (u64) + 2 bytes (u16)
    where
        Self: 'a;

    fn fixed_width() -> Option<usize> {
        Some(10) // 8 + 2
    }

    fn from_bytes<'a>(data: &'a [u8]) -> TypeIndexAndOutPoint
    where
        Self: 'a,
    {
        TypeIndexAndOutPoint {
            typeindexandtxindex: TypeIndexAndTxIndex::from_bytes(&data[0..8]),
            vout: Vout::from_bytes(&data[8..10]),
        }
    }

    fn as_bytes<'a, 'b: 'a>(value: &'a Self::SelfType<'b>) -> [u8; 10]
    where
        Self: 'a,
        Self: 'b,
    {
        let mut bytes = [0u8; 10];
        bytes[0..8].copy_from_slice(&<TypeIndexAndTxIndex as redb::Value>::as_bytes(
            &value.typeindexandtxindex,
        ));
        bytes[8..10].copy_from_slice(&<Vout as redb::Value>::as_bytes(&value.vout));
        bytes
    }

    fn type_name() -> TypeName {
        TypeName::new("TypeIndexAndOutPoint")
    }
}

// redb ordering: decode then compare structurally (the LE bytes would not
// sort correctly lexicographically).
impl Key for TypeIndexAndOutPoint {
    fn compare(data1: &[u8], data2: &[u8]) -> Ordering {
        Self::from_bytes(data1).cmp(&Self::from_bytes(data2))
    }
}

View File

@@ -1,120 +0,0 @@
use std::{cmp::Ordering, mem};
use byteview::ByteView;
use redb::{Key, TypeName, Value};
use serde::Serialize;
use zerocopy::{FromBytes, Immutable, IntoBytes, KnownLayout};
use super::{TxIndex, TypeIndex};
/// `typeindex` (high 32 bits) and `txindex` (low 32 bits) packed into one u64.
#[derive(
    Debug,
    PartialEq,
    Eq,
    PartialOrd,
    Ord,
    Clone,
    Copy,
    Default,
    Serialize,
    Hash,
    FromBytes,
    Immutable,
    IntoBytes,
    KnownLayout,
)]
pub struct TypeIndexAndTxIndex(u64);

impl TypeIndexAndTxIndex {
    /// High 32 bits: the per-type index.
    pub fn typeindex(&self) -> u32 {
        (self.0 >> 32) as u32
    }

    /// Low 32 bits: the transaction index.
    pub fn txindex(&self) -> u32 {
        self.0 as u32
    }

    /// Big-endian encoding of the packed word, so the raw bytes sort in
    /// (typeindex, txindex) order.
    pub fn to_be_bytes(&self) -> [u8; 8] {
        self.0.to_be_bytes()
    }
}
impl From<(TypeIndex, TxIndex)> for TypeIndexAndTxIndex {
    /// Packs `typeindex` into the high 32 bits and `txindex` into the low.
    #[inline]
    fn from((typeindex, txindex): (TypeIndex, TxIndex)) -> Self {
        Self((u64::from(typeindex) << 32) | u64::from(txindex))
    }
}

impl From<ByteView> for TypeIndexAndTxIndex {
    /// Delegates to the `&[u8]` decoder.
    #[inline]
    fn from(value: ByteView) -> Self {
        Self::from(&*value)
    }
}

impl From<&[u8]> for TypeIndexAndTxIndex {
    /// Decodes 4 typeindex bytes followed by 4 txindex bytes (each via its
    /// own slice decoder). Panics on inputs shorter than 8 bytes.
    #[inline]
    fn from(value: &[u8]) -> Self {
        let typeindex = TypeIndex::from(&value[0..4]);
        let txindex = TxIndex::from(&value[4..8]);
        Self::from((typeindex, txindex))
    }
}

impl From<TypeIndexAndTxIndex> for ByteView {
    /// Delegates to the borrowing conversion.
    #[inline]
    fn from(value: TypeIndexAndTxIndex) -> Self {
        ByteView::from(&value)
    }
}

impl From<&TypeIndexAndTxIndex> for ByteView {
    /// Big-endian so the raw bytes sort in (typeindex, txindex) order.
    #[inline]
    fn from(value: &TypeIndexAndTxIndex) -> Self {
        ByteView::from(value.0.to_be_bytes().as_slice())
    }
}

impl From<TypeIndexAndTxIndex> for u64 {
    /// Exposes the raw packed word.
    #[inline]
    fn from(value: TypeIndexAndTxIndex) -> Self {
        value.0
    }
}
// redb serialization: fixed 8-byte little-endian encoding of the packed word
// (note: differs from the big-endian ByteView encoding above).
impl Value for TypeIndexAndTxIndex {
    type SelfType<'a> = TypeIndexAndTxIndex;
    type AsBytes<'a>
        = [u8; mem::size_of::<u64>()]
    where
        Self: 'a;

    fn fixed_width() -> Option<usize> {
        Some(mem::size_of::<u64>())
    }

    fn from_bytes<'a>(data: &'a [u8]) -> TypeIndexAndTxIndex
    where
        Self: 'a,
    {
        TypeIndexAndTxIndex(u64::from_le_bytes(data.try_into().unwrap()))
    }

    fn as_bytes<'a, 'b: 'a>(value: &'a Self::SelfType<'b>) -> [u8; mem::size_of::<u64>()]
    where
        Self: 'a,
        Self: 'b,
    {
        value.0.to_le_bytes()
    }

    fn type_name() -> TypeName {
        TypeName::new("TypeIndexAndTxIndex")
    }
}

// redb ordering: decode both sides and compare numerically (the LE bytes
// would not sort correctly lexicographically).
impl Key for TypeIndexAndTxIndex {
    fn compare(data1: &[u8], data2: &[u8]) -> Ordering {
        Self::from_bytes(data1).cmp(&Self::from_bytes(data2))
    }
}

View File

@@ -1,5 +1,4 @@
use byteview::ByteView; use byteview::ByteView;
use redb::{TypeName, Value};
#[derive(Debug, Clone)] #[derive(Debug, Clone)]
pub struct Unit; pub struct Unit;
@@ -16,38 +15,3 @@ impl From<Unit> for ByteView {
Self::new(&[]) Self::new(&[])
} }
} }
// redb serialization: zero-width value — no bytes are stored at all.
impl Value for Unit {
    type SelfType<'a>
        = Unit
    where
        Self: 'a;
    type AsBytes<'a>
        = &'a [u8]
    where
        Self: 'a;

    fn fixed_width() -> Option<usize> {
        Some(0)
    }

    #[allow(clippy::unused_unit, clippy::semicolon_if_nothing_returned)]
    fn from_bytes<'a>(_data: &'a [u8]) -> Unit
    where
        Self: 'a,
    {
        Unit
    }

    #[allow(clippy::ignored_unit_patterns)]
    fn as_bytes<'a, 'b: 'a>(_: &'a Self::SelfType<'b>) -> &'a [u8]
    where
        Self: 'b,
    {
        &[]
    }

    fn type_name() -> TypeName {
        TypeName::new("Unit")
    }
}

View File

@@ -1,8 +1,5 @@
use std::mem;
use allocative::Allocative; use allocative::Allocative;
use derive_deref::Deref; use derive_deref::Deref;
use redb::{TypeName, Value};
use schemars::JsonSchema; use schemars::JsonSchema;
use serde::Serialize; use serde::Serialize;
use zerocopy::{FromBytes, Immutable, IntoBytes, KnownLayout}; use zerocopy::{FromBytes, Immutable, IntoBytes, KnownLayout};
@@ -110,34 +107,3 @@ impl std::fmt::Display for Vout {
self.0.fmt(f) self.0.fmt(f)
} }
} }
// redb serialization: fixed 2-byte little-endian encoding of the inner u16.
impl Value for Vout {
    type SelfType<'a> = Vout;
    type AsBytes<'a>
        = [u8; mem::size_of::<u16>()]
    where
        Self: 'a;

    fn fixed_width() -> Option<usize> {
        Some(mem::size_of::<u16>())
    }

    fn from_bytes<'a>(data: &'a [u8]) -> Vout
    where
        Self: 'a,
    {
        Vout(u16::from_le_bytes(data.try_into().unwrap()))
    }

    fn as_bytes<'a, 'b: 'a>(value: &'a Self::SelfType<'b>) -> [u8; mem::size_of::<u16>()]
    where
        Self: 'a,
        Self: 'b,
    {
        value.0.to_le_bytes()
    }

    fn type_name() -> TypeName {
        TypeName::new("Vout")
    }
}