This commit is contained in:
nym21
2025-11-14 12:09:58 +01:00
parent 1d2c927d94
commit e8f77ab2e5
46 changed files with 1400 additions and 1394 deletions

291
Cargo.lock generated
View File

@@ -363,6 +363,24 @@ dependencies = [
"virtue", "virtue",
] ]
[[package]]
name = "bindgen"
version = "0.72.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "993776b509cfb49c750f11b8f07a46fa23e0a1386ffc01fb1e7d343efc387895"
dependencies = [
"bitflags 2.10.0",
"cexpr",
"clang-sys",
"itertools 0.13.0",
"proc-macro2",
"quote",
"regex",
"rustc-hash",
"shlex",
"syn 2.0.110",
]
[[package]] [[package]]
name = "bit-set" name = "bit-set"
version = "0.8.0" version = "0.8.0"
@@ -534,6 +552,7 @@ version = "0.0.111"
dependencies = [ dependencies = [
"brk_error", "brk_error",
"brk_logger", "brk_logger",
"libproc",
"parking_lot", "parking_lot",
] ]
@@ -595,6 +614,7 @@ name = "brk_computer"
version = "0.0.111" version = "0.0.111"
dependencies = [ dependencies = [
"bitcoin", "bitcoin",
"brk_bencher",
"brk_error", "brk_error",
"brk_fetcher", "brk_fetcher",
"brk_grouper", "brk_grouper",
@@ -627,7 +647,7 @@ dependencies = [
"fjall", "fjall",
"jiff", "jiff",
"minreq", "minreq",
"sonic-rs", "serde_json",
"tokio", "tokio",
"vecdb", "vecdb",
"zerocopy", "zerocopy",
@@ -642,7 +662,7 @@ dependencies = [
"brk_types", "brk_types",
"log", "log",
"minreq", "minreq",
"sonic-rs", "serde_json",
] ]
[[package]] [[package]]
@@ -853,7 +873,7 @@ dependencies = [
"dunce", "dunce",
"futures", "futures",
"indexmap", "indexmap",
"itertools", "itertools 0.14.0",
"itoa", "itoa",
"json-escape-simd", "json-escape-simd",
"memchr", "memchr",
@@ -893,7 +913,7 @@ dependencies = [
"dashmap", "dashmap",
"derive_more", "derive_more",
"fast-glob", "fast-glob",
"itertools", "itertools 0.14.0",
"num-bigint", "num-bigint",
"oxc", "oxc",
"oxc_ecmascript", "oxc_ecmascript",
@@ -1094,7 +1114,7 @@ dependencies = [
"brk_rolldown_fs", "brk_rolldown_fs",
"brk_rolldown_utils", "brk_rolldown_utils",
"dashmap", "dashmap",
"itertools", "itertools 0.14.0",
"oxc_resolver", "oxc_resolver",
"sugar_path", "sugar_path",
] ]
@@ -1154,7 +1174,7 @@ dependencies = [
"itoa", "itoa",
"memchr", "memchr",
"mime", "mime",
"nom", "nom 8.0.0",
"oxc", "oxc",
"oxc_index", "oxc_index",
"phf", "phf",
@@ -1216,7 +1236,8 @@ dependencies = [
"log", "log",
"quick_cache", "quick_cache",
"schemars", "schemars",
"sonic-rs", "serde",
"serde_json",
"tokio", "tokio",
"tower-http", "tower-http",
"tracing", "tracing",
@@ -1273,7 +1294,7 @@ version = "0.0.111"
dependencies = [ dependencies = [
"bitcoin", "bitcoin",
"brk_error", "brk_error",
"byteview 0.6.1", "byteview 0.8.0",
"derive_deref", "derive_deref",
"itoa", "itoa",
"jiff", "jiff",
@@ -1376,6 +1397,15 @@ dependencies = [
"shlex", "shlex",
] ]
[[package]]
name = "cexpr"
version = "0.6.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "6fac387a98bb7c37292057cffc56d62ecb629900026402633ae9160df93a8766"
dependencies = [
"nom 7.1.3",
]
[[package]] [[package]]
name = "cfb" name = "cfb"
version = "0.7.3" version = "0.7.3"
@@ -1413,6 +1443,17 @@ dependencies = [
"windows-link", "windows-link",
] ]
[[package]]
name = "clang-sys"
version = "1.8.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "0b023947811758c97c59bf9d1c188fd619ad4718dcaa767947df1cadb14f39f4"
dependencies = [
"glob",
"libc",
"libloading",
]
[[package]] [[package]]
name = "clap" name = "clap"
version = "4.5.51" version = "4.5.51"
@@ -1707,9 +1748,9 @@ checksum = "460fbee9c2c2f33933d720630a6a0bac33ba7053db5344fac858d4b8952d77d5"
[[package]] [[package]]
name = "crypto-common" name = "crypto-common"
version = "0.1.6" version = "0.1.7"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "1bfb12502f3fc46cca1bb51ac28df9d618d813cdc3d2f25b9fe775a34af26bb3" checksum = "78c8292055d1c1df0cce5d180393dc8cce0abec0a7102adb6c7b1eef6016d60a"
dependencies = [ dependencies = [
"generic-array", "generic-array",
"typenum", "typenum",
@@ -2040,18 +2081,6 @@ version = "2.3.0"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "37909eebbb50d72f9059c3b6d82c0463f2ff062c9e95845c43a6c9c0355411be" checksum = "37909eebbb50d72f9059c3b6d82c0463f2ff062c9e95845c43a6c9c0355411be"
[[package]]
name = "faststr"
version = "0.2.32"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "baec6a0289d7f1fe5665586ef7340af82e3037207bef60f5785e57569776f0c8"
dependencies = [
"bytes",
"rkyv",
"serde",
"simdutf8",
]
[[package]] [[package]]
name = "fdeflate" name = "fdeflate"
version = "0.3.7" version = "0.3.7"
@@ -2087,14 +2116,14 @@ checksum = "1d674e81391d1e1ab681a28d99df07927c6d4aa5b027d7da16ba32d1d21ecd99"
[[package]] [[package]]
name = "fjall" name = "fjall"
version = "3.0.0-pre.5" version = "3.0.0-pre.6"
dependencies = [ dependencies = [
"byteorder-lite", "byteorder-lite",
"byteview 0.8.0", "byteview 0.8.0",
"dashmap", "dashmap",
"flume", "flume",
"log", "log",
"lsm-tree 3.0.0-pre.5", "lsm-tree 3.0.0-pre.6",
"lz4_flex", "lz4_flex",
"tempfile", "tempfile",
"xxhash-rust", "xxhash-rust",
@@ -2325,9 +2354,9 @@ dependencies = [
[[package]] [[package]]
name = "generic-array" name = "generic-array"
version = "0.14.9" version = "0.14.7"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "4bb6743198531e02858aeaea5398fcc883e71851fcbcb5a2f773e2fb6cb1edf2" checksum = "85649ca51fd72272d7821adaf274ad91c288277713d9c18820d8499a7ff69e9a"
dependencies = [ dependencies = [
"typenum", "typenum",
"version_check", "version_check",
@@ -2372,6 +2401,12 @@ version = "0.32.3"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "e629b9b98ef3dd8afe6ca2bd0f89306cec16d43d907889945bc5d6687f2f13c7" checksum = "e629b9b98ef3dd8afe6ca2bd0f89306cec16d43d907889945bc5d6687f2f13c7"
[[package]]
name = "glob"
version = "0.3.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "0cc23270f6e1808e30a928bdc84dea0b9b4136a8bc82338574f23baf47bbd280"
[[package]] [[package]]
name = "guardian" name = "guardian"
version = "1.3.0" version = "1.3.0"
@@ -2493,9 +2528,9 @@ checksum = "df3b46402a9d5adb4c86a0cf463f42e19994e3ee891101b1841f30a545cb49a9"
[[package]] [[package]]
name = "hyper" name = "hyper"
version = "1.7.0" version = "1.8.0"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "eb3aa54a13a0dfe7fbe3a59e0c76093041720fdc77b110cc0fc260fafb4dc51e" checksum = "1744436df46f0bde35af3eda22aeaba453aada65d8f1c171cd8a5f59030bd69f"
dependencies = [ dependencies = [
"atomic-waker", "atomic-waker",
"bytes", "bytes",
@@ -2736,6 +2771,15 @@ version = "1.70.2"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "a6cb138bb79a146c1bd460005623e142ef0181e3d0219cb493e02f7d08a35695" checksum = "a6cb138bb79a146c1bd460005623e142ef0181e3d0219cb493e02f7d08a35695"
[[package]]
name = "itertools"
version = "0.13.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "413ee7dfc52ee1a4949ceeb7dbc8a33f2d6c088194d9f922fb8318faf1f01186"
dependencies = [
"either",
]
[[package]] [[package]]
name = "itertools" name = "itertools"
version = "0.14.0" version = "0.14.0"
@@ -2887,6 +2931,17 @@ dependencies = [
"windows-link", "windows-link",
] ]
[[package]]
name = "libproc"
version = "0.14.11"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "a54ad7278b8bc5301d5ffd2a94251c004feb971feba96c971ea4063645990757"
dependencies = [
"bindgen",
"errno",
"libc",
]
[[package]] [[package]]
name = "libredox" name = "libredox"
version = "0.1.10" version = "0.1.10"
@@ -2960,7 +3015,7 @@ dependencies = [
[[package]] [[package]]
name = "lsm-tree" name = "lsm-tree"
version = "3.0.0-pre.5" version = "3.0.0-pre.6"
dependencies = [ dependencies = [
"byteorder-lite", "byteorder-lite",
"byteview 0.8.0", "byteview 0.8.0",
@@ -3014,6 +3069,12 @@ version = "0.3.17"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "6877bb514081ee2a7ff5ef9de3281f14a4dd4bceac4c09388074a6b5df8a139a" checksum = "6877bb514081ee2a7ff5ef9de3281f14a4dd4bceac4c09388074a6b5df8a139a"
[[package]]
name = "minimal-lexical"
version = "0.2.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "68354c5c6bd36d73ff3feceb05efa59b6acb7626617f4962be322a825e61f79a"
[[package]] [[package]]
name = "miniz_oxide" name = "miniz_oxide"
version = "0.8.9" version = "0.8.9"
@@ -3049,26 +3110,6 @@ dependencies = [
"windows-sys 0.61.2", "windows-sys 0.61.2",
] ]
[[package]]
name = "munge"
version = "0.4.7"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "5e17401f259eba956ca16491461b6e8f72913a0a114e39736ce404410f915a0c"
dependencies = [
"munge_macro",
]
[[package]]
name = "munge_macro"
version = "0.4.7"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "4568f25ccbd45ab5d5603dc34318c1ec56b117531781260002151b8530a9f931"
dependencies = [
"proc-macro2",
"quote",
"syn 2.0.110",
]
[[package]] [[package]]
name = "nibble_vec" name = "nibble_vec"
version = "0.1.0" version = "0.1.0"
@@ -3090,6 +3131,16 @@ dependencies = [
"libc", "libc",
] ]
[[package]]
name = "nom"
version = "7.1.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "d273983c5a657a70a3e8f2a01329822f3b8c8172b73826411a55751e404a0a4a"
dependencies = [
"memchr",
"minimal-lexical",
]
[[package]] [[package]]
name = "nom" name = "nom"
version = "8.0.0" version = "8.0.0"
@@ -3280,7 +3331,7 @@ checksum = "f978be538ca5e2a64326d24b7991dc658cc8495132833ae387212ab3b8abd70a"
dependencies = [ dependencies = [
"bincode", "bincode",
"flate2", "flate2",
"nom", "nom 8.0.0",
"rustc-hash", "rustc-hash",
"serde", "serde",
"serde_json", "serde_json",
@@ -3377,7 +3428,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "388f6e0dd1a825de3a11d94af540726539cccd428651fee3bab841da196549d4" checksum = "388f6e0dd1a825de3a11d94af540726539cccd428651fee3bab841da196549d4"
dependencies = [ dependencies = [
"bitflags 2.10.0", "bitflags 2.10.0",
"itertools", "itertools 0.14.0",
"oxc_index", "oxc_index",
"oxc_syntax", "oxc_syntax",
"petgraph", "petgraph",
@@ -3498,7 +3549,7 @@ version = "0.96.0"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "ba3695e3b0694093d24f0f7c8b77dc36b1a4b55020bf52166271010c8095bba5" checksum = "ba3695e3b0694093d24f0f7c8b77dc36b1a4b55020bf52166271010c8095bba5"
dependencies = [ dependencies = [
"itertools", "itertools 0.14.0",
"oxc_allocator", "oxc_allocator",
"oxc_ast", "oxc_ast",
"oxc_data_structures", "oxc_data_structures",
@@ -3575,9 +3626,9 @@ dependencies = [
[[package]] [[package]]
name = "oxc_resolver" name = "oxc_resolver"
version = "11.13.1" version = "11.13.2"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "8ab3f270c313ac7f814ce73a64f4bdf36a183dcae4593b2f96c6e6afea8c99d0" checksum = "0f1af25f894076eedc44509ad0cc33afb829aa06ec3f23e395f47bcbc1c6e964"
dependencies = [ dependencies = [
"cfg-if", "cfg-if",
"indexmap", "indexmap",
@@ -3604,7 +3655,7 @@ version = "0.96.0"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "c56958658ca1f9f5f050dc4317821255d2ca132763b6fbee9227e45ef79ed173" checksum = "c56958658ca1f9f5f050dc4317821255d2ca132763b6fbee9227e45ef79ed173"
dependencies = [ dependencies = [
"itertools", "itertools 0.14.0",
"oxc_allocator", "oxc_allocator",
"oxc_ast", "oxc_ast",
"oxc_ast_visit", "oxc_ast_visit",
@@ -4065,26 +4116,6 @@ dependencies = [
"unicode-ident", "unicode-ident",
] ]
[[package]]
name = "ptr_meta"
version = "0.3.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "0b9a0cf95a1196af61d4f1cbdab967179516d9a4a4312af1f31948f8f6224a79"
dependencies = [
"ptr_meta_derive",
]
[[package]]
name = "ptr_meta_derive"
version = "0.3.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "7347867d0a7e1208d93b46767be83e2b8f978c3dad35f775ac8d8847551d6fe1"
dependencies = [
"proc-macro2",
"quote",
"syn 2.0.110",
]
[[package]] [[package]]
name = "quick_cache" name = "quick_cache"
version = "0.6.18" version = "0.6.18"
@@ -4131,15 +4162,6 @@ dependencies = [
"nibble_vec", "nibble_vec",
] ]
[[package]]
name = "rancor"
version = "0.1.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "a063ea72381527c2a0561da9c80000ef822bdd7c3241b1cc1b12100e3df081ee"
dependencies = [
"ptr_meta",
]
[[package]] [[package]]
name = "rand" name = "rand"
version = "0.8.5" version = "0.8.5"
@@ -4219,7 +4241,7 @@ dependencies = [
[[package]] [[package]]
name = "rawdb" name = "rawdb"
version = "0.3.16" version = "0.3.20"
dependencies = [ dependencies = [
"libc", "libc",
"log", "log",
@@ -4327,12 +4349,6 @@ dependencies = [
"memchr", "memchr",
] ]
[[package]]
name = "rend"
version = "0.5.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "cadadef317c2f20755a64d7fdc48f9e7178ee6b0e1f7fce33fa60f1d68a276e6"
[[package]] [[package]]
name = "ring" name = "ring"
version = "0.17.14" version = "0.17.14"
@@ -4347,35 +4363,6 @@ dependencies = [
"windows-sys 0.52.0", "windows-sys 0.52.0",
] ]
[[package]]
name = "rkyv"
version = "0.8.12"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "35a640b26f007713818e9a9b65d34da1cf58538207b052916a83d80e43f3ffa4"
dependencies = [
"bytes",
"hashbrown 0.15.5",
"indexmap",
"munge",
"ptr_meta",
"rancor",
"rend",
"rkyv_derive",
"tinyvec",
"uuid",
]
[[package]]
name = "rkyv_derive"
version = "0.8.12"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "bd83f5f173ff41e00337d97f6572e416d022ef8a19f371817259ae960324c482"
dependencies = [
"proc-macro2",
"quote",
"syn 2.0.110",
]
[[package]] [[package]]
name = "rolldown-ariadne" name = "rolldown-ariadne"
version = "0.5.3" version = "0.5.3"
@@ -4816,45 +4803,6 @@ dependencies = [
"windows-sys 0.60.2", "windows-sys 0.60.2",
] ]
[[package]]
name = "sonic-number"
version = "0.1.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "a8a74044c092f4f43ca7a6cfd62854cf9fb5ac8502b131347c990bf22bef1dfe"
dependencies = [
"cfg-if",
]
[[package]]
name = "sonic-rs"
version = "0.5.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "4425ea8d66ec950e0a8f2ef52c766cc3d68d661d9a0845c353c40833179fd866"
dependencies = [
"ahash",
"bumpalo",
"bytes",
"cfg-if",
"faststr",
"itoa",
"ref-cast",
"ryu",
"serde",
"simdutf8",
"sonic-number",
"sonic-simd",
"thiserror 2.0.17",
]
[[package]]
name = "sonic-simd"
version = "0.1.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "5707edbfb34a40c9f2a55fa09a49101d9fec4e0cc171ce386086bd9616f34257"
dependencies = [
"cfg-if",
]
[[package]] [[package]]
name = "spin" name = "spin"
version = "0.9.8" version = "0.9.8"
@@ -5087,21 +5035,6 @@ dependencies = [
"zerovec", "zerovec",
] ]
[[package]]
name = "tinyvec"
version = "1.10.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "bfa5fdc3bce6191a1dbc8c02d5c8bffcf557bafa17c124c5264a458f1b0613fa"
dependencies = [
"tinyvec_macros",
]
[[package]]
name = "tinyvec_macros"
version = "0.1.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "1f3ccbac311fea05f86f61904b462b55fb3df8837a366dfc601a0161d0532f20"
[[package]] [[package]]
name = "tokio" name = "tokio"
version = "1.48.0" version = "1.48.0"
@@ -5524,7 +5457,7 @@ checksum = "8f54a172d0620933a27a4360d3db3e2ae0dd6cceae9730751a036bbf182c4b23"
[[package]] [[package]]
name = "vecdb" name = "vecdb"
version = "0.3.16" version = "0.3.20"
dependencies = [ dependencies = [
"ctrlc", "ctrlc",
"log", "log",
@@ -5533,14 +5466,14 @@ dependencies = [
"rawdb", "rawdb",
"serde", "serde",
"serde_derive", "serde_derive",
"sonic-rs", "serde_json",
"vecdb_derive", "vecdb_derive",
"zerocopy", "zerocopy",
] ]
[[package]] [[package]]
name = "vecdb_derive" name = "vecdb_derive"
version = "0.3.16" version = "0.3.20"
dependencies = [ dependencies = [
"quote", "quote",
"syn 2.0.110", "syn 2.0.110",
@@ -5661,9 +5594,9 @@ checksum = "5f20c57d8d7db6d3b86154206ae5d8fba62dd39573114de97c2cb0578251f8e1"
[[package]] [[package]]
name = "weezl" name = "weezl"
version = "0.1.11" version = "0.1.12"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "009936b22a61d342859b5f0ea64681cbb35a358ab548e2a44a8cf0dac2d980b8" checksum = "a28ac98ddc8b9274cb41bb4d9d4d5c425b6020c50c46f25559911905610b4a88"
[[package]] [[package]]
name = "winapi" name = "winapi"

View File

@@ -58,8 +58,8 @@ brk_store = { version = "0.0.111", path = "crates/brk_store" }
brk_types = { version = "0.0.111", path = "crates/brk_types" } brk_types = { version = "0.0.111", path = "crates/brk_types" }
brk_traversable = { version = "0.0.111", path = "crates/brk_traversable", features = ["derive"] } brk_traversable = { version = "0.0.111", path = "crates/brk_traversable", features = ["derive"] }
brk_traversable_derive = { version = "0.0.111", path = "crates/brk_traversable_derive" } brk_traversable_derive = { version = "0.0.111", path = "crates/brk_traversable_derive" }
byteview = "=0.6.1" # byteview = "=0.6.1"
# byteview = "~0.8.0" byteview = "~0.8.0"
derive_deref = "1.1.1" derive_deref = "1.1.1"
# fjall2 = { version = "2.11.5", package = "brk_fjall" } # fjall2 = { version = "2.11.5", package = "brk_fjall" }
fjall2 = { path = "../fjall2", package = "brk_fjall" } fjall2 = { path = "../fjall2", package = "brk_fjall" }
@@ -78,10 +78,9 @@ serde = "1.0.228"
serde_bytes = "0.11.19" serde_bytes = "0.11.19"
serde_derive = "1.0.228" serde_derive = "1.0.228"
serde_json = { version = "1.0.145", features = ["float_roundtrip"] } serde_json = { version = "1.0.145", features = ["float_roundtrip"] }
sonic-rs = "0.5.6"
tokio = { version = "1.48.0", features = ["rt-multi-thread"] } tokio = { version = "1.48.0", features = ["rt-multi-thread"] }
vecdb = { path = "../seqdb/crates/vecdb", features = ["derive"] } vecdb = { path = "../seqdb/crates/vecdb", features = ["derive"] }
# vecdb = { version = "0.3.16", features = ["derive"] } # vecdb = { version = "0.3.20", features = ["derive"] }
zerocopy = { version = "0.8.27", features = ["derive"] } zerocopy = { version = "0.8.27", features = ["derive"] }
[workspace.metadata.release] [workspace.metadata.release]

View File

@@ -13,3 +13,6 @@ build = "build.rs"
brk_error = { workspace = true } brk_error = { workspace = true }
brk_logger = { workspace = true } brk_logger = { workspace = true }
parking_lot = { workspace = true } parking_lot = { workspace = true }
[target.'cfg(target_os = "macos")'.dependencies]
libproc = "0.14"

View File

@@ -1,24 +1,34 @@
use std::collections::HashMap; use std::collections::HashMap;
use std::fs; use std::fs::{self, File};
use std::io; use std::io::{self, Write};
use std::os::unix::fs::MetadataExt; use std::os::unix::fs::MetadataExt;
use std::path::{Path, PathBuf}; use std::path::{Path, PathBuf};
use std::time::SystemTime; use std::time::SystemTime;
pub struct DiskMonitor { pub struct DiskMonitor {
cache: HashMap<PathBuf, (u64, SystemTime)>, // path -> (bytes_used, mtime) cache: HashMap<PathBuf, (u64, SystemTime)>, // path -> (bytes_used, mtime)
monitored_path: PathBuf,
writer: File,
} }
impl DiskMonitor { impl DiskMonitor {
pub fn new() -> Self { pub fn new(monitored_path: &Path, csv_path: &Path) -> io::Result<Self> {
Self { let mut writer = File::create(csv_path)?;
writeln!(writer, "timestamp_ms,disk_usage")?;
Ok(Self {
cache: HashMap::new(), cache: HashMap::new(),
} monitored_path: monitored_path.to_path_buf(),
writer,
})
} }
/// Get disk usage in bytes (matches `du` and Finder) /// Record disk usage at the given timestamp
pub fn get_disk_usage(&mut self, path: &Path) -> io::Result<u64> { pub fn record(&mut self, elapsed_ms: u128) -> io::Result<()> {
self.scan_recursive(path) if let Ok(bytes) = self.scan_recursive(&self.monitored_path.clone()) {
writeln!(self.writer, "{},{}", elapsed_ms, bytes)?;
}
Ok(())
} }
fn scan_recursive(&mut self, path: &Path) -> io::Result<u64> { fn scan_recursive(&mut self, path: &Path) -> io::Result<u64> {

View File

@@ -0,0 +1,81 @@
use std::fs::File;
use std::io::{self, Write};
use std::path::Path;
#[cfg(target_os = "linux")]
use std::fs;
#[cfg(target_os = "macos")]
use libproc::pid_rusage::{pidrusage, RUsageInfoV2};
/// Samples cumulative disk I/O counters for a single process and appends
/// them as rows to a CSV file.
pub struct IoMonitor {
    /// PID of the process whose I/O counters are sampled.
    pid: u32,
    /// CSV output file; the header row is written once in `new`.
    writer: File,
}

impl IoMonitor {
    /// Create a monitor for `pid`, (re)creating the CSV file at `csv_path`
    /// and writing the `timestamp_ms,bytes_read,bytes_written` header.
    ///
    /// # Errors
    /// Returns an error if the file cannot be created or written.
    pub fn new(pid: u32, csv_path: &Path) -> io::Result<Self> {
        let mut writer = File::create(csv_path)?;
        writeln!(writer, "timestamp_ms,bytes_read,bytes_written")?;
        Ok(Self { pid, writer })
    }

    /// Record I/O usage at the given timestamp (milliseconds since the
    /// benchmark started).
    ///
    /// Sampling errors are deliberately swallowed (best effort): a failed
    /// read of the counters simply produces no row for this tick. Only CSV
    /// write errors are propagated.
    pub fn record(&mut self, elapsed_ms: u128) -> io::Result<()> {
        if let Ok((read, written)) = self.get_io_usage() {
            writeln!(self.writer, "{},{},{}", elapsed_ms, read, written)?;
        }
        Ok(())
    }

    /// Get cumulative I/O usage in bytes for the monitored process.
    /// Returns `(bytes_read, bytes_written)`.
    fn get_io_usage(&self) -> io::Result<(u64, u64)> {
        #[cfg(target_os = "linux")]
        {
            self.get_io_usage_linux()
        }
        #[cfg(target_os = "macos")]
        {
            self.get_io_usage_macos()
        }
        // Previously this function had no arm for other targets, which made
        // the crate fail to compile there. Fail explicitly at runtime instead.
        #[cfg(not(any(target_os = "linux", target_os = "macos")))]
        {
            Err(io::Error::new(
                io::ErrorKind::Unsupported,
                "process I/O stats are only supported on Linux and macOS",
            ))
        }
    }

    /// Parse the `read_bytes` / `write_bytes` fields from `/proc/<pid>/io`.
    /// These count bytes actually fetched from / sent to the storage layer.
    #[cfg(target_os = "linux")]
    fn get_io_usage_linux(&self) -> io::Result<(u64, u64)> {
        let io_content = fs::read_to_string(format!("/proc/{}/io", self.pid))?;
        let mut read_bytes = None;
        let mut write_bytes = None;
        for line in io_content.lines() {
            // Lines look like "read_bytes: 12345".
            if let Some(value) = line.strip_prefix("read_bytes:") {
                read_bytes = value.trim().parse::<u64>().ok();
            } else if let Some(value) = line.strip_prefix("write_bytes:") {
                write_bytes = value.trim().parse::<u64>().ok();
            }
        }
        match (read_bytes, write_bytes) {
            (Some(r), Some(w)) => Ok((r, w)),
            _ => Err(io::Error::new(
                io::ErrorKind::InvalidData,
                "Failed to parse I/O stats from /proc/[pid]/io",
            )),
        }
    }

    /// Query the kernel's per-process rusage counters via `libproc`.
    #[cfg(target_os = "macos")]
    fn get_io_usage_macos(&self) -> io::Result<(u64, u64)> {
        match pidrusage::<RUsageInfoV2>(self.pid as i32) {
            Ok(info) => Ok((info.ri_diskio_bytesread, info.ri_diskio_byteswritten)),
            Err(_) => Err(io::Error::other("Failed to get process I/O stats")),
        }
    }
}

View File

@@ -1,6 +1,5 @@
use std::{ use std::{
fs, fs,
io::{self, Write},
path::{Path, PathBuf}, path::{Path, PathBuf},
sync::{ sync::{
Arc, Arc,
@@ -13,10 +12,12 @@ use std::{
use brk_error::Result; use brk_error::Result;
mod disk; mod disk;
mod io;
mod memory; mod memory;
mod progression; mod progression;
use disk::*; use disk::*;
use io::*;
use memory::*; use memory::*;
use parking_lot::Mutex; use parking_lot::Mutex;
use progression::*; use progression::*;
@@ -53,7 +54,7 @@ impl Bencher {
brk_logger::register_hook(move |message| { brk_logger::register_hook(move |message| {
progression_clone.check_and_record(message); progression_clone.check_and_record(message);
}) })
.map_err(|e| io::Error::new(io::ErrorKind::AlreadyExists, e))?; .map_err(|e| std::io::Error::new(std::io::ErrorKind::AlreadyExists, e))?;
Ok(Self(Arc::new(BencherInner { Ok(Self(Arc::new(BencherInner {
bench_dir, bench_dir,
@@ -132,36 +133,20 @@ fn monitor_resources(
bench_dir: &Path, bench_dir: &Path,
stop_flag: Arc<AtomicBool>, stop_flag: Arc<AtomicBool>,
) -> Result<()> { ) -> Result<()> {
let disk_file = bench_dir.join("disk.csv");
let memory_file = bench_dir.join("memory.csv");
let mut disk_writer = fs::File::create(disk_file)?;
let mut memory_writer = fs::File::create(memory_file)?;
writeln!(disk_writer, "timestamp_ms,disk_usage")?;
writeln!(
memory_writer,
"timestamp_ms,phys_footprint,phys_footprint_peak"
)?;
let pid = std::process::id(); let pid = std::process::id();
let start = Instant::now(); let start = Instant::now();
let mut disk_monitor = DiskMonitor::new(); let mut disk_monitor = DiskMonitor::new(monitored_path, &bench_dir.join("disk.csv"))?;
let memory_monitor = MemoryMonitor::new(pid); let mut memory_monitor = MemoryMonitor::new(pid, &bench_dir.join("memory.csv"))?;
let mut io_monitor = IoMonitor::new(pid, &bench_dir.join("io.csv"))?;
'l: loop { 'l: loop {
let elapsed_ms = start.elapsed().as_millis(); let elapsed_ms = start.elapsed().as_millis();
if let Ok(bytes) = disk_monitor.get_disk_usage(monitored_path) { disk_monitor.record(elapsed_ms)?;
writeln!(disk_writer, "{},{}", elapsed_ms, bytes)?; memory_monitor.record(elapsed_ms)?;
} io_monitor.record(elapsed_ms)?;
if let Ok((footprint, peak)) = memory_monitor.get_memory_usage() {
writeln!(memory_writer, "{},{},{}", elapsed_ms, footprint, peak)?;
}
// Best version
for _ in 0..50 { for _ in 0..50 {
// 50 * 100ms = 5 seconds // 50 * 100ms = 5 seconds
if stop_flag.load(Ordering::Relaxed) { if stop_flag.load(Ordering::Relaxed) {

View File

@@ -1,4 +1,6 @@
use std::io; use std::fs::File;
use std::io::{self, Write};
use std::path::Path;
#[cfg(target_os = "linux")] #[cfg(target_os = "linux")]
use std::fs; use std::fs;
@@ -8,16 +10,28 @@ use std::process::Command;
pub struct MemoryMonitor { pub struct MemoryMonitor {
pid: u32, pid: u32,
writer: File,
} }
impl MemoryMonitor { impl MemoryMonitor {
pub fn new(pid: u32) -> Self { pub fn new(pid: u32, csv_path: &Path) -> io::Result<Self> {
Self { pid } let mut writer = File::create(csv_path)?;
writeln!(writer, "timestamp_ms,phys_footprint,phys_footprint_peak")?;
Ok(Self { pid, writer })
}
/// Record memory usage at the given timestamp
pub fn record(&mut self, elapsed_ms: u128) -> io::Result<()> {
if let Ok((footprint, peak)) = self.get_memory_usage() {
writeln!(self.writer, "{},{},{}", elapsed_ms, footprint, peak)?;
}
Ok(())
} }
/// Get memory usage in bytes /// Get memory usage in bytes
/// Returns (current_bytes, peak_bytes) /// Returns (current_bytes, peak_bytes)
pub fn get_memory_usage(&self) -> io::Result<(u64, u64)> { fn get_memory_usage(&self) -> io::Result<(u64, u64)> {
#[cfg(target_os = "linux")] #[cfg(target_os = "linux")]
{ {
self.get_memory_usage_linux() self.get_memory_usage_linux()

View File

@@ -54,8 +54,8 @@ const CHART_COLORS: [RGBColor; 6] = [
RGBColor(255, 159, 64), // Orange RGBColor(255, 159, 64), // Orange
]; ];
// Time window buffer in milliseconds (5 seconds) // Time window buffer in milliseconds
const TIME_BUFFER_MS: u64 = 5000; const TIME_BUFFER_MS: u64 = 10_000;
pub struct Visualizer { pub struct Visualizer {
workspace_root: PathBuf, workspace_root: PathBuf,
@@ -106,6 +106,7 @@ impl Visualizer {
let disk_runs = self.read_benchmark_runs(crate_path, "disk.csv")?; let disk_runs = self.read_benchmark_runs(crate_path, "disk.csv")?;
let memory_runs = self.read_benchmark_runs(crate_path, "memory.csv")?; let memory_runs = self.read_benchmark_runs(crate_path, "memory.csv")?;
let progress_runs = self.read_benchmark_runs(crate_path, "progress.csv")?; let progress_runs = self.read_benchmark_runs(crate_path, "progress.csv")?;
let io_runs = self.read_benchmark_runs(crate_path, "io.csv")?;
// Generate combined charts (all runs together) // Generate combined charts (all runs together)
if !disk_runs.is_empty() { if !disk_runs.is_empty() {
@@ -120,6 +121,11 @@ impl Visualizer {
self.generate_progress_chart(crate_path, crate_name, &progress_runs)?; self.generate_progress_chart(crate_path, crate_name, &progress_runs)?;
} }
if !io_runs.is_empty() {
self.generate_io_read_chart(crate_path, crate_name, &io_runs)?;
self.generate_io_write_chart(crate_path, crate_name, &io_runs)?;
}
// Generate individual charts for each run // Generate individual charts for each run
for run in &disk_runs { for run in &disk_runs {
let run_path = crate_path.join(&run.run_id); let run_path = crate_path.join(&run.run_id);
@@ -136,6 +142,12 @@ impl Visualizer {
self.generate_progress_chart(&run_path, crate_name, slice::from_ref(run))?; self.generate_progress_chart(&run_path, crate_name, slice::from_ref(run))?;
} }
for run in &io_runs {
let run_path = crate_path.join(&run.run_id);
self.generate_io_read_chart(&run_path, crate_name, slice::from_ref(run))?;
self.generate_io_write_chart(&run_path, crate_name, slice::from_ref(run))?;
}
Ok(()) Ok(())
} }
@@ -153,8 +165,8 @@ impl Visualizer {
.ok_or("Invalid run ID")? .ok_or("Invalid run ID")?
.to_string(); .to_string();
// Skip directories that start with underscore // Skip directories that start with underscore or contain only numbers
if run_id.starts_with('_') { if run_id.starts_with('_') || run_id.chars().all(|c| c.is_ascii_digit()) {
continue; continue;
} }
@@ -557,6 +569,223 @@ impl Visualizer {
Ok(enhanced_runs) Ok(enhanced_runs)
} }
/// Read each run's `io.csv` into `(run_id, read samples, write samples)`.
///
/// `crate_path` is either the run folder itself (individual charts) or the
/// crate folder containing one subfolder per run (combined charts); a
/// directly-present `io.csv` takes precedence over the nested one.
#[allow(clippy::type_complexity)]
fn read_io_data(
    &self,
    crate_path: &Path,
    runs: &[BenchmarkRun],
) -> Result<Vec<(String, Vec<DataPoint>, Vec<DataPoint>)>> {
    // Parse one data row of the form "timestamp_ms,bytes_read,bytes_written";
    // returns None for the header or any malformed row.
    let parse_row = |line: &str| -> Option<(u64, f64, f64)> {
        let mut cols = line.split(',');
        let timestamp_ms = cols.next()?.parse::<u64>().ok()?;
        let bytes_read = cols.next()?.parse::<f64>().ok()?;
        let bytes_written = cols.next()?.parse::<f64>().ok()?;
        Some((timestamp_ms, bytes_read, bytes_written))
    };

    let mut io_runs = Vec::new();
    for run in runs {
        let direct = crate_path.join("io.csv");
        let csv_path = if direct.exists() {
            direct
        } else {
            crate_path.join(&run.run_id).join("io.csv")
        };

        // Runs without a readable io.csv simply contribute no entry.
        let Ok(content) = fs::read_to_string(&csv_path) else {
            continue;
        };

        let mut read_data = Vec::new();
        let mut write_data = Vec::new();
        // Skip the header row, then keep only well-formed rows.
        for (timestamp_ms, bytes_read, bytes_written) in
            content.lines().skip(1).filter_map(parse_row)
        {
            read_data.push(DataPoint {
                timestamp_ms,
                value: bytes_read,
            });
            write_data.push(DataPoint {
                timestamp_ms,
                value: bytes_written,
            });
        }
        io_runs.push((run.run_id.clone(), read_data, write_data));
    }
    Ok(io_runs)
}
/// Render `io_read_chart.svg` under `crate_path`: one line per run showing
/// cumulative bytes read over time, trimmed to the shortest run's window
/// (plus `TIME_BUFFER_MS`) so runs stay visually comparable.
fn generate_io_read_chart(
    &self,
    crate_path: &Path,
    crate_name: &str,
    runs: &[BenchmarkRun],
) -> Result<()> {
    let output_path = crate_path.join("io_read_chart.svg");
    let root = SVGBackend::new(&output_path, SIZE).into_drawing_area();
    root.fill(&BG_COLOR)?;
    // Calculate time window based on shortest run + buffer
    let min_max_time_ms = Self::calculate_min_max_time(runs) + TIME_BUFFER_MS;
    let max_time_s = (min_max_time_ms as f64) / 1000.0;
    // Read I/O CSV files which have 3 columns: timestamp, bytes_read, bytes_written
    let io_runs = self.read_io_data(crate_path, runs)?;
    // Trim I/O runs to the same time window and extract only read data
    // (the write column is dropped; generate_io_write_chart handles it).
    let trimmed_io_runs: Vec<_> = io_runs
        .into_iter()
        .map(|(run_id, read_data, _write_data)| {
            let trimmed_read: Vec<_> = read_data
                .into_iter()
                .filter(|d| d.timestamp_ms <= min_max_time_ms)
                .collect();
            (run_id, trimmed_read)
        })
        .collect();
    // Largest read value across all runs, used to pick the y-axis unit.
    let max_value = trimmed_io_runs
        .iter()
        .flat_map(|(_, data)| data.iter().map(|d| d.value))
        .fold(0.0_f64, f64::max);
    let (max_value_scaled, unit) = Self::format_bytes(max_value);
    // NOTE(review): if every sample is 0, max_value_scaled is presumably 0
    // and scale_factor becomes NaN — confirm format_bytes guards this case.
    let scale_factor = max_value / max_value_scaled;
    // Format time based on duration
    let (max_time_scaled, _time_unit, time_label) = Self::format_time(max_time_s);
    // 2.5% / 10% headroom on the x/y axes so lines don't touch the frame.
    let mut chart = ChartBuilder::on(&root)
        .caption(
            format!("{} — I/O Read", crate_name),
            (FONT, FONT_SIZE_BIG).into_font().color(&TEXT_COLOR),
        )
        .margin(20)
        .x_label_area_size(50)
        .margin_left(50)
        .right_y_label_area_size(75)
        .build_cartesian_2d(0.0..max_time_scaled * 1.025, 0.0..max_value_scaled * 1.1)?;
    configure_chart_mesh!(
        chart,
        time_label,
        format!("Bytes Read ({})", unit),
        |y: &f64| format!("{:.2}", y)
    );
    // Convert raw milliseconds into the scaled time unit chosen above.
    let time_divisor = max_time_s / max_time_scaled;
    for (idx, (run_id, read_data)) in trimmed_io_runs.iter().enumerate() {
        // Colors cycle when there are more runs than palette entries.
        let color = CHART_COLORS[idx % CHART_COLORS.len()];
        Self::draw_line_series(
            &mut chart,
            read_data.iter().map(|d| {
                (
                    d.timestamp_ms as f64 / 1000.0 / time_divisor,
                    d.value / scale_factor,
                )
            }),
            run_id,
            color,
        )?;
    }
    Self::configure_series_labels(&mut chart)?;
    root.present()?;
    println!("Generated: {}", output_path.display());
    Ok(())
}
fn generate_io_write_chart(
&self,
crate_path: &Path,
crate_name: &str,
runs: &[BenchmarkRun],
) -> Result<()> {
let output_path = crate_path.join("io_write_chart.svg");
let root = SVGBackend::new(&output_path, SIZE).into_drawing_area();
root.fill(&BG_COLOR)?;
// Calculate time window based on shortest run + buffer
let min_max_time_ms = Self::calculate_min_max_time(runs) + TIME_BUFFER_MS;
let max_time_s = (min_max_time_ms as f64) / 1000.0;
// Read I/O CSV files which have 3 columns: timestamp, bytes_read, bytes_written
let io_runs = self.read_io_data(crate_path, runs)?;
// Trim I/O runs to the same time window and extract only write data
let trimmed_io_runs: Vec<_> = io_runs
.into_iter()
.map(|(run_id, _read_data, write_data)| {
let trimmed_write: Vec<_> = write_data
.into_iter()
.filter(|d| d.timestamp_ms <= min_max_time_ms)
.collect();
(run_id, trimmed_write)
})
.collect();
let max_value = trimmed_io_runs
.iter()
.flat_map(|(_, data)| data.iter().map(|d| d.value))
.fold(0.0_f64, f64::max);
let (max_value_scaled, unit) = Self::format_bytes(max_value);
let scale_factor = max_value / max_value_scaled;
// Format time based on duration
let (max_time_scaled, _time_unit, time_label) = Self::format_time(max_time_s);
let mut chart = ChartBuilder::on(&root)
.caption(
format!("{} — I/O Write", crate_name),
(FONT, FONT_SIZE_BIG).into_font().color(&TEXT_COLOR),
)
.margin(20)
.x_label_area_size(50)
.margin_left(50)
.right_y_label_area_size(75)
.build_cartesian_2d(0.0..max_time_scaled * 1.025, 0.0..max_value_scaled * 1.1)?;
configure_chart_mesh!(
chart,
time_label,
format!("Bytes Written ({})", unit),
|y: &f64| format!("{:.2}", y)
);
let time_divisor = max_time_s / max_time_scaled;
for (idx, (run_id, write_data)) in trimmed_io_runs.iter().enumerate() {
let color = CHART_COLORS[idx % CHART_COLORS.len()];
Self::draw_line_series(
&mut chart,
write_data.iter().map(|d| {
(
d.timestamp_ms as f64 / 1000.0 / time_divisor,
d.value / scale_factor,
)
}),
run_id,
color,
)?;
}
Self::configure_series_labels(&mut chart)?;
root.present()?;
println!("Generated: {}", output_path.display());
Ok(())
}
fn generate_progress_chart( fn generate_progress_chart(
&self, &self,
crate_path: &Path, crate_path: &Path,

View File

@@ -11,6 +11,7 @@ build = "build.rs"
[dependencies] [dependencies]
bitcoin = { workspace = true } bitcoin = { workspace = true }
brk_bencher = { workspace = true }
brk_error = { workspace = true } brk_error = { workspace = true }
brk_fetcher = { workspace = true } brk_fetcher = { workspace = true }
brk_grouper = { workspace = true } brk_grouper = { workspace = true }

View File

@@ -0,0 +1,73 @@
use std::{env, path::Path, thread, time::Instant};
use brk_bencher::Bencher;
use brk_computer::Computer;
use brk_error::Result;
use brk_fetcher::Fetcher;
use brk_indexer::Indexer;
use brk_iterator::Blocks;
use brk_reader::Reader;
use brk_rpc::{Auth, Client};
use log::{debug, info};
use vecdb::Exit;
pub fn main() -> Result<()> {
// Can't increase main thread's stack size, thus we need to use another thread
thread::Builder::new()
.stack_size(512 * 1024 * 1024)
.spawn(run)?
.join()
.unwrap()
}
fn run() -> Result<()> {
brk_logger::init(None)?;
let bitcoin_dir = Client::default_bitcoin_path();
// let bitcoin_dir = Path::new("/Volumes/WD_BLACK/bitcoin");
let outputs_dir = Path::new(&env::var("HOME").unwrap()).join(".brk");
let outputs_benches_dir = outputs_dir.join("benches");
// let outputs_dir = Path::new("../../_outputs");
let client = Client::new(
Client::default_url(),
Auth::CookieFile(bitcoin_dir.join(".cookie")),
)?;
let reader = Reader::new(bitcoin_dir.join("blocks"), &client);
let blocks = Blocks::new(&client, &reader);
let mut indexer = Indexer::forced_import(&outputs_dir)?;
let fetcher = Fetcher::import(true, None)?;
let mut computer = Computer::forced_import(&outputs_benches_dir, &indexer, Some(fetcher))?;
let mut bencher =
Bencher::from_cargo_env(env!("CARGO_PKG_NAME"), &outputs_dir.join("computed"))?;
bencher.start()?;
let exit = Exit::new();
exit.set_ctrlc_handler();
let bencher_clone = bencher.clone();
exit.register_cleanup(move || {
let _ = bencher_clone.stop();
debug!("Bench stopped.");
});
let i = Instant::now();
let starting_indexes = indexer.checked_index(&blocks, &client, &exit)?;
info!("Done in {:?}", i.elapsed());
let i = Instant::now();
computer.compute(&indexer, starting_indexes, &reader, &exit)?;
info!("Done in {:?}", i.elapsed());
// We want to benchmark the drop too
drop(computer);
drop(indexer);
Ok(())
}

View File

@@ -48,6 +48,8 @@ impl Vecs {
.collect(), .collect(),
)?; )?;
this.db.compact()?;
Ok(this) Ok(this)
} }

View File

@@ -10,8 +10,8 @@ use brk_types::{
TxVersion, Version, WeekIndex, Weight, YearIndex, TxVersion, Version, WeekIndex, Weight, YearIndex,
}; };
use vecdb::{ use vecdb::{
Database, EagerVec, Exit, IterableCloneableVec, IterableVec, LazyVecFrom1, LazyVecFrom2, AnyVec, Database, EagerVec, Exit, GenericStoredVec, IterableCloneableVec, IterableVec,
LazyVecFrom3, PAGE_SIZE, TypedVecIterator, VecIndex, LazyVecFrom1, LazyVecFrom2, PAGE_SIZE, TypedVecIterator, VecIndex, unlikely,
}; };
use crate::grouped::{ use crate::grouped::{
@@ -71,13 +71,11 @@ pub struct Vecs {
pub indexes_to_fee: ComputedValueVecsFromTxindex, pub indexes_to_fee: ComputedValueVecsFromTxindex,
pub indexes_to_fee_rate: ComputedVecsFromTxindex<FeeRate>, pub indexes_to_fee_rate: ComputedVecsFromTxindex<FeeRate>,
/// Value == 0 when Coinbase /// Value == 0 when Coinbase
pub txindex_to_input_value: pub txindex_to_input_value: EagerVec<TxIndex, Sats>,
LazyVecFrom3<TxIndex, Sats, TxIndex, TxInIndex, TxIndex, StoredU64, TxInIndex, Sats>,
pub indexes_to_sent: ComputedValueVecsFromHeight, pub indexes_to_sent: ComputedValueVecsFromHeight,
// pub indexes_to_input_value: ComputedVecsFromTxindex<Sats>, // pub indexes_to_input_value: ComputedVecsFromTxindex<Sats>,
pub indexes_to_opreturn_count: ComputedVecsFromHeight<StoredU64>, pub indexes_to_opreturn_count: ComputedVecsFromHeight<StoredU64>,
pub txindex_to_output_value: pub txindex_to_output_value: EagerVec<TxIndex, Sats>,
LazyVecFrom3<TxIndex, Sats, TxIndex, TxOutIndex, TxIndex, StoredU64, TxOutIndex, Sats>,
// pub indexes_to_output_value: ComputedVecsFromTxindex<Sats>, // pub indexes_to_output_value: ComputedVecsFromTxindex<Sats>,
pub indexes_to_p2a_count: ComputedVecsFromHeight<StoredU64>, pub indexes_to_p2a_count: ComputedVecsFromHeight<StoredU64>,
pub indexes_to_p2ms_count: ComputedVecsFromHeight<StoredU64>, pub indexes_to_p2ms_count: ComputedVecsFromHeight<StoredU64>,
@@ -151,7 +149,7 @@ impl Vecs {
price: Option<&price::Vecs>, price: Option<&price::Vecs>,
) -> Result<Self> { ) -> Result<Self> {
let db = Database::open(&parent_path.join("chain"))?; let db = Database::open(&parent_path.join("chain"))?;
db.set_min_len(PAGE_SIZE * 10_000_000)?; db.set_min_len(PAGE_SIZE * 50_000_000)?;
let version = parent_version + Version::ZERO; let version = parent_version + Version::ZERO;
@@ -202,82 +200,11 @@ impl Vecs {
}, },
); );
let txindex_to_input_value = LazyVecFrom3::init( let txindex_to_input_value =
"input_value", EagerVec::forced_import_compressed(&db, "input_value", version + Version::ZERO)?;
version + Version::ZERO,
indexer.vecs.txindex_to_first_txinindex.boxed_clone(),
indexes.txindex_to_input_count.boxed_clone(),
txinindex_to_value.boxed_clone(),
|index: TxIndex,
txindex_to_first_txinindex_iter,
txindex_to_input_count_iter,
txinindex_to_value_iter| {
let txindex = index.to_usize();
txindex_to_first_txinindex_iter
.get_at(txindex)
.map(|first_index| {
let first_index = usize::from(first_index);
let count = *txindex_to_input_count_iter.get_at_unwrap(txindex);
let range = first_index..first_index + count as usize;
range.into_iter().fold(Sats::ZERO, |total, txinindex| {
total + txinindex_to_value_iter.get_at_unwrap(txinindex)
})
})
},
);
// let indexes_to_input_value: ComputedVecsFromTxindex<Sats> = let txindex_to_output_value =
// ComputedVecsFromTxindex::forced_import( EagerVec::forced_import_compressed(&db, "output_value", version + Version::ZERO)?;
// db,
// "input_value",
// true,
// version + Version::ZERO,
// format,
// computation,
// StorableVecGeneatorOptions::default()
// .add_average()
// .add_sum()
// .add_cumulative(),
// )?;
let txindex_to_output_value = LazyVecFrom3::init(
"output_value",
version + Version::ZERO,
indexer.vecs.txindex_to_first_txoutindex.boxed_clone(),
indexes.txindex_to_output_count.boxed_clone(),
indexer.vecs.txoutindex_to_value.boxed_clone(),
|index: TxIndex,
txindex_to_first_txoutindex_iter,
txindex_to_output_count_iter,
txoutindex_to_value_iter| {
let txindex = index.to_usize();
txindex_to_first_txoutindex_iter
.get_at(txindex)
.map(|first_index| {
let first_index = usize::from(first_index);
let count = *txindex_to_output_count_iter.get_at_unwrap(txindex);
let range = first_index..first_index + count as usize;
range.into_iter().fold(Sats::ZERO, |total, txoutindex| {
let v = txoutindex_to_value_iter.get_at_unwrap(txoutindex);
total + v
})
})
},
);
// let indexes_to_output_value: ComputedVecsFromTxindex<Sats> =
// ComputedVecsFromTxindex::forced_import(
// db,
// "output_value",
// true,
// version + Version::ZERO,
// format,
// computation,
// StorableVecGeneatorOptions::default()
// .add_average()
// .add_sum()
// .add_cumulative(),
// )?;
let txindex_to_fee = let txindex_to_fee =
EagerVec::forced_import_compressed(&db, "fee", version + Version::ZERO)?; EagerVec::forced_import_compressed(&db, "fee", version + Version::ZERO)?;
@@ -1084,8 +1011,6 @@ impl Vecs {
txindex_to_is_coinbase, txindex_to_is_coinbase,
txinindex_to_value, txinindex_to_value,
// indexes_to_input_value,
// indexes_to_output_value,
txindex_to_input_value, txindex_to_input_value,
txindex_to_output_value, txindex_to_output_value,
txindex_to_fee, txindex_to_fee,
@@ -1102,6 +1027,8 @@ impl Vecs {
.collect(), .collect(),
)?; )?;
this.db.compact()?;
Ok(this) Ok(this)
} }
@@ -1365,7 +1292,9 @@ impl Vecs {
compute_indexes_to_tx_vany(&mut self.indexes_to_tx_v2, TxVersion::TWO)?; compute_indexes_to_tx_vany(&mut self.indexes_to_tx_v2, TxVersion::TWO)?;
compute_indexes_to_tx_vany(&mut self.indexes_to_tx_v3, TxVersion::THREE)?; compute_indexes_to_tx_vany(&mut self.indexes_to_tx_v3, TxVersion::THREE)?;
let mut txoutindex_to_value_iter = indexer.vecs.txoutindex_to_value.into_iter(); // Because random reads are needed, reading directly from the mmap is faster than using buffered iterators
let txoutindex_to_value = &indexer.vecs.txoutindex_to_value;
let txoutindex_to_value_reader = indexer.vecs.txoutindex_to_value.create_reader();
self.txinindex_to_value.compute_transform( self.txinindex_to_value.compute_transform(
starting_indexes.txinindex, starting_indexes.txinindex,
&indexes.txinindex_to_txoutindex, &indexes.txinindex_to_txoutindex,
@@ -1373,64 +1302,56 @@ impl Vecs {
let value = if txoutindex == TxOutIndex::COINBASE { let value = if txoutindex == TxOutIndex::COINBASE {
Sats::MAX Sats::MAX
} else { } else {
txoutindex_to_value_iter.get_unwrap(txoutindex) txoutindex_to_value
.unchecked_read(txoutindex, &txoutindex_to_value_reader)
.unwrap()
}; };
(txinindex, value) (txinindex, value)
}, },
exit, exit,
)?; )?;
// indexes.txinindex_to_txoutindex.boxed_clone(), // Debug: verify the computed txinindex_to_value
// indexer.vecs.txoutindex_to_value.boxed_clone(), dbg!("txinindex_to_value first 20:");
// |index: TxInIndex, txinindex_to_txoutindex_iter, txoutindex_to_value_iter| { for i in 0..20.min(self.txinindex_to_value.len()) {
// txinindex_to_txoutindex_iter.get_unwrap(index.to_usize()).map( let val = self.txinindex_to_value.read_at_unwrap_once(i);
// |(txinindex, txoutindex)| { dbg!((TxInIndex::from(i), val));
// let txoutindex = txoutindex; }
// if txoutindex == TxOutIndex::COINBASE {
// Sats::MAX
// } else if let Some((_, value)) =
// txoutindex_to_value_iter.get_unwrap(txoutindex.to_usize())
// {
// value
// } else {
// dbg!(txinindex, txoutindex);
// panic!()
// }
// },
// )
// },
// self.indexes_to_output_value.compute_all( // Debug: verify the computed txindex_to_input_count
// indexer, dbg!("txindex_to_input_count first 20:");
// indexes, for i in 0..20.min(indexes.txindex_to_input_count.len()) {
// starting_indexes, let val = indexes.txindex_to_input_count.read_at_unwrap_once(i);
// exit, dbg!((TxInIndex::from(i), val));
// |vec| { }
// vec.compute_sum_from_indexes(
// starting_indexes.txindex,
// &indexer.vecs.txindex_to_first_txoutindex,
// self.indexes_to_output_count.txindex.as_ref().unwrap(),
// &indexer.vecs.txoutindex_to_value,
// exit,
// )
// },
// )?;
// self.indexes_to_input_value.compute_all( self.txindex_to_input_value.compute_sum_from_indexes(
// indexer, starting_indexes.txindex,
// indexes, &indexer.vecs.txindex_to_first_txinindex,
// starting_indexes, &indexes.txindex_to_input_count,
// exit, &self.txinindex_to_value,
// |vec| { exit,
// vec.compute_sum_from_indexes( )?;
// starting_indexes.txindex,
// &indexer.vecs.txindex_to_first_txinindex, // Debug: verify the computed input values
// self.indexes_to_input_count.txindex.as_ref().unwrap(), for i in 0..10.min(self.txindex_to_input_value.len()) {
// &self.txinindex_to_value, let val = self.txindex_to_input_value.read_at_unwrap_once(i);
// exit, dbg!((TxIndex::from(i), "input_value", val));
// ) }
// },
// )?; self.txindex_to_output_value.compute_sum_from_indexes(
starting_indexes.txindex,
&indexer.vecs.txindex_to_first_txoutindex,
&indexes.txindex_to_output_count,
&indexer.vecs.txoutindex_to_value,
exit,
)?;
// Debug: verify the computed output values
for i in 0..10.min(self.txindex_to_output_value.len()) {
let val = self.txindex_to_output_value.read_at_unwrap_once(i);
dbg!((TxIndex::from(i), "output_value", val));
}
self.txindex_to_fee.compute_transform2( self.txindex_to_fee.compute_transform2(
starting_indexes.txindex, starting_indexes.txindex,
@@ -1439,9 +1360,10 @@ impl Vecs {
|(i, input, output, ..)| { |(i, input, output, ..)| {
( (
i, i,
if input.is_max() { if unlikely(input.is_max()) {
Sats::ZERO Sats::ZERO
} else { } else {
dbg!((i, input, output));
input.checked_sub(output).unwrap() input.checked_sub(output).unwrap()
}, },
) )

View File

@@ -284,6 +284,8 @@ impl Vecs {
.collect(), .collect(),
)?; )?;
this.db.compact()?;
Ok(this) Ok(this)
} }

View File

@@ -167,6 +167,8 @@ impl Vecs {
.collect(), .collect(),
)?; )?;
this.db.compact()?;
Ok(this) Ok(this)
} }

View File

@@ -7,7 +7,7 @@ use brk_traversable::Traversable;
use brk_types::{DateIndex, Height, OHLCCents, Version}; use brk_types::{DateIndex, Height, OHLCCents, Version};
use vecdb::{ use vecdb::{
AnyStoredVec, AnyVec, Database, Exit, GenericStoredVec, IterableVec, PAGE_SIZE, RawVec, AnyStoredVec, AnyVec, Database, Exit, GenericStoredVec, IterableVec, PAGE_SIZE, RawVec,
VecIndex, VecIterator, TypedVecIterator, VecIndex,
}; };
use super::{Indexes, indexes}; use super::{Indexes, indexes};
@@ -49,6 +49,8 @@ impl Vecs {
.collect(), .collect(),
)?; )?;
this.db.compact()?;
Ok(this) Ok(this)
} }
@@ -75,24 +77,22 @@ impl Vecs {
let index = starting_indexes let index = starting_indexes
.height .height
.min(Height::from(self.height_to_price_ohlc_in_cents.len())); .min(Height::from(self.height_to_price_ohlc_in_cents.len()));
let mut prev_timestamp = index
.decremented()
.map(|prev_i| height_to_timestamp.iter().unwrap().get_unwrap(prev_i));
height_to_timestamp height_to_timestamp
.iter()? .iter()?
.skip(index.to_usize())
.enumerate() .enumerate()
.skip(index.to_usize())
.try_for_each(|(i, v)| -> Result<()> { .try_for_each(|(i, v)| -> Result<()> {
self.height_to_price_ohlc_in_cents.forced_push_at( self.height_to_price_ohlc_in_cents.forced_push_at(
i, i,
self.fetcher self.fetcher
.get_height( .get_height(i.into(), v, prev_timestamp)
i.into(),
v,
i.decremented().map(|prev_i| {
height_to_timestamp.into_iter().get_at_unwrap(prev_i)
}),
)
.unwrap(), .unwrap(),
exit, exit,
)?; )?;
prev_timestamp = Some(v);
Ok(()) Ok(())
})?; })?;
self.height_to_price_ohlc_in_cents.safe_flush(exit)?; self.height_to_price_ohlc_in_cents.safe_flush(exit)?;
@@ -100,24 +100,18 @@ impl Vecs {
let index = starting_indexes let index = starting_indexes
.dateindex .dateindex
.min(DateIndex::from(self.dateindex_to_price_ohlc_in_cents.len())); .min(DateIndex::from(self.dateindex_to_price_ohlc_in_cents.len()));
let mut prev = None; let mut prev = Some(index.decremented().map_or(OHLCCents::default(), |prev_i| {
self.dateindex_to_price_ohlc_in_cents
.iter()
.unwrap()
.get_unwrap(prev_i)
}));
indexes indexes
.dateindex_to_date .dateindex_to_date
.iter() .iter()
.skip(index.to_usize())
.enumerate() .enumerate()
.skip(index.to_usize())
.try_for_each(|(i, d)| -> Result<()> { .try_for_each(|(i, d)| -> Result<()> {
if prev.is_none() {
let i = i.to_usize();
prev.replace(if i > 0 {
self.dateindex_to_price_ohlc_in_cents
.into_iter()
.get_at_unwrap(i - 1)
} else {
OHLCCents::default()
});
}
let ohlc = if i.to_usize() + 100 >= self.dateindex_to_price_ohlc_in_cents.len() let ohlc = if i.to_usize() + 100 >= self.dateindex_to_price_ohlc_in_cents.len()
&& let Ok(mut ohlc) = self.fetcher.get_date(d) && let Ok(mut ohlc) = self.fetcher.get_date(d)
{ {

View File

@@ -222,8 +222,8 @@ where
}); });
source source
.iter() .iter()
.skip(index.to_usize())
.enumerate() .enumerate()
.skip(index.to_usize())
.try_for_each(|(i, v)| -> Result<()> { .try_for_each(|(i, v)| -> Result<()> {
cumulative += v; cumulative += v;
cumulative_vec.forced_push_at(i, cumulative, exit)?; cumulative_vec.forced_push_at(i, cumulative, exit)?;
@@ -252,7 +252,6 @@ where
let index = self.starting_index(max_from); let index = self.starting_index(max_from);
let mut count_indexes_iter = count_indexes.iter();
let mut source_iter = source.iter(); let mut source_iter = source.iter();
let cumulative_vec = self.cumulative.as_mut(); let cumulative_vec = self.cumulative.as_mut();
@@ -263,12 +262,14 @@ where
}) })
}); });
let mut count_indexes_iter = count_indexes.iter().skip(index.to_usize());
first_indexes first_indexes
.iter() .iter()
.skip(index.to_usize())
.enumerate() .enumerate()
.skip(index.to_usize())
.try_for_each(|(index, first_index)| -> Result<()> { .try_for_each(|(index, first_index)| -> Result<()> {
let count_index = count_indexes_iter.get_at_unwrap(index); let count_index = count_indexes_iter.next().unwrap();
if let Some(first) = self.first.as_mut() { if let Some(first) = self.first.as_mut() {
let f = source_iter let f = source_iter
@@ -420,8 +421,6 @@ where
let index = self.starting_index(max_from); let index = self.starting_index(max_from);
let mut count_indexes_iter = count_indexes.iter();
let mut source_first_iter = source.first.as_ref().map(|f| f.iter()); let mut source_first_iter = source.first.as_ref().map(|f| f.iter());
let mut source_last_iter = source.last.as_ref().map(|f| f.iter()); let mut source_last_iter = source.last.as_ref().map(|f| f.iter());
let mut source_max_iter = source.max.as_ref().map(|f| f.iter()); let mut source_max_iter = source.max.as_ref().map(|f| f.iter());
@@ -435,12 +434,14 @@ where
}) })
}); });
let mut count_indexes_iter = count_indexes.iter().skip(index.to_usize());
first_indexes first_indexes
.iter() .iter()
.skip(index.to_usize())
.enumerate() .enumerate()
.skip(index.to_usize())
.try_for_each(|(index, first_index, ..)| -> Result<()> { .try_for_each(|(index, first_index, ..)| -> Result<()> {
let count_index = count_indexes_iter.get_at_unwrap(index); let count_index = count_indexes_iter.next().unwrap();
if let Some(first) = self.first.as_mut() { if let Some(first) = self.first.as_mut() {
let v = source_first_iter.as_mut().unwrap().get_unwrap(first_index); let v = source_first_iter.as_mut().unwrap().get_unwrap(first_index);

View File

@@ -65,7 +65,7 @@ where
.map_or_else(|| source.as_ref().unwrap().clone(), |v| v.clone()), .map_or_else(|| source.as_ref().unwrap().clone(), |v| v.clone()),
len_source.clone(), len_source.clone(),
|i: I, source, len_source| { |i: I, source, len_source| {
if i.to_usize() >= len_source.len() { if i.to_usize() >= len_source.vec_len() {
return None; return None;
} }
source.get_at(S1I::min_from(i)) source.get_at(S1I::min_from(i))
@@ -90,10 +90,10 @@ where
), ),
len_source.clone(), len_source.clone(),
|i: I, source, len_source| { |i: I, source, len_source| {
if i.to_usize() >= len_source.len() { if i.to_usize() >= len_source.vec_len() {
return None; return None;
} }
source.get_at(S1I::max_from(i, source.len())) source.get_at(S1I::max_from(i, source.vec_len()))
}, },
)) ))
}), }),
@@ -107,10 +107,10 @@ where
.map_or_else(|| source.as_ref().unwrap().clone(), |v| v.clone()), .map_or_else(|| source.as_ref().unwrap().clone(), |v| v.clone()),
len_source.clone(), len_source.clone(),
|i: I, source, len_source| { |i: I, source, len_source| {
if i.to_usize() >= len_source.len() { if i.to_usize() >= len_source.vec_len() {
return None; return None;
} }
S1I::inclusive_range_from(i, source.len()) S1I::inclusive_range_from(i, source.vec_len())
.flat_map(|i| source.get_at(i)) .flat_map(|i| source.get_at(i))
.min() .min()
}, },
@@ -126,10 +126,10 @@ where
.map_or_else(|| source.as_ref().unwrap().clone(), |v| v.clone()), .map_or_else(|| source.as_ref().unwrap().clone(), |v| v.clone()),
len_source.clone(), len_source.clone(),
|i: I, source, len_source| { |i: I, source, len_source| {
if i.to_usize() >= len_source.len() { if i.to_usize() >= len_source.vec_len() {
return None; return None;
} }
S1I::inclusive_range_from(i, source.len()) S1I::inclusive_range_from(i, source.vec_len())
.flat_map(|i| source.get_at(i)) .flat_map(|i| source.get_at(i))
.max() .max()
}, },
@@ -145,10 +145,10 @@ where
.map_or_else(|| source.as_ref().unwrap().clone(), |v| v.clone()), .map_or_else(|| source.as_ref().unwrap().clone(), |v| v.clone()),
len_source.clone(), len_source.clone(),
|i: I, source, len_source| { |i: I, source, len_source| {
if i.to_usize() >= len_source.len() { if i.to_usize() >= len_source.vec_len() {
return None; return None;
} }
let vec = S1I::inclusive_range_from(i, source.len()) let vec = S1I::inclusive_range_from(i, source.vec_len())
.flat_map(|i| source.get_at(i)) .flat_map(|i| source.get_at(i))
.collect::<Vec<_>>(); .collect::<Vec<_>>();
if vec.is_empty() { if vec.is_empty() {
@@ -175,10 +175,10 @@ where
.map_or_else(|| source.as_ref().unwrap().clone(), |v| v.clone()), .map_or_else(|| source.as_ref().unwrap().clone(), |v| v.clone()),
len_source.clone(), len_source.clone(),
|i: I, source, len_source| { |i: I, source, len_source| {
if i.to_usize() >= len_source.len() { if i.to_usize() >= len_source.vec_len() {
return None; return None;
} }
let vec = S1I::inclusive_range_from(i, source.len()) let vec = S1I::inclusive_range_from(i, source.vec_len())
.flat_map(|i| source.get_at(i)) .flat_map(|i| source.get_at(i))
.collect::<Vec<_>>(); .collect::<Vec<_>>();
if vec.is_empty() { if vec.is_empty() {
@@ -197,10 +197,10 @@ where
source_extra.cumulative.as_ref().unwrap().boxed_clone(), source_extra.cumulative.as_ref().unwrap().boxed_clone(),
len_source.clone(), len_source.clone(),
|i: I, source, len_source| { |i: I, source, len_source| {
if i.to_usize() >= len_source.len() { if i.to_usize() >= len_source.vec_len() {
return None; return None;
} }
source.get_at(S1I::max_from(i, source.len())) source.get_at(S1I::max_from(i, source.vec_len()))
}, },
)) ))
}), }),

View File

@@ -110,6 +110,7 @@ where
.merge_branches() .merge_branches()
.unwrap() .unwrap()
} }
fn iter_any_writable(&self) -> impl Iterator<Item = &dyn AnyWritableVec> { fn iter_any_writable(&self) -> impl Iterator<Item = &dyn AnyWritableVec> {
let mut regular_iter: Box<dyn Iterator<Item = &dyn AnyWritableVec>> = let mut regular_iter: Box<dyn Iterator<Item = &dyn AnyWritableVec>> =
Box::new(self.height.iter_any_writable()); Box::new(self.height.iter_any_writable());

View File

@@ -397,8 +397,8 @@ impl ComputedRatioVecsFromDateIndex {
.as_ref() .as_ref()
.unwrap() .unwrap()
.iter() .iter()
.skip(starting_dateindex.to_usize())
.enumerate() .enumerate()
.skip(starting_dateindex.to_usize())
.try_for_each(|(index, ratio)| -> Result<()> { .try_for_each(|(index, ratio)| -> Result<()> {
if index < min_ratio_date_usize { if index < min_ratio_date_usize {
self.ratio_pct5 self.ratio_pct5
@@ -611,28 +611,25 @@ impl ComputedRatioVecsFromDateIndex {
} }
fn mut_ratio_vecs(&mut self) -> Vec<&mut EagerVec<DateIndex, StoredF32>> { fn mut_ratio_vecs(&mut self) -> Vec<&mut EagerVec<DateIndex, StoredF32>> {
[ let mut vecs = Vec::with_capacity(6);
self.ratio_pct1 if let Some(v) = self.ratio_pct1.as_mut() {
.as_mut() vecs.push(v.dateindex.as_mut().unwrap());
.map_or(vec![], |v| vec![v.dateindex.as_mut().unwrap()]), }
self.ratio_pct2 if let Some(v) = self.ratio_pct2.as_mut() {
.as_mut() vecs.push(v.dateindex.as_mut().unwrap());
.map_or(vec![], |v| vec![v.dateindex.as_mut().unwrap()]), }
self.ratio_pct5 if let Some(v) = self.ratio_pct5.as_mut() {
.as_mut() vecs.push(v.dateindex.as_mut().unwrap());
.map_or(vec![], |v| vec![v.dateindex.as_mut().unwrap()]), }
self.ratio_pct95 if let Some(v) = self.ratio_pct95.as_mut() {
.as_mut() vecs.push(v.dateindex.as_mut().unwrap());
.map_or(vec![], |v| vec![v.dateindex.as_mut().unwrap()]), }
self.ratio_pct98 if let Some(v) = self.ratio_pct98.as_mut() {
.as_mut() vecs.push(v.dateindex.as_mut().unwrap());
.map_or(vec![], |v| vec![v.dateindex.as_mut().unwrap()]), }
self.ratio_pct99 if let Some(v) = self.ratio_pct99.as_mut() {
.as_mut() vecs.push(v.dateindex.as_mut().unwrap());
.map_or(vec![], |v| vec![v.dateindex.as_mut().unwrap()]), }
] vecs
.into_iter()
.flatten()
.collect::<Vec<_>>()
} }
} }

View File

@@ -483,8 +483,6 @@ impl ComputedStandardDeviationVecsFromDateIndex {
sorted.sort_unstable(); sorted.sort_unstable();
let mut sma_iter = sma.iter();
let mut p0_5sd = self.p0_5sd.as_mut().map(|c| c.dateindex.as_mut().unwrap()); let mut p0_5sd = self.p0_5sd.as_mut().map(|c| c.dateindex.as_mut().unwrap());
let mut p1sd = self.p1sd.as_mut().map(|c| c.dateindex.as_mut().unwrap()); let mut p1sd = self.p1sd.as_mut().map(|c| c.dateindex.as_mut().unwrap());
let mut p1_5sd = self.p1_5sd.as_mut().map(|c| c.dateindex.as_mut().unwrap()); let mut p1_5sd = self.p1_5sd.as_mut().map(|c| c.dateindex.as_mut().unwrap());
@@ -499,11 +497,12 @@ impl ComputedStandardDeviationVecsFromDateIndex {
let mut m3sd = self.m3sd.as_mut().map(|c| c.dateindex.as_mut().unwrap()); let mut m3sd = self.m3sd.as_mut().map(|c| c.dateindex.as_mut().unwrap());
let min_date_usize = min_date.to_usize(); let min_date_usize = min_date.to_usize();
let mut sma_iter = sma.iter().skip(starting_dateindex.to_usize());
source source
.iter() .iter()
.skip(starting_dateindex.to_usize())
.enumerate() .enumerate()
.skip(starting_dateindex.to_usize())
.try_for_each(|(index, ratio)| -> Result<()> { .try_for_each(|(index, ratio)| -> Result<()> {
if index < min_date_usize { if index < min_date_usize {
self.sd.dateindex.as_mut().unwrap().forced_push_at( self.sd.dateindex.as_mut().unwrap().forced_push_at(
@@ -548,11 +547,13 @@ impl ComputedStandardDeviationVecsFromDateIndex {
if let Some(v) = m3sd.as_mut() { if let Some(v) = m3sd.as_mut() {
v.forced_push_at(index, StoredF32::NAN, exit)? v.forced_push_at(index, StoredF32::NAN, exit)?
} }
// Advance iterator to stay in sync
sma_iter.next();
} else { } else {
let pos = sorted.binary_search(&ratio).unwrap_or_else(|pos| pos); let pos = sorted.binary_search(&ratio).unwrap_or_else(|pos| pos);
sorted.insert(pos, ratio); sorted.insert(pos, ratio);
let avg = sma_iter.get_at_unwrap(index); let avg = sma_iter.next().unwrap();
let population = let population =
index.checked_sub(min_date_usize).unwrap().to_usize() as f32 + 1.0; index.checked_sub(min_date_usize).unwrap().to_usize() as f32 + 1.0;

View File

@@ -13,8 +13,8 @@ use brk_types::{
YearIndex, YearIndex,
}; };
use vecdb::{ use vecdb::{
Database, EagerVec, Exit, IterableCloneableVec, LazyVecFrom1, LazyVecFrom2, PAGE_SIZE, Database, EagerVec, Exit, GenericStoredVec, IterableCloneableVec, LazyVecFrom1, PAGE_SIZE,
TypedVecIterator, VecIndex, TypedVecIterator, unlikely,
}; };
const VERSION: Version = Version::ZERO; const VERSION: Version = Version::ZERO;
@@ -79,14 +79,11 @@ pub struct Vecs {
pub semesterindex_to_first_monthindex: EagerVec<SemesterIndex, MonthIndex>, pub semesterindex_to_first_monthindex: EagerVec<SemesterIndex, MonthIndex>,
pub semesterindex_to_monthindex_count: EagerVec<SemesterIndex, StoredU64>, pub semesterindex_to_monthindex_count: EagerVec<SemesterIndex, StoredU64>,
pub semesterindex_to_semesterindex: EagerVec<SemesterIndex, SemesterIndex>, pub semesterindex_to_semesterindex: EagerVec<SemesterIndex, SemesterIndex>,
pub txindex_to_input_count: pub txindex_to_input_count: EagerVec<TxIndex, StoredU64>,
LazyVecFrom2<TxIndex, StoredU64, TxIndex, TxInIndex, TxInIndex, TxOutIndex>, pub txindex_to_output_count: EagerVec<TxIndex, StoredU64>,
pub txindex_to_output_count:
LazyVecFrom2<TxIndex, StoredU64, TxIndex, TxOutIndex, TxOutIndex, Sats>,
pub txindex_to_txindex: LazyVecFrom1<TxIndex, TxIndex, TxIndex, Txid>, pub txindex_to_txindex: LazyVecFrom1<TxIndex, TxIndex, TxIndex, Txid>,
pub txinindex_to_txinindex: LazyVecFrom1<TxInIndex, TxInIndex, TxInIndex, OutPoint>, pub txinindex_to_txinindex: LazyVecFrom1<TxInIndex, TxInIndex, TxInIndex, OutPoint>,
pub txinindex_to_txoutindex: pub txinindex_to_txoutindex: EagerVec<TxInIndex, TxOutIndex>,
LazyVecFrom2<TxInIndex, TxOutIndex, TxInIndex, OutPoint, TxIndex, TxOutIndex>,
pub txoutindex_to_txoutindex: LazyVecFrom1<TxOutIndex, TxOutIndex, TxOutIndex, Sats>, pub txoutindex_to_txoutindex: LazyVecFrom1<TxOutIndex, TxOutIndex, TxOutIndex, Sats>,
pub unknownoutputindex_to_unknownoutputindex: pub unknownoutputindex_to_unknownoutputindex:
LazyVecFrom1<UnknownOutputIndex, UnknownOutputIndex, UnknownOutputIndex, TxIndex>, LazyVecFrom1<UnknownOutputIndex, UnknownOutputIndex, UnknownOutputIndex, TxIndex>,
@@ -110,179 +107,112 @@ impl Vecs {
let version = parent_version + VERSION; let version = parent_version + VERSION;
let txinindex_to_txoutindex = LazyVecFrom2::init(
"txoutindex",
version,
indexer.vecs.txinindex_to_outpoint.boxed_clone(),
indexer.vecs.txindex_to_first_txoutindex.boxed_clone(),
|index: TxInIndex, txinindex_to_outpoint_iter, txindex_to_first_txoutindex_iter| {
txinindex_to_outpoint_iter
.get_at(index.to_usize())
.map(|outpoint| {
if outpoint.is_coinbase() {
return TxOutIndex::COINBASE;
}
txindex_to_first_txoutindex_iter
.get_at_unwrap(outpoint.txindex().to_usize())
+ outpoint.vout()
})
},
);
let txoutindex_to_txoutindex = LazyVecFrom1::init(
"txoutindex",
version + Version::ZERO,
indexer.vecs.txoutindex_to_value.boxed_clone(),
|index, _| Some(index),
);
let txinindex_to_txinindex = LazyVecFrom1::init(
"txinindex",
version + Version::ZERO,
indexer.vecs.txinindex_to_outpoint.boxed_clone(),
|index, _| Some(index),
);
let txindex_to_txindex = LazyVecFrom1::init(
"txindex",
version + Version::ZERO,
indexer.vecs.txindex_to_txid.boxed_clone(),
|index, _| Some(index),
);
let txindex_to_input_count = LazyVecFrom2::init(
"input_count",
version + Version::ZERO,
indexer.vecs.txindex_to_first_txinindex.boxed_clone(),
txinindex_to_txoutindex.boxed_clone(),
|index: TxIndex, txindex_to_first_txinindex_iter, txinindex_to_txoutindex_iter| {
let txindex = index.to_usize();
txindex_to_first_txinindex_iter
.get_at(txindex)
.map(|start| {
let start = usize::from(start);
let end = txindex_to_first_txinindex_iter
.get_at(txindex + 1)
.map(usize::from)
.unwrap_or_else(|| txinindex_to_txoutindex_iter.len());
StoredU64::from((start..end).count())
})
},
);
let txindex_to_output_count = LazyVecFrom2::init(
"output_count",
version + Version::ZERO,
indexer.vecs.txindex_to_first_txoutindex.boxed_clone(),
indexer.vecs.txoutindex_to_value.boxed_clone(),
|index: TxIndex, txindex_to_first_txoutindex_iter, txoutindex_to_value_iter| {
let txindex = index.to_usize();
txindex_to_first_txoutindex_iter
.get_at(txindex)
.map(|start| {
let start = usize::from(start);
let end = txindex_to_first_txoutindex_iter
.get_at(txindex + 1)
.map(usize::from)
.unwrap_or_else(|| txoutindex_to_value_iter.len());
StoredU64::from((start..end).count())
})
},
);
let p2pk33addressindex_to_p2pk33addressindex = LazyVecFrom1::init(
"p2pk33addressindex",
version + Version::ZERO,
indexer.vecs.p2pk33addressindex_to_p2pk33bytes.boxed_clone(),
|index, _| Some(index),
);
let p2pk65addressindex_to_p2pk65addressindex = LazyVecFrom1::init(
"p2pk65addressindex",
version + Version::ZERO,
indexer.vecs.p2pk65addressindex_to_p2pk65bytes.boxed_clone(),
|index, _| Some(index),
);
let p2pkhaddressindex_to_p2pkhaddressindex = LazyVecFrom1::init(
"p2pkhaddressindex",
version + Version::ZERO,
indexer.vecs.p2pkhaddressindex_to_p2pkhbytes.boxed_clone(),
|index, _| Some(index),
);
let p2shaddressindex_to_p2shaddressindex = LazyVecFrom1::init(
"p2shaddressindex",
version + Version::ZERO,
indexer.vecs.p2shaddressindex_to_p2shbytes.boxed_clone(),
|index, _| Some(index),
);
let p2traddressindex_to_p2traddressindex = LazyVecFrom1::init(
"p2traddressindex",
version + Version::ZERO,
indexer.vecs.p2traddressindex_to_p2trbytes.boxed_clone(),
|index, _| Some(index),
);
let p2wpkhaddressindex_to_p2wpkhaddressindex = LazyVecFrom1::init(
"p2wpkhaddressindex",
version + Version::ZERO,
indexer.vecs.p2wpkhaddressindex_to_p2wpkhbytes.boxed_clone(),
|index, _| Some(index),
);
let p2wshaddressindex_to_p2wshaddressindex = LazyVecFrom1::init(
"p2wshaddressindex",
version + Version::ZERO,
indexer.vecs.p2wshaddressindex_to_p2wshbytes.boxed_clone(),
|index, _| Some(index),
);
let p2aaddressindex_to_p2aaddressindex = LazyVecFrom1::init(
"p2aaddressindex",
version + Version::ZERO,
indexer.vecs.p2aaddressindex_to_p2abytes.boxed_clone(),
|index, _| Some(index),
);
let p2msoutputindex_to_p2msoutputindex = LazyVecFrom1::init(
"p2msoutputindex",
version + Version::ZERO,
indexer.vecs.p2msoutputindex_to_txindex.boxed_clone(),
|index, _| Some(index),
);
let emptyoutputindex_to_emptyoutputindex = LazyVecFrom1::init(
"emptyoutputindex",
version + Version::ZERO,
indexer.vecs.emptyoutputindex_to_txindex.boxed_clone(),
|index, _| Some(index),
);
let unknownoutputindex_to_unknownoutputindex = LazyVecFrom1::init(
"unknownoutputindex",
version + Version::ZERO,
indexer.vecs.unknownoutputindex_to_txindex.boxed_clone(),
|index, _| Some(index),
);
let opreturnindex_to_opreturnindex = LazyVecFrom1::init(
"opreturnindex",
version + Version::ZERO,
indexer.vecs.opreturnindex_to_txindex.boxed_clone(),
|index, _| Some(index),
);
let this = Self { let this = Self {
txinindex_to_txoutindex, txinindex_to_txoutindex: EagerVec::forced_import_compressed(
emptyoutputindex_to_emptyoutputindex, &db,
txinindex_to_txinindex, "txoutindex",
opreturnindex_to_opreturnindex, version,
txoutindex_to_txoutindex, )?,
p2aaddressindex_to_p2aaddressindex, txoutindex_to_txoutindex: LazyVecFrom1::init(
p2msoutputindex_to_p2msoutputindex, "txoutindex",
p2pk33addressindex_to_p2pk33addressindex, version + Version::ZERO,
p2pk65addressindex_to_p2pk65addressindex, indexer.vecs.txoutindex_to_value.boxed_clone(),
p2pkhaddressindex_to_p2pkhaddressindex, |index, _| Some(index),
p2shaddressindex_to_p2shaddressindex, ),
p2traddressindex_to_p2traddressindex, txinindex_to_txinindex: LazyVecFrom1::init(
p2wpkhaddressindex_to_p2wpkhaddressindex, "txinindex",
p2wshaddressindex_to_p2wshaddressindex, version + Version::ZERO,
txindex_to_input_count, indexer.vecs.txinindex_to_outpoint.boxed_clone(),
txindex_to_output_count, |index, _| Some(index),
txindex_to_txindex, ),
unknownoutputindex_to_unknownoutputindex, p2pk33addressindex_to_p2pk33addressindex: LazyVecFrom1::init(
"p2pk33addressindex",
version + Version::ZERO,
indexer.vecs.p2pk33addressindex_to_p2pk33bytes.boxed_clone(),
|index, _| Some(index),
),
p2pk65addressindex_to_p2pk65addressindex: LazyVecFrom1::init(
"p2pk65addressindex",
version + Version::ZERO,
indexer.vecs.p2pk65addressindex_to_p2pk65bytes.boxed_clone(),
|index, _| Some(index),
),
p2pkhaddressindex_to_p2pkhaddressindex: LazyVecFrom1::init(
"p2pkhaddressindex",
version + Version::ZERO,
indexer.vecs.p2pkhaddressindex_to_p2pkhbytes.boxed_clone(),
|index, _| Some(index),
),
p2shaddressindex_to_p2shaddressindex: LazyVecFrom1::init(
"p2shaddressindex",
version + Version::ZERO,
indexer.vecs.p2shaddressindex_to_p2shbytes.boxed_clone(),
|index, _| Some(index),
),
p2traddressindex_to_p2traddressindex: LazyVecFrom1::init(
"p2traddressindex",
version + Version::ZERO,
indexer.vecs.p2traddressindex_to_p2trbytes.boxed_clone(),
|index, _| Some(index),
),
p2wpkhaddressindex_to_p2wpkhaddressindex: LazyVecFrom1::init(
"p2wpkhaddressindex",
version + Version::ZERO,
indexer.vecs.p2wpkhaddressindex_to_p2wpkhbytes.boxed_clone(),
|index, _| Some(index),
),
p2wshaddressindex_to_p2wshaddressindex: LazyVecFrom1::init(
"p2wshaddressindex",
version + Version::ZERO,
indexer.vecs.p2wshaddressindex_to_p2wshbytes.boxed_clone(),
|index, _| Some(index),
),
p2aaddressindex_to_p2aaddressindex: LazyVecFrom1::init(
"p2aaddressindex",
version + Version::ZERO,
indexer.vecs.p2aaddressindex_to_p2abytes.boxed_clone(),
|index, _| Some(index),
),
p2msoutputindex_to_p2msoutputindex: LazyVecFrom1::init(
"p2msoutputindex",
version + Version::ZERO,
indexer.vecs.p2msoutputindex_to_txindex.boxed_clone(),
|index, _| Some(index),
),
emptyoutputindex_to_emptyoutputindex: LazyVecFrom1::init(
"emptyoutputindex",
version + Version::ZERO,
indexer.vecs.emptyoutputindex_to_txindex.boxed_clone(),
|index, _| Some(index),
),
unknownoutputindex_to_unknownoutputindex: LazyVecFrom1::init(
"unknownoutputindex",
version + Version::ZERO,
indexer.vecs.unknownoutputindex_to_txindex.boxed_clone(),
|index, _| Some(index),
),
opreturnindex_to_opreturnindex: LazyVecFrom1::init(
"opreturnindex",
version + Version::ZERO,
indexer.vecs.opreturnindex_to_txindex.boxed_clone(),
|index, _| Some(index),
),
txindex_to_txindex: LazyVecFrom1::init(
"txindex",
version + Version::ZERO,
indexer.vecs.txindex_to_txid.boxed_clone(),
|index, _| Some(index),
),
txindex_to_input_count: EagerVec::forced_import_compressed(
&db,
"input_count",
version + Version::ZERO,
)?,
txindex_to_output_count: EagerVec::forced_import_compressed(
&db,
"output_count",
version + Version::ZERO,
)?,
dateindex_to_date: EagerVec::forced_import_compressed( dateindex_to_date: EagerVec::forced_import_compressed(
&db, &db,
"date", "date",
@@ -497,6 +427,8 @@ impl Vecs {
.collect(), .collect(),
)?; )?;
this.db.compact()?;
Ok(this) Ok(this)
} }
@@ -517,10 +449,46 @@ impl Vecs {
starting_indexes: brk_indexer::Indexes, starting_indexes: brk_indexer::Indexes,
exit: &Exit, exit: &Exit,
) -> Result<Indexes> { ) -> Result<Indexes> {
// ---
// TxInIndex
// ---
let txindex_to_first_txoutindex = &indexer.vecs.txindex_to_first_txoutindex;
let txindex_to_first_txoutindex_reader = txindex_to_first_txoutindex.create_reader();
self.txinindex_to_txoutindex.compute_transform(
starting_indexes.txinindex,
&indexer.vecs.txinindex_to_outpoint,
|(txinindex, outpoint, ..)| {
if unlikely(outpoint.is_coinbase()) {
(txinindex, TxOutIndex::COINBASE)
} else {
let txoutindex = txindex_to_first_txoutindex
.read_unwrap(outpoint.txindex(), &txindex_to_first_txoutindex_reader)
+ outpoint.vout();
(txinindex, txoutindex)
}
},
exit,
)?;
// --- // ---
// TxIndex // TxIndex
// --- // ---
self.txindex_to_input_count.compute_count_from_indexes(
starting_indexes.txindex,
&indexer.vecs.txindex_to_first_txinindex,
&self.txinindex_to_txoutindex,
exit,
)?;
self.txindex_to_output_count.compute_count_from_indexes(
starting_indexes.txindex,
&indexer.vecs.txindex_to_first_txoutindex,
&indexer.vecs.txoutindex_to_value,
exit,
)?;
self.height_to_txindex_count.compute_count_from_indexes( self.height_to_txindex_count.compute_count_from_indexes(
starting_indexes.height, starting_indexes.height,
&indexer.vecs.height_to_first_txindex, &indexer.vecs.height_to_first_txindex,
@@ -928,6 +896,7 @@ impl Vecs {
} }
} }
#[derive(Debug)]
pub struct Indexes { pub struct Indexes {
indexes: brk_indexer::Indexes, indexes: brk_indexer::Indexes,
pub dateindex: DateIndex, pub dateindex: DateIndex,

View File

@@ -174,35 +174,35 @@ impl Computer {
self.blks self.blks
.compute(indexer, &starting_indexes, parser, exit)?; .compute(indexer, &starting_indexes, parser, exit)?;
std::thread::scope(|scope| -> Result<()> { // std::thread::scope(|scope| -> Result<()> {
let constants = scope.spawn(|| -> Result<()> { // let constants = scope.spawn(|| -> Result<()> {
info!("Computing constants..."); info!("Computing constants...");
self.constants self.constants
.compute(&self.indexes, &starting_indexes, exit)?; .compute(&self.indexes, &starting_indexes, exit)?;
Ok(()) // Ok(())
}); // });
let chain = scope.spawn(|| -> Result<()> { // let chain = scope.spawn(|| -> Result<()> {
info!("Computing chain..."); info!("Computing chain...");
self.chain.compute( self.chain.compute(
indexer, indexer,
&self.indexes, &self.indexes,
&starting_indexes, &starting_indexes,
self.price.as_ref(), self.price.as_ref(),
exit, exit,
)?; )?;
Ok(()) // Ok(())
}); // });
if let Some(price) = self.price.as_ref() { if let Some(price) = self.price.as_ref() {
info!("Computing market..."); info!("Computing market...");
self.market.compute(price, &starting_indexes, exit)?; self.market.compute(price, &starting_indexes, exit)?;
} }
constants.join().unwrap()?; // constants.join().unwrap()?;
chain.join().unwrap()?; // chain.join().unwrap()?;
Ok(()) // Ok(())
})?; // })?;
self.pools.compute( self.pools.compute(
indexer, indexer,
@@ -213,6 +213,8 @@ impl Computer {
exit, exit,
)?; )?;
return Ok(());
info!("Computing stateful..."); info!("Computing stateful...");
self.stateful.compute( self.stateful.compute(
indexer, indexer,

View File

@@ -1522,6 +1522,8 @@ impl Vecs {
.collect(), .collect(),
)?; )?;
this.db.compact()?;
Ok(this) Ok(this)
} }

View File

@@ -67,6 +67,8 @@ impl Vecs {
.collect(), .collect(),
)?; )?;
this.db.compact()?;
Ok(this) Ok(this)
} }

File diff suppressed because it is too large Load Diff

View File

@@ -487,6 +487,8 @@ impl Vecs {
.collect(), .collect(),
)?; )?;
this.db.compact()?;
Ok(this) Ok(this)
} }

View File

@@ -23,6 +23,7 @@ pub trait ComputeDCAStackViaLen {
exit: &Exit, exit: &Exit,
) -> Result<()>; ) -> Result<()>;
} }
impl ComputeDCAStackViaLen for EagerVec<DateIndex, Sats> { impl ComputeDCAStackViaLen for EagerVec<DateIndex, Sats> {
fn compute_dca_stack_via_len( fn compute_dca_stack_via_len(
&mut self, &mut self,
@@ -35,41 +36,40 @@ impl ComputeDCAStackViaLen for EagerVec<DateIndex, Sats> {
Version::ZERO + self.inner_version() + closes.version(), Version::ZERO + self.inner_version() + closes.version(),
)?; )?;
let mut other_iter = closes.iter();
let mut prev = None;
let index = max_from.to_usize().min(self.len()); let index = max_from.to_usize().min(self.len());
// Initialize prev before the loop to avoid checking on every iteration
let mut prev = if index == 0 {
Sats::ZERO
} else {
self.read_at_unwrap_once(index - 1)
};
let mut lookback = closes.create_lookback(index, len, 0);
closes closes
.iter() .iter()
.skip(index)
.enumerate() .enumerate()
.skip(index)
.try_for_each(|(i, closes)| { .try_for_each(|(i, closes)| {
let price = *closes; let price = *closes;
let i_usize = i.to_usize(); let i_usize = i.to_usize();
if prev.is_none() {
if i_usize == 0 {
prev.replace(Sats::ZERO);
} else {
prev.replace(self.read_at_unwrap_once(i_usize - 1));
}
}
let mut stack = Sats::ZERO; let mut stack = Sats::ZERO;
if price != Dollars::ZERO { if price != Dollars::ZERO {
stack = prev.unwrap() + Sats::from(Bitcoin::from(DCA_AMOUNT / price)); stack = prev + Sats::from(Bitcoin::from(DCA_AMOUNT / price));
if i_usize >= len { let prev_price =
let prev_price = *other_iter.get_at_unwrap(i_usize - len); *lookback.get_and_push(i_usize, Close::new(price), Close::default());
if prev_price != Dollars::ZERO { if prev_price != Dollars::ZERO {
stack = stack stack = stack
.checked_sub(Sats::from(Bitcoin::from(DCA_AMOUNT / prev_price))) .checked_sub(Sats::from(Bitcoin::from(DCA_AMOUNT / prev_price)))
.unwrap(); .unwrap();
}
} }
} }
prev.replace(stack); prev = stack;
self.forced_push_at(i, stack, exit) self.forced_push_at(i, stack, exit)
})?; })?;
@@ -90,32 +90,30 @@ impl ComputeDCAStackViaLen for EagerVec<DateIndex, Sats> {
Version::ZERO + self.inner_version() + closes.version(), Version::ZERO + self.inner_version() + closes.version(),
)?; )?;
let mut prev = None;
let from = from.to_usize(); let from = from.to_usize();
let index = max_from.min(DateIndex::from(self.len())); let index = max_from.min(DateIndex::from(self.len()));
// Initialize prev before the loop to avoid checking on every iteration
let mut prev = if index.to_usize() == 0 {
Sats::ZERO
} else {
self.read_at_unwrap_once(index.to_usize() - 1)
};
closes closes
.iter() .iter()
.skip(index.to_usize())
.enumerate() .enumerate()
.skip(index.to_usize())
.try_for_each(|(i, closes)| { .try_for_each(|(i, closes)| {
let price = *closes; let price = *closes;
let i_usize = i.to_usize();
if prev.is_none() {
if i_usize == 0 {
prev.replace(Sats::ZERO);
} else {
prev.replace(self.read_at_unwrap_once(i_usize - 1));
}
}
let mut stack = Sats::ZERO; let mut stack = Sats::ZERO;
if price != Dollars::ZERO && i >= from { if price != Dollars::ZERO && i >= from {
stack = prev.unwrap() + Sats::from(Bitcoin::from(DCA_AMOUNT / price)); stack = prev + Sats::from(Bitcoin::from(DCA_AMOUNT / price));
} }
prev.replace(stack); prev = stack;
self.forced_push_at(i, stack, exit) self.forced_push_at(i, stack, exit)
})?; })?;
@@ -143,6 +141,7 @@ pub trait ComputeDCAAveragePriceViaLen {
exit: &Exit, exit: &Exit,
) -> Result<()>; ) -> Result<()>;
} }
impl ComputeDCAAveragePriceViaLen for EagerVec<DateIndex, Dollars> { impl ComputeDCAAveragePriceViaLen for EagerVec<DateIndex, Dollars> {
fn compute_dca_avg_price_via_len( fn compute_dca_avg_price_via_len(
&mut self, &mut self,
@@ -163,8 +162,8 @@ impl ComputeDCAAveragePriceViaLen for EagerVec<DateIndex, Dollars> {
stacks stacks
.iter() .iter()
.skip(index.to_usize())
.enumerate() .enumerate()
.skip(index.to_usize())
.try_for_each(|(i, stack)| { .try_for_each(|(i, stack)| {
let mut avg_price = Dollars::from(f64::NAN); let mut avg_price = Dollars::from(f64::NAN);
if i > first_price_date { if i > first_price_date {
@@ -199,8 +198,8 @@ impl ComputeDCAAveragePriceViaLen for EagerVec<DateIndex, Dollars> {
stacks stacks
.iter() .iter()
.skip(index.to_usize())
.enumerate() .enumerate()
.skip(index.to_usize())
.try_for_each(|(i, stack)| { .try_for_each(|(i, stack)| {
let mut avg_price = Dollars::from(f64::NAN); let mut avg_price = Dollars::from(f64::NAN);
if i >= from { if i >= from {
@@ -223,6 +222,7 @@ pub trait ComputeFromSats<I> {
exit: &Exit, exit: &Exit,
) -> Result<()>; ) -> Result<()>;
} }
impl<I> ComputeFromSats<I> for EagerVec<I, Bitcoin> impl<I> ComputeFromSats<I> for EagerVec<I, Bitcoin>
where where
I: VecIndex, I: VecIndex,
@@ -233,21 +233,12 @@ where
sats: &impl IterableVec<I, Sats>, sats: &impl IterableVec<I, Sats>,
exit: &Exit, exit: &Exit,
) -> Result<()> { ) -> Result<()> {
self.validate_computed_version_or_reset( self.compute_transform(
Version::ZERO + self.inner_version() + sats.version(), max_from,
sats,
|(i, sats, _)| (i, Bitcoin::from(sats)),
exit,
)?; )?;
let index = max_from.min(I::from(self.len()));
sats.iter()
.skip(index.to_usize())
.enumerate()
.try_for_each(|(i, sats)| {
let (i, v) = (i, Bitcoin::from(sats));
self.forced_push_at(i, v, exit)
})?;
self.safe_flush(exit)?;
Ok(()) Ok(())
} }
} }
@@ -261,6 +252,7 @@ pub trait ComputeFromBitcoin<I> {
exit: &Exit, exit: &Exit,
) -> Result<()>; ) -> Result<()>;
} }
impl<I> ComputeFromBitcoin<I> for EagerVec<I, Dollars> impl<I> ComputeFromBitcoin<I> for EagerVec<I, Dollars>
where where
I: VecIndex, I: VecIndex,
@@ -272,24 +264,13 @@ where
price: &impl IterableVec<I, Close<Dollars>>, price: &impl IterableVec<I, Close<Dollars>>,
exit: &Exit, exit: &Exit,
) -> Result<()> { ) -> Result<()> {
self.validate_computed_version_or_reset( self.compute_transform2(
Version::ZERO + self.inner_version() + bitcoin.version(), max_from,
bitcoin,
price,
|(i, bitcoin, price, _)| (i, *price * bitcoin),
exit,
)?; )?;
let mut price_iter = price.iter();
let index = max_from.min(I::from(self.len()));
bitcoin
.iter()
.skip(index.to_usize())
.enumerate()
.try_for_each(|(i, bitcoin)| {
let dollars = price_iter.get_at_unwrap(i);
let (i, v) = (i, *dollars * bitcoin);
self.forced_push_at(i, v, exit)
})?;
self.safe_flush(exit)?;
Ok(()) Ok(())
} }
} }
@@ -303,6 +284,7 @@ pub trait ComputeDrawdown<I> {
exit: &Exit, exit: &Exit,
) -> Result<()>; ) -> Result<()>;
} }
impl<I> ComputeDrawdown<I> for EagerVec<I, StoredF32> impl<I> ComputeDrawdown<I> for EagerVec<I, StoredF32>
where where
I: VecIndex, I: VecIndex,
@@ -314,27 +296,20 @@ where
ath: &impl IterableVec<I, Dollars>, ath: &impl IterableVec<I, Dollars>,
exit: &Exit, exit: &Exit,
) -> Result<()> { ) -> Result<()> {
self.validate_computed_version_or_reset( self.compute_transform2(
Version::ZERO + self.inner_version() + ath.version() + close.version(), max_from,
)?; ath,
close,
let index = max_from.min(I::from(self.len())); |(i, ath, close, _)| {
let mut close_iter = close.iter();
ath.iter()
.skip(index.to_usize())
.enumerate()
.try_for_each(|(i, ath)| {
if ath == Dollars::ZERO { if ath == Dollars::ZERO {
self.forced_push_at(i, StoredF32::default(), exit) (i, StoredF32::default())
} else { } else {
let close = *close_iter.get_at_unwrap(i); let drawdown = StoredF32::from((*ath - **close) / *ath * -100.0);
let drawdown = StoredF32::from((*ath - *close) / *ath * -100.0); (i, drawdown)
self.forced_push_at(i, drawdown, exit)
} }
})?; },
exit,
self.safe_flush(exit)?; )?;
Ok(()) Ok(())
} }
} }

View File

@@ -16,7 +16,7 @@ fjall2 = { workspace = true }
fjall3 = { workspace = true } fjall3 = { workspace = true }
jiff = { workspace = true } jiff = { workspace = true }
minreq = { workspace = true } minreq = { workspace = true }
sonic-rs = { workspace = true } serde_json = { workspace = true }
tokio = { workspace = true } tokio = { workspace = true }
vecdb = { workspace = true } vecdb = { workspace = true }
zerocopy = { workspace = true } zerocopy = { workspace = true }

View File

@@ -23,7 +23,7 @@ pub enum Error {
BitcoinHexError(bitcoin::consensus::encode::FromHexError), BitcoinHexError(bitcoin::consensus::encode::FromHexError),
BitcoinFromScriptError(bitcoin::address::FromScriptError), BitcoinFromScriptError(bitcoin::address::FromScriptError),
BitcoinHexToArrayError(bitcoin::hex::HexToArrayError), BitcoinHexToArrayError(bitcoin::hex::HexToArrayError),
SonicRS(sonic_rs::Error), SerdeJSON(serde_json::Error),
TokioJoin(tokio::task::JoinError), TokioJoin(tokio::task::JoinError),
ZeroCopyError, ZeroCopyError,
Vecs(vecdb::Error), Vecs(vecdb::Error),
@@ -86,10 +86,10 @@ impl From<time::SystemTimeError> for Error {
} }
} }
impl From<sonic_rs::Error> for Error { impl From<serde_json::Error> for Error {
#[inline] #[inline]
fn from(error: sonic_rs::Error) -> Self { fn from(error: serde_json::Error) -> Self {
Self::SonicRS(error) Self::SerdeJSON(error)
} }
} }
@@ -192,7 +192,7 @@ impl fmt::Display for Error {
Error::Jiff(error) => Display::fmt(&error, f), Error::Jiff(error) => Display::fmt(&error, f),
Error::Minreq(error) => Display::fmt(&error, f), Error::Minreq(error) => Display::fmt(&error, f),
Error::RawDB(error) => Display::fmt(&error, f), Error::RawDB(error) => Display::fmt(&error, f),
Error::SonicRS(error) => Display::fmt(&error, f), Error::SerdeJSON(error) => Display::fmt(&error, f),
Error::SystemTimeError(error) => Display::fmt(&error, f), Error::SystemTimeError(error) => Display::fmt(&error, f),
Error::TokioJoin(error) => Display::fmt(&error, f), Error::TokioJoin(error) => Display::fmt(&error, f),
Error::VecDB(error) => Display::fmt(&error, f), Error::VecDB(error) => Display::fmt(&error, f),

View File

@@ -15,4 +15,4 @@ brk_logger = { workspace = true }
brk_types = { workspace = true } brk_types = { workspace = true }
log = { workspace = true } log = { workspace = true }
minreq = { workspace = true } minreq = { workspace = true }
sonic-rs = { workspace = true } serde_json = { workspace = true }

View File

@@ -8,7 +8,7 @@ use std::{
use brk_error::{Error, Result}; use brk_error::{Error, Result};
use brk_types::{Cents, OHLCCents, Timestamp}; use brk_types::{Cents, OHLCCents, Timestamp};
use log::info; use log::info;
use sonic_rs::{JsonContainerTrait, JsonValueTrait, Value}; use serde_json::Value;
use crate::{Close, Date, Dollars, Fetcher, High, Low, Open, default_retry}; use crate::{Close, Date, Dollars, Fetcher, High, Low, Open, default_retry};
@@ -68,10 +68,10 @@ impl Binance {
info!("Fetching 1mn prices from Binance..."); info!("Fetching 1mn prices from Binance...");
default_retry(|_| { default_retry(|_| {
Self::json_to_timestamp_to_ohlc(&sonic_rs::from_str( Self::json_to_timestamp_to_ohlc(&serde_json::from_slice(
minreq::get(Self::url("interval=1m&limit=1000")) minreq::get(Self::url("interval=1m&limit=1000"))
.send()? .send()?
.as_str()?, .as_bytes(),
)?) )?)
}) })
} }
@@ -93,8 +93,8 @@ impl Binance {
info!("Fetching daily prices from Binance..."); info!("Fetching daily prices from Binance...");
default_retry(|_| { default_retry(|_| {
Self::json_to_date_to_ohlc(&sonic_rs::from_str( Self::json_to_date_to_ohlc(&serde_json::from_slice(
minreq::get(Self::url("interval=1d")).send()?.as_str()?, minreq::get(Self::url("interval=1d")).send()?.as_bytes(),
)?) )?)
}) })
} }
@@ -120,7 +120,7 @@ impl Binance {
let reader = BufReader::new(file); let reader = BufReader::new(file);
let json: BTreeMap<String, Value> = if let Ok(json) = sonic_rs::from_reader(reader) { let json: BTreeMap<String, Value> = if let Ok(json) = serde_json::from_reader(reader) {
json json
} else { } else {
return Ok(Default::default()); return Ok(Default::default());
@@ -130,7 +130,7 @@ impl Binance {
.ok_or(Error::Str("Expect object to have log attribute"))? .ok_or(Error::Str("Expect object to have log attribute"))?
.as_object() .as_object()
.ok_or(Error::Str("Expect to be an object"))? .ok_or(Error::Str("Expect to be an object"))?
.get(&"entries") .get("entries")
.ok_or(Error::Str("Expect object to have entries"))? .ok_or(Error::Str("Expect object to have entries"))?
.as_array() .as_array()
.ok_or(Error::Str("Expect to be an array"))? .ok_or(Error::Str("Expect to be an array"))?
@@ -139,11 +139,11 @@ impl Binance {
entry entry
.as_object() .as_object()
.unwrap() .unwrap()
.get(&"request") .get("request")
.unwrap() .unwrap()
.as_object() .as_object()
.unwrap() .unwrap()
.get(&"url") .get("url")
.unwrap() .unwrap()
.as_str() .as_str()
.unwrap() .unwrap()
@@ -153,14 +153,14 @@ impl Binance {
let response = entry let response = entry
.as_object() .as_object()
.unwrap() .unwrap()
.get(&"response") .get("response")
.unwrap() .unwrap()
.as_object() .as_object()
.unwrap(); .unwrap();
let content = response.get(&"content").unwrap().as_object().unwrap(); let content = response.get("content").unwrap().as_object().unwrap();
let text = content.get(&"text"); let text = content.get("text");
if text.is_none() { if text.is_none() {
return Ok(BTreeMap::new()); return Ok(BTreeMap::new());
@@ -168,7 +168,7 @@ impl Binance {
let text = text.unwrap().as_str().unwrap(); let text = text.unwrap().as_str().unwrap();
Self::json_to_timestamp_to_ohlc(&sonic_rs::from_str(text).unwrap()) Self::json_to_timestamp_to_ohlc(&serde_json::from_str(text).unwrap())
}) })
.try_fold(BTreeMap::default(), |mut all, res| { .try_fold(BTreeMap::default(), |mut all, res| {
all.append(&mut res?); all.append(&mut res?);

View File

@@ -3,7 +3,7 @@ use std::collections::BTreeMap;
use brk_error::{Error, Result}; use brk_error::{Error, Result};
use brk_types::{Cents, CheckedSub, Date, DateIndex, Height, OHLCCents}; use brk_types::{Cents, CheckedSub, Date, DateIndex, Height, OHLCCents};
use log::info; use log::info;
use sonic_rs::{JsonContainerTrait, JsonValueTrait, Value}; use serde_json::Value;
use crate::{Close, Dollars, High, Low, Open, default_retry}; use crate::{Close, Dollars, High, Low, Open, default_retry};
@@ -51,7 +51,7 @@ impl BRK {
height + CHUNK_SIZE height + CHUNK_SIZE
); );
let body: Value = sonic_rs::from_str(minreq::get(url).send()?.as_str()?)?; let body: Value = serde_json::from_slice(minreq::get(url).send()?.as_bytes())?;
body.as_array() body.as_array()
.ok_or(Error::Str("Expect to be an array"))? .ok_or(Error::Str("Expect to be an array"))?
@@ -96,7 +96,7 @@ impl BRK {
dateindex + CHUNK_SIZE dateindex + CHUNK_SIZE
); );
let body: Value = sonic_rs::from_str(minreq::get(url).send()?.as_str()?)?; let body: Value = serde_json::from_slice(minreq::get(url).send()?.as_bytes())?;
body.as_array() body.as_array()
.ok_or(Error::Str("Expect to be an array"))? .ok_or(Error::Str("Expect to be an array"))?

View File

@@ -3,7 +3,7 @@ use std::collections::BTreeMap;
use brk_error::{Error, Result}; use brk_error::{Error, Result};
use brk_types::{Cents, Close, Date, Dollars, High, Low, OHLCCents, Open, Timestamp}; use brk_types::{Cents, Close, Date, Dollars, High, Low, OHLCCents, Open, Timestamp};
use log::info; use log::info;
use sonic_rs::{JsonContainerTrait, JsonValueTrait, Value}; use serde_json::Value;
use crate::{Fetcher, default_retry}; use crate::{Fetcher, default_retry};
@@ -36,8 +36,8 @@ impl Kraken {
info!("Fetching 1mn prices from Kraken..."); info!("Fetching 1mn prices from Kraken...");
default_retry(|_| { default_retry(|_| {
Self::json_to_timestamp_to_ohlc(&sonic_rs::from_str( Self::json_to_timestamp_to_ohlc(&serde_json::from_slice(
minreq::get(Self::url(1)).send()?.as_str()?, minreq::get(Self::url(1)).send()?.as_bytes(),
)?) )?)
}) })
} }
@@ -58,8 +58,8 @@ impl Kraken {
info!("Fetching daily prices from Kraken..."); info!("Fetching daily prices from Kraken...");
default_retry(|_| { default_retry(|_| {
Self::json_to_date_to_ohlc(&sonic_rs::from_str( Self::json_to_date_to_ohlc(&serde_json::from_slice(
minreq::get(Self::url(1440)).send()?.as_str()?, minreq::get(Self::url(1440)).send()?.as_bytes(),
)?) )?)
}) })
} }
@@ -79,11 +79,11 @@ impl Kraken {
{ {
json.as_object() json.as_object()
.ok_or(Error::Str("Expect to be an object"))? .ok_or(Error::Str("Expect to be an object"))?
.get(&"result") .get("result")
.ok_or(Error::Str("Expect object to have result"))? .ok_or(Error::Str("Expect object to have result"))?
.as_object() .as_object()
.ok_or(Error::Str("Expect to be an object"))? .ok_or(Error::Str("Expect to be an object"))?
.get(&"XXBTZUSD") .get("XXBTZUSD")
.ok_or(Error::Str("Expect to have XXBTZUSD"))? .ok_or(Error::Str("Expect to have XXBTZUSD"))?
.as_array() .as_array()
.ok_or(Error::Str("Expect to be an array"))? .ok_or(Error::Str("Expect to be an array"))?

View File

@@ -1,4 +1,8 @@
use std::{env, fs, path::Path, time::Instant}; use std::{
env, fs,
path::Path,
time::{Duration, Instant},
};
use brk_bencher::Bencher; use brk_bencher::Bencher;
use brk_error::Result; use brk_error::Result;
@@ -6,7 +10,7 @@ use brk_indexer::Indexer;
use brk_iterator::Blocks; use brk_iterator::Blocks;
use brk_reader::Reader; use brk_reader::Reader;
use brk_rpc::{Auth, Client}; use brk_rpc::{Auth, Client};
use log::debug; use log::{debug, info};
use vecdb::Exit; use vecdb::Exit;
fn main() -> Result<()> { fn main() -> Result<()> {
@@ -33,7 +37,7 @@ fn main() -> Result<()> {
let mut indexer = Indexer::forced_import(&outputs_dir)?; let mut indexer = Indexer::forced_import(&outputs_dir)?;
let mut bencher = let mut bencher =
Bencher::from_cargo_env(env!("CARGO_PKG_NAME"), &outputs_dir.join("indexed/stores"))?; Bencher::from_cargo_env(env!("CARGO_PKG_NAME"), &outputs_dir.join("indexed"))?;
bencher.start()?; bencher.start()?;
let exit = Exit::new(); let exit = Exit::new();
@@ -46,10 +50,12 @@ fn main() -> Result<()> {
let i = Instant::now(); let i = Instant::now();
indexer.checked_index(&blocks, &client, &exit)?; indexer.checked_index(&blocks, &client, &exit)?;
dbg!(i.elapsed()); info!("Done in {:?}", i.elapsed());
// We want to benchmark the drop too // We want to benchmark the drop too
drop(indexer); drop(indexer);
std::thread::sleep(Duration::from_secs(10));
Ok(()) Ok(())
} }

View File

@@ -11,15 +11,27 @@ fn main() -> Result<()> {
let indexer = Indexer::forced_import(&outputs_dir)?; let indexer = Indexer::forced_import(&outputs_dir)?;
let mut sum = Sats::ZERO; // let mut sum = Sats::ZERO;
let mut count: usize = 0; // let mut count: usize = 0;
for value in indexer.vecs.txoutindex_to_value.clean_iter().unwrap() { // for value in indexer.vecs.txoutindex_to_value.clean_iter()? {
sum += value; // sum += value;
count += 1; // count += 1;
} // }
println!("sum = {sum}, count = {count}"); // println!("sum = {sum}, count = {count}");
dbg!(
indexer
.vecs
.txoutindex_to_value
.iter()?
.enumerate()
.take(200)
// .filter(|(_, op)| !op.is_coinbase())
// .map(|(i, op)| (i, op.txindex(), op.vout()))
.collect::<Vec<_>>()
);
Ok(()) Ok(())
} }

View File

@@ -96,6 +96,7 @@ impl From<(Height, &mut Vecs, &Stores)> for Indexes {
let stores_starting_height = stores.starting_height(); let stores_starting_height = stores.starting_height();
let height = vecs_starting_height.min(stores_starting_height); let height = vecs_starting_height.min(stores_starting_height);
if height < min_height { if height < min_height {
dbg!(height, min_height);
unreachable!() unreachable!()
} }

View File

@@ -17,13 +17,13 @@ use rayon::prelude::*;
use rustc_hash::{FxHashMap, FxHashSet}; use rustc_hash::{FxHashMap, FxHashSet};
use vecdb::{AnyVec, Exit, GenericStoredVec, Reader, TypedVecIterator}; use vecdb::{AnyVec, Exit, GenericStoredVec, Reader, TypedVecIterator};
mod indexes; mod indexes;
mod stores_v2; // mod stores_v2;
// mod stores_v3; mod stores_v3;
mod vecs; mod vecs;
pub use indexes::*; pub use indexes::*;
pub use stores_v2::*; // pub use stores_v2::*;
// pub use stores_v3::*; pub use stores_v3::*;
pub use vecs::*; pub use vecs::*;
// One version for all data sources // One version for all data sources

View File

@@ -186,7 +186,7 @@ impl Stores {
}) })
.collect::<Result<Vec<_>>>()?; .collect::<Result<Vec<_>>>()?;
self.database.batch().commit_partitions(tuples)?; self.database.batch().commit_keyspaces(tuples)?;
self.database self.database
.persist(PersistMode::SyncAll) .persist(PersistMode::SyncAll)

View File

@@ -18,6 +18,7 @@ use crate::Indexes;
#[derive(Clone, Traversable)] #[derive(Clone, Traversable)]
pub struct Vecs { pub struct Vecs {
db: Database, db: Database,
pub emptyoutputindex_to_txindex: CompressedVec<EmptyOutputIndex, TxIndex>, pub emptyoutputindex_to_txindex: CompressedVec<EmptyOutputIndex, TxIndex>,
pub height_to_blockhash: RawVec<Height, BlockHash>, pub height_to_blockhash: RawVec<Height, BlockHash>,
pub height_to_difficulty: CompressedVec<Height, StoredF64>, pub height_to_difficulty: CompressedVec<Height, StoredF64>,
@@ -52,7 +53,7 @@ pub struct Vecs {
pub p2wshaddressindex_to_p2wshbytes: RawVec<P2WSHAddressIndex, P2WSHBytes>, pub p2wshaddressindex_to_p2wshbytes: RawVec<P2WSHAddressIndex, P2WSHBytes>,
pub txindex_to_base_size: CompressedVec<TxIndex, StoredU32>, pub txindex_to_base_size: CompressedVec<TxIndex, StoredU32>,
pub txindex_to_first_txinindex: CompressedVec<TxIndex, TxInIndex>, pub txindex_to_first_txinindex: CompressedVec<TxIndex, TxInIndex>,
pub txindex_to_first_txoutindex: CompressedVec<TxIndex, TxOutIndex>, pub txindex_to_first_txoutindex: RawVec<TxIndex, TxOutIndex>,
pub txindex_to_height: CompressedVec<TxIndex, Height>, pub txindex_to_height: CompressedVec<TxIndex, Height>,
pub txindex_to_is_explicitly_rbf: CompressedVec<TxIndex, StoredBool>, pub txindex_to_is_explicitly_rbf: CompressedVec<TxIndex, StoredBool>,
pub txindex_to_rawlocktime: CompressedVec<TxIndex, RawLockTime>, pub txindex_to_rawlocktime: CompressedVec<TxIndex, RawLockTime>,
@@ -167,11 +168,7 @@ impl Vecs {
"first_txinindex", "first_txinindex",
version, version,
)?, )?,
txindex_to_first_txoutindex: CompressedVec::forced_import( txindex_to_first_txoutindex: RawVec::forced_import(&db, "first_txoutindex", version)?,
&db,
"first_txoutindex",
version,
)?,
txindex_to_is_explicitly_rbf: CompressedVec::forced_import( txindex_to_is_explicitly_rbf: CompressedVec::forced_import(
&db, &db,
"is_explicitly_rbf", "is_explicitly_rbf",
@@ -197,6 +194,8 @@ impl Vecs {
.collect(), .collect(),
)?; )?;
this.db.compact()?;
Ok(this) Ok(this)
} }
@@ -420,4 +419,8 @@ impl Vecs {
] ]
.into_iter() .into_iter()
} }
pub fn db(&self) -> &Database {
&self.db
}
} }

View File

@@ -28,8 +28,8 @@ pub fn init(path: Option<&Path>) -> io::Result<()> {
} }
Builder::from_env(Env::default().default_filter_or( Builder::from_env(Env::default().default_filter_or(
"debug,bitcoin=off,bitcoincore-rpc=off,fjall=off,brk_fjall=off,lsm_tree=off,rolldown=off,rmcp=off,brk_rmcp=off,tracing=off,aide=off", "debug,bitcoin=off,bitcoincore-rpc=off,fjall=off,brk_fjall=off,lsm_tree=off,rolldown=off,rmcp=off,brk_rmcp=off,tracing=off,aide=off,rustls=off",
// "debug,fjall=trace,bitcoin=off,bitcoincore-rpc=off,rolldown=off,rmcp=off,brk_rmcp=off,tracing=off,aide=off", // "debug,fjall=trace,bitcoin=off,bitcoincore-rpc=off,rolldown=off,rmcp=off,brk_rmcp=off,tracing=off,aide=off,rustls=off",
)) ))
.format(move |buf, record| { .format(move |buf, record| {
let date_time = Timestamp::now() let date_time = Timestamp::now()

View File

@@ -29,7 +29,8 @@ jiff = { workspace = true }
log = { workspace = true } log = { workspace = true }
quick_cache = "0.6.18" quick_cache = "0.6.18"
schemars = { workspace = true } schemars = { workspace = true }
sonic-rs = { workspace = true } serde = { workspace = true }
serde_json = { workspace = true }
tokio = { workspace = true } tokio = { workspace = true }
tower-http = { version = "0.6.6", features = ["compression-full", "trace"] } tower-http = { version = "0.6.6", features = ["compression-full", "trace"] }
tracing = "0.1.41" tracing = "0.1.41"

View File

@@ -3,6 +3,7 @@ use axum::{
http::{Response, StatusCode}, http::{Response, StatusCode},
response::IntoResponse, response::IntoResponse,
}; };
use serde::Serialize;
use super::header_map::HeaderMapExtended; use super::header_map::HeaderMapExtended;
@@ -13,10 +14,10 @@ where
fn new_not_modified() -> Self; fn new_not_modified() -> Self;
fn new_json<T>(value: T, etag: &str) -> Self fn new_json<T>(value: T, etag: &str) -> Self
where where
T: sonic_rs::Serialize; T: Serialize;
fn new_json_with<T>(status: StatusCode, value: T, etag: &str) -> Self fn new_json_with<T>(status: StatusCode, value: T, etag: &str) -> Self
where where
T: sonic_rs::Serialize; T: Serialize;
} }
impl ResponseExtended for Response<Body> { impl ResponseExtended for Response<Body> {
@@ -29,16 +30,16 @@ impl ResponseExtended for Response<Body> {
fn new_json<T>(value: T, etag: &str) -> Self fn new_json<T>(value: T, etag: &str) -> Self
where where
T: sonic_rs::Serialize, T: Serialize,
{ {
Self::new_json_with(StatusCode::default(), value, etag) Self::new_json_with(StatusCode::default(), value, etag)
} }
fn new_json_with<T>(status: StatusCode, value: T, etag: &str) -> Self fn new_json_with<T>(status: StatusCode, value: T, etag: &str) -> Self
where where
T: sonic_rs::Serialize, T: Serialize,
{ {
let bytes = sonic_rs::to_vec(&value).unwrap(); let bytes = serde_json::to_vec(&value).unwrap();
let mut response = Response::builder().body(bytes.into()).unwrap(); let mut response = Response::builder().body(bytes.into()).unwrap();
*response.status_mut() = status; *response.status_mut() = status;
let headers = response.headers_mut(); let headers = response.headers_mut();

View File

@@ -1,5 +1,6 @@
use axum::{http::StatusCode, response::Response}; use axum::{http::StatusCode, response::Response};
use brk_error::{Error, Result}; use brk_error::{Error, Result};
use serde::Serialize;
use crate::extended::ResponseExtended; use crate::extended::ResponseExtended;
@@ -7,7 +8,7 @@ pub trait ResultExtended<T> {
fn with_status(self) -> Result<T, (StatusCode, String)>; fn with_status(self) -> Result<T, (StatusCode, String)>;
fn to_json_response(self, etag: &str) -> Response fn to_json_response(self, etag: &str) -> Response
where where
T: sonic_rs::Serialize; T: Serialize;
} }
impl<T> ResultExtended<T> for Result<T> { impl<T> ResultExtended<T> for Result<T> {
@@ -29,7 +30,7 @@ impl<T> ResultExtended<T> for Result<T> {
fn to_json_response(self, etag: &str) -> Response fn to_json_response(self, etag: &str) -> Response
where where
T: sonic_rs::Serialize, T: Serialize,
{ {
match self.with_status() { match self.with_status() {
Ok(value) => Response::new_json(&value, etag), Ok(value) => Response::new_json(&value, etag),

View File

@@ -6,7 +6,7 @@ use std::{
use derive_deref::{Deref, DerefMut}; use derive_deref::{Deref, DerefMut};
use serde::{Serialize, Serializer, ser::SerializeTuple}; use serde::{Serialize, Serializer, ser::SerializeTuple};
use vecdb::{Compressable, Formattable}; use vecdb::{Compressable, Formattable, TransparentCompressable};
use zerocopy::{FromBytes, Immutable, IntoBytes, KnownLayout}; use zerocopy::{FromBytes, Immutable, IntoBytes, KnownLayout};
use crate::StoredF64; use crate::StoredF64;
@@ -77,7 +77,7 @@ impl Formattable for OHLCCents {
} }
} }
#[derive(Debug, Default, Clone, FromBytes, Immutable, IntoBytes, KnownLayout)] #[derive(Debug, Default, Clone, Copy, FromBytes, Immutable, IntoBytes, KnownLayout)]
#[repr(C)] #[repr(C)]
pub struct OHLCDollars { pub struct OHLCDollars {
pub open: Open<Dollars>, pub open: Open<Dollars>,
@@ -86,17 +86,24 @@ pub struct OHLCDollars {
pub close: Close<Dollars>, pub close: Close<Dollars>,
} }
// This is FAKE, just to be able to use EagerVec
// Need to find a better way
impl Compressable for OHLCDollars {
type NumberType = u64;
}
impl TransparentCompressable<u64> for OHLCDollars {}
impl Serialize for OHLCDollars { impl Serialize for OHLCDollars {
fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error> fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
where where
S: Serializer, S: Serializer,
{ {
let mut tup = serializer.serialize_tuple(4)?; let mut tuple = serializer.serialize_tuple(4)?;
tup.serialize_element(&self.open)?; tuple.serialize_element(&self.open)?;
tup.serialize_element(&self.high)?; tuple.serialize_element(&self.high)?;
tup.serialize_element(&self.low)?; tuple.serialize_element(&self.low)?;
tup.serialize_element(&self.close)?; tuple.serialize_element(&self.close)?;
tup.end() tuple.end()
} }
} }
@@ -160,7 +167,7 @@ impl Formattable for OHLCDollars {
} }
} }
#[derive(Debug, Default, Clone, FromBytes, Immutable, IntoBytes, KnownLayout)] #[derive(Debug, Default, Clone, Copy, FromBytes, Immutable, IntoBytes, KnownLayout)]
#[repr(C)] #[repr(C)]
pub struct OHLCSats { pub struct OHLCSats {
pub open: Open<Sats>, pub open: Open<Sats>,
@@ -169,17 +176,24 @@ pub struct OHLCSats {
pub close: Close<Sats>, pub close: Close<Sats>,
} }
// This is FAKE, just to be able to use EagerVec
// Need to find a better way
impl Compressable for OHLCSats {
type NumberType = u64;
}
impl TransparentCompressable<u64> for OHLCSats {}
impl Serialize for OHLCSats { impl Serialize for OHLCSats {
fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error> fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
where where
S: Serializer, S: Serializer,
{ {
let mut tup = serializer.serialize_tuple(4)?; let mut tuple = serializer.serialize_tuple(4)?;
tup.serialize_element(&self.open)?; tuple.serialize_element(&self.open)?;
tup.serialize_element(&self.high)?; tuple.serialize_element(&self.high)?;
tup.serialize_element(&self.low)?; tuple.serialize_element(&self.low)?;
tup.serialize_element(&self.close)?; tuple.serialize_element(&self.close)?;
tup.end() tuple.end()
} }
} }

View File

@@ -34,14 +34,17 @@ impl OutPoint {
Self(txindex_bits | vout_bits) Self(txindex_bits | vout_bits)
} }
#[inline(always)]
pub fn txindex(self) -> TxIndex { pub fn txindex(self) -> TxIndex {
TxIndex::from((self.0 >> 32) as u32) TxIndex::from((self.0 >> 32) as u32)
} }
#[inline(always)]
pub fn vout(self) -> Vout { pub fn vout(self) -> Vout {
Vout::from(self.0 as u32) Vout::from(self.0 as u32)
} }
#[inline(always)]
pub fn is_coinbase(self) -> bool { pub fn is_coinbase(self) -> bool {
self == Self::COINBASE self == Self::COINBASE
} }

View File

@@ -7,7 +7,7 @@ use bitcoin::Amount;
use derive_deref::Deref; use derive_deref::Deref;
use schemars::JsonSchema; use schemars::JsonSchema;
use serde::{Deserialize, Serialize}; use serde::{Deserialize, Serialize};
use vecdb::{CheckedSub, Compressable, Formattable}; use vecdb::{CheckedSub, Compressable, Formattable, SaturatingAdd};
use zerocopy::{FromBytes, Immutable, IntoBytes, KnownLayout}; use zerocopy::{FromBytes, Immutable, IntoBytes, KnownLayout};
use crate::StoredF64; use crate::StoredF64;
@@ -97,7 +97,7 @@ impl AddAssign for Sats {
} }
} }
impl CheckedSub<Sats> for Sats { impl CheckedSub for Sats {
fn checked_sub(self, rhs: Self) -> Option<Self> { fn checked_sub(self, rhs: Self) -> Option<Self> {
self.0.checked_sub(rhs.0).map(Self::from) self.0.checked_sub(rhs.0).map(Self::from)
} }
@@ -109,6 +109,12 @@ impl CheckedSub<usize> for Sats {
} }
} }
impl SaturatingAdd for Sats {
fn saturating_add(self, rhs: Self) -> Self {
Self(self.0.saturating_add(rhs.0))
}
}
impl SubAssign for Sats { impl SubAssign for Sats {
fn sub_assign(&mut self, rhs: Self) { fn sub_assign(&mut self, rhs: Self) {
*self = self.checked_sub(rhs).unwrap(); *self = self.checked_sub(rhs).unwrap();