server: mcp + global: refactor

This commit is contained in:
nym21
2025-06-21 12:43:14 +02:00
parent c9e0f9d985
commit c3ae3cb768
92 changed files with 13601 additions and 12554 deletions

413
Cargo.lock generated
View File

@@ -164,9 +164,9 @@ checksum = "7c02d123df017efcdfbd739ef81735b36c5ba83ec3c59c80a9d7ecc718f92e50"
[[package]] [[package]]
name = "async-compression" name = "async-compression"
version = "0.4.24" version = "0.4.25"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "d615619615a650c571269c00dca41db04b9210037fa76ed8239f70404ab56985" checksum = "40f6024f3f856663b45fd0c9b6f2024034a702f453549449e0d84a305900dad4"
dependencies = [ dependencies = [
"brotli", "brotli",
"flate2", "flate2",
@@ -202,9 +202,9 @@ dependencies = [
[[package]] [[package]]
name = "autocfg" name = "autocfg"
version = "1.4.0" version = "1.5.0"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "ace50bade8e6234aa140d9a2f552bbee1db4d353f69b8217bc503490fc1a9f26" checksum = "c08606f8c3cbf4ce6ec8e28fb0014a2c086708fe954eaa885384a6165172e7e8"
[[package]] [[package]]
name = "axum" name = "axum"
@@ -272,7 +272,7 @@ dependencies = [
"miniz_oxide", "miniz_oxide",
"object", "object",
"rustc-demangle", "rustc-demangle",
"windows-targets", "windows-targets 0.52.6",
] ]
[[package]] [[package]]
@@ -452,9 +452,9 @@ dependencies = [
"brk_exit", "brk_exit",
"brk_fetcher", "brk_fetcher",
"brk_indexer", "brk_indexer",
"brk_interface",
"brk_logger", "brk_logger",
"brk_parser", "brk_parser",
"brk_query",
"brk_server", "brk_server",
"brk_state", "brk_state",
"brk_store", "brk_store",
@@ -482,9 +482,9 @@ dependencies = [
"brk_exit", "brk_exit",
"brk_fetcher", "brk_fetcher",
"brk_indexer", "brk_indexer",
"brk_interface",
"brk_logger", "brk_logger",
"brk_parser", "brk_parser",
"brk_query",
"brk_server", "brk_server",
"brk_vec", "brk_vec",
"clap", "clap",
@@ -555,7 +555,6 @@ dependencies = [
"brk_core", "brk_core",
"brk_logger", "brk_logger",
"color-eyre", "color-eyre",
"jiff",
"log", "log",
"minreq", "minreq",
"serde_json", "serde_json",
@@ -579,6 +578,24 @@ dependencies = [
"rayon", "rayon",
] ]
[[package]]
name = "brk_interface"
version = "0.0.66"
dependencies = [
"brk_computer",
"brk_core",
"brk_indexer",
"brk_vec",
"color-eyre",
"derive_deref",
"rmcp",
"schemars",
"serde",
"serde_json",
"serde_with",
"tabled",
]
[[package]] [[package]]
name = "brk_logger" name = "brk_logger"
version = "0.0.66" version = "0.0.66"
@@ -604,24 +621,6 @@ dependencies = [
"zerocopy", "zerocopy",
] ]
[[package]]
name = "brk_query"
version = "0.0.66"
dependencies = [
"brk_computer",
"brk_core",
"brk_indexer",
"brk_vec",
"clap",
"clap_derive",
"color-eyre",
"derive_deref",
"serde",
"serde_json",
"serde_with",
"tabled",
]
[[package]] [[package]]
name = "brk_rolldown" name = "brk_rolldown"
version = "0.0.1" version = "0.0.1"
@@ -939,9 +938,9 @@ dependencies = [
"brk_exit", "brk_exit",
"brk_fetcher", "brk_fetcher",
"brk_indexer", "brk_indexer",
"brk_interface",
"brk_logger", "brk_logger",
"brk_parser", "brk_parser",
"brk_query",
"brk_vec", "brk_vec",
"clap", "clap",
"clap_derive", "clap_derive",
@@ -949,7 +948,7 @@ dependencies = [
"jiff", "jiff",
"log", "log",
"minreq", "minreq",
"oxc", "rmcp",
"serde", "serde",
"tokio", "tokio",
"tower-http", "tower-http",
@@ -963,9 +962,7 @@ version = "0.0.66"
dependencies = [ dependencies = [
"bincode", "bincode",
"brk_core", "brk_core",
"brk_store",
"derive_deref", "derive_deref",
"fjall",
"serde", "serde",
"zerocopy", "zerocopy",
"zerocopy-derive", "zerocopy-derive",
@@ -1136,8 +1133,10 @@ checksum = "c469d952047f47f91b68d1cba3f10d63c11d73e4636f24f08daf0278abf01c4d"
dependencies = [ dependencies = [
"android-tzdata", "android-tzdata",
"iana-time-zone", "iana-time-zone",
"js-sys",
"num-traits", "num-traits",
"serde", "serde",
"wasm-bindgen",
"windows-link", "windows-link",
] ]
@@ -1570,12 +1569,12 @@ checksum = "877a4ace8713b0bcf2a4e7eec82529c029f1d0619886d18145fea96c3ffe5c0f"
[[package]] [[package]]
name = "errno" name = "errno"
version = "0.3.12" version = "0.3.13"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "cea14ef9355e3beab063703aa9dab15afd25f0667c341310c1e5274bb1d0da18" checksum = "778e2ac28f6c47af28e4907f13ffd1e1ddbd400980a9abd7c8df189bf578a5ad"
dependencies = [ dependencies = [
"libc", "libc",
"windows-sys 0.59.0", "windows-sys 0.60.2",
] ]
[[package]] [[package]]
@@ -2191,15 +2190,15 @@ checksum = "bbd2bcb4c963f2ddae06a2efc7e9f3591312473c50c6685e1f298068316e66fe"
[[package]] [[package]]
name = "libc" name = "libc"
version = "0.2.173" version = "0.2.174"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "d8cfeafaffdbc32176b64fb251369d52ea9f0a8fbc6f8759edffef7b525d64bb" checksum = "1171693293099992e19cddea4e8b849964e9846f4acee11b3948bcc337be8776"
[[package]] [[package]]
name = "liblzma" name = "liblzma"
version = "0.4.1" version = "0.4.2"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "66352d7a8ac12d4877b6e6ea5a9b7650ee094257dc40889955bea5bc5b08c1d0" checksum = "0791ab7e08ccc8e0ce893f6906eb2703ed8739d8e89b57c0714e71bad09024c8"
dependencies = [ dependencies = [
"liblzma-sys", "liblzma-sys",
] ]
@@ -2283,9 +2282,9 @@ dependencies = [
[[package]] [[package]]
name = "lz4_flex" name = "lz4_flex"
version = "0.11.4" version = "0.11.5"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "2c592ad9fbc1b7838633b3ae55ce69b17d01150c72fcef229fbb819d39ee51ee" checksum = "08ab2867e3eeeca90e844d1940eab391c9dc5228783db2ed999acbc0a9ed375a"
[[package]] [[package]]
name = "matchers" name = "matchers"
@@ -2495,9 +2494,9 @@ checksum = "26995317201fa17f3656c36716aed4a7c81743a9634ac4c99c0eeda495db0cec"
[[package]] [[package]]
name = "oxc" name = "oxc"
version = "0.73.0" version = "0.73.2"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "0053b9142d0776d038db03b967c87ebafa5bffdc4150183b5540adbcbc00fabe" checksum = "2e31318da7866fb30c000877ae98c30d1f76212e4bdc6c088c5cb847e5d40954"
dependencies = [ dependencies = [
"oxc_allocator", "oxc_allocator",
"oxc_ast", "oxc_ast",
@@ -2519,9 +2518,9 @@ dependencies = [
[[package]] [[package]]
name = "oxc-browserslist" name = "oxc-browserslist"
version = "2.0.5" version = "2.0.6"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "08f56cc6eaa393139eda31fc22b4638e3d3e58496122c1738026aa7c4a9631f9" checksum = "5c7bcc31e3014fa4226346b8596cda0f5cb5797045fa2760c6b871407db5642c"
dependencies = [ dependencies = [
"nom", "nom",
"rustc-hash", "rustc-hash",
@@ -2558,9 +2557,9 @@ dependencies = [
[[package]] [[package]]
name = "oxc_allocator" name = "oxc_allocator"
version = "0.73.0" version = "0.73.2"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "6ead9af98156b7a80512e1d0db127572ee4a8de43b41eaf5b797482c8530fdad" checksum = "f4cb225affc487a1bc867455220d5427d0f2a35ed25d896f99bb3b912d49fb9e"
dependencies = [ dependencies = [
"allocator-api2", "allocator-api2",
"bumpalo", "bumpalo",
@@ -2573,9 +2572,9 @@ dependencies = [
[[package]] [[package]]
name = "oxc_ast" name = "oxc_ast"
version = "0.73.0" version = "0.73.2"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "bac5f46a96dd0bc9f63b9745568471b42cb0b617716d311c34a79f68e355796a" checksum = "9ced8dcc14d588fa32594d70ff8f194712036d02d7a96718bce38abbfec72ed6"
dependencies = [ dependencies = [
"bitflags 2.9.1", "bitflags 2.9.1",
"oxc_allocator", "oxc_allocator",
@@ -2589,9 +2588,9 @@ dependencies = [
[[package]] [[package]]
name = "oxc_ast_macros" name = "oxc_ast_macros"
version = "0.73.0" version = "0.73.2"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "d2c25817869aab36130e4d0c7cf8630582e408bff34b95fb66a03512bb3defe5" checksum = "43494643bd6d76a62446c58ae98568bf630c0bdd90726d7956d3f8e1e17f5906"
dependencies = [ dependencies = [
"phf", "phf",
"proc-macro2", "proc-macro2",
@@ -2601,9 +2600,9 @@ dependencies = [
[[package]] [[package]]
name = "oxc_ast_visit" name = "oxc_ast_visit"
version = "0.73.0" version = "0.73.2"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "e056ca12ae23492897b2d9d915581c03cd1b2d6158c52d67c67bcc3ceebdae18" checksum = "761d7f84b87080cf337c843fa60a1c3f50eb53eba0ec1e42d8758e99cd834031"
dependencies = [ dependencies = [
"oxc_allocator", "oxc_allocator",
"oxc_ast", "oxc_ast",
@@ -2614,9 +2613,9 @@ dependencies = [
[[package]] [[package]]
name = "oxc_cfg" name = "oxc_cfg"
version = "0.73.0" version = "0.73.2"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "424c8259dc5bbd4f38b63ca6498e3140bd0ddff9f105599cc060013c89c94160" checksum = "4d561bbe6e05cd3be34802a22e799c3c9bd5987daee2dbb51b2561ab78d39420"
dependencies = [ dependencies = [
"bitflags 2.9.1", "bitflags 2.9.1",
"itertools", "itertools",
@@ -2629,9 +2628,9 @@ dependencies = [
[[package]] [[package]]
name = "oxc_codegen" name = "oxc_codegen"
version = "0.73.0" version = "0.73.2"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "e28e1e25e206a06c8a2b40c78b51e7eadfd9f931494d72cc740bf6fba66890d1" checksum = "d694cadd4d0d86831197d720ad245ea3c82366db778230dfa8d80475d2049c80"
dependencies = [ dependencies = [
"bitflags 2.9.1", "bitflags 2.9.1",
"cow-utils", "cow-utils",
@@ -2650,9 +2649,9 @@ dependencies = [
[[package]] [[package]]
name = "oxc_data_structures" name = "oxc_data_structures"
version = "0.73.0" version = "0.73.2"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "cb7bba3e8f1e9b15d80b192f87b4e05aa6fbaa03e16fd62d6c087a2427caf8ab" checksum = "1ebcebde288c0dbc9b8bb7ecf8d4eb3d64c6f122609fbca9f89dce356786fa19"
dependencies = [ dependencies = [
"ropey", "ropey",
"rustversion", "rustversion",
@@ -2660,9 +2659,9 @@ dependencies = [
[[package]] [[package]]
name = "oxc_diagnostics" name = "oxc_diagnostics"
version = "0.73.0" version = "0.73.2"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "893f091e2c224765d9b156b90c9ce09d724f60144549db05f685919585cf745e" checksum = "136bd60c8d25e6c1b879a01d5f52ad260385afa2d74b0aa81c1cbc9680b468ef"
dependencies = [ dependencies = [
"cow-utils", "cow-utils",
"oxc-miette", "oxc-miette",
@@ -2670,9 +2669,9 @@ dependencies = [
[[package]] [[package]]
name = "oxc_ecmascript" name = "oxc_ecmascript"
version = "0.73.0" version = "0.73.2"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "31554a9681ebafe20e175b0a99fdee45ddf38285a5e9ca4bdefe223779963870" checksum = "405573ecc303a3e9852b2873aad30e8249b3e3cb668cf265f53a9072dada8d50"
dependencies = [ dependencies = [
"num-bigint", "num-bigint",
"num-traits", "num-traits",
@@ -2683,9 +2682,9 @@ dependencies = [
[[package]] [[package]]
name = "oxc_estree" name = "oxc_estree"
version = "0.73.0" version = "0.73.2"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "19c82ec9b051aab8fbbd68366ad90de958dd32365f9f445c89395ad42cf050fe" checksum = "bcd0073f1b212fda3c1e37728db069bf22d136a90f58da4f6214a6ada7552dde"
dependencies = [ dependencies = [
"itoa", "itoa",
"oxc_data_structures", "oxc_data_structures",
@@ -2704,9 +2703,9 @@ dependencies = [
[[package]] [[package]]
name = "oxc_isolated_declarations" name = "oxc_isolated_declarations"
version = "0.73.0" version = "0.73.2"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "e65c8ab6a35eae3d5f438055e6240cb014be3d2833c4c5411f7b1110d899408a" checksum = "cc08c216028563ebe9f35e9ff661c24d5f885735775d21fc00d579f7a06db10a"
dependencies = [ dependencies = [
"bitflags 2.9.1", "bitflags 2.9.1",
"oxc_allocator", "oxc_allocator",
@@ -2721,9 +2720,9 @@ dependencies = [
[[package]] [[package]]
name = "oxc_mangler" name = "oxc_mangler"
version = "0.73.0" version = "0.73.2"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "e7e41bc4d7628015266d35f14e35db6e2a89d8d54235859acf56aa8146dbcef6" checksum = "b301242a458a38572ec3ffd73d836de068019d605a49360f0c80ff9fcdbb0485"
dependencies = [ dependencies = [
"fixedbitset", "fixedbitset",
"itertools", "itertools",
@@ -2738,9 +2737,9 @@ dependencies = [
[[package]] [[package]]
name = "oxc_minifier" name = "oxc_minifier"
version = "0.73.0" version = "0.73.2"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "8b9e62020bc8ee278dfb29f396682a50adbe9981895bf10580133cddf50eea43" checksum = "87d130925e189994a4695d7379f14fd427f347642741d8180baa0b8a6699607b"
dependencies = [ dependencies = [
"cow-utils", "cow-utils",
"oxc_allocator", "oxc_allocator",
@@ -2760,9 +2759,9 @@ dependencies = [
[[package]] [[package]]
name = "oxc_parser" name = "oxc_parser"
version = "0.73.0" version = "0.73.2"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "120aa69d4df56d66fc5f0a525c5ff02369cee5f97289ea20b533b7f76530413b" checksum = "4c0b65b7f87759287bc7c8d394733cda9bca5e14fe7b71388932c926f8cde67b"
dependencies = [ dependencies = [
"bitflags 2.9.1", "bitflags 2.9.1",
"cow-utils", "cow-utils",
@@ -2783,9 +2782,9 @@ dependencies = [
[[package]] [[package]]
name = "oxc_regular_expression" name = "oxc_regular_expression"
version = "0.73.0" version = "0.73.2"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "f22f6568dd7bb5e84eb45a22bda827d5735f09a95ca8e3e233be56516fdc3d9b" checksum = "0899e918e7da993922ac9d85a7cacefc5519afbab002a4e239aa0a8dc2201297"
dependencies = [ dependencies = [
"bitflags 2.9.1", "bitflags 2.9.1",
"oxc_allocator", "oxc_allocator",
@@ -2818,9 +2817,9 @@ dependencies = [
[[package]] [[package]]
name = "oxc_semantic" name = "oxc_semantic"
version = "0.73.0" version = "0.73.2"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "d5345a0f4645e74bc85bfb3bdc6f0d85b0810fd49f9947c85bc4be16feb901a2" checksum = "b916b0e8bf2045895726a5069947789226eaae47e0f27412956163e02bd20816"
dependencies = [ dependencies = [
"itertools", "itertools",
"oxc_allocator", "oxc_allocator",
@@ -2855,9 +2854,9 @@ dependencies = [
[[package]] [[package]]
name = "oxc_span" name = "oxc_span"
version = "0.73.0" version = "0.73.2"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "6a54f79470afa4c6cea26c69a0af0fe9ba61f4108b4254fe3170ae6fce04fa11" checksum = "d51599c317d4e19c07978bde4af8a40844a2410fb7b455836c3441a41865acfa"
dependencies = [ dependencies = [
"compact_str", "compact_str",
"oxc-miette", "oxc-miette",
@@ -2869,9 +2868,9 @@ dependencies = [
[[package]] [[package]]
name = "oxc_syntax" name = "oxc_syntax"
version = "0.73.0" version = "0.73.2"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "b27a8e08726b8d2db468f69110a587c1b2e01fa82e0c58f3519dbcebaaee267f" checksum = "1ee16107642e60a1f53dac2dbaaf1a1cdc696c0f7427946f8b8906f3e09a85f0"
dependencies = [ dependencies = [
"bitflags 2.9.1", "bitflags 2.9.1",
"cow-utils", "cow-utils",
@@ -2891,9 +2890,9 @@ dependencies = [
[[package]] [[package]]
name = "oxc_transformer" name = "oxc_transformer"
version = "0.73.0" version = "0.73.2"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "e71275f58fd5cce88f19749c06e2b8faa6d742f886a88f985fdac65a7625b858" checksum = "4bd95eedb44dd519dfe157334512977de744725cda45d4c9005369e3f155d6b4"
dependencies = [ dependencies = [
"base64 0.22.1", "base64 0.22.1",
"compact_str", "compact_str",
@@ -2922,9 +2921,9 @@ dependencies = [
[[package]] [[package]]
name = "oxc_transformer_plugins" name = "oxc_transformer_plugins"
version = "0.73.0" version = "0.73.2"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "07776f86bc1bd06ec25ec14ea2616b82ec237c57c8a7683b3d0eedacac80e0c3" checksum = "b3aa2124adf31739ae94ca9ee5ecb727b42ad7bc3e33ca072a6c9fa791d9f7b5"
dependencies = [ dependencies = [
"cow-utils", "cow-utils",
"itoa", "itoa",
@@ -2944,9 +2943,9 @@ dependencies = [
[[package]] [[package]]
name = "oxc_traverse" name = "oxc_traverse"
version = "0.73.0" version = "0.73.2"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "23b85b10812142ec2df01b4bedd1a76e298883ffb1fa73a4e4a16013467e9f76" checksum = "fdd6b3996c515cf7ce1c204b3b01a5a1469462412885ea6fa7f0d236d8f2c454"
dependencies = [ dependencies = [
"itoa", "itoa",
"oxc_allocator", "oxc_allocator",
@@ -2991,9 +2990,15 @@ dependencies = [
"libc", "libc",
"redox_syscall", "redox_syscall",
"smallvec", "smallvec",
"windows-targets", "windows-targets 0.52.6",
] ]
[[package]]
name = "paste"
version = "1.0.15"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "57c0d7b74b563b49d38dae00a0c37d4d6de9b432382b2892f0574ddcae73fd0a"
[[package]] [[package]]
name = "path-absolutize" name = "path-absolutize"
version = "3.1.1" version = "3.1.1"
@@ -3057,7 +3062,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "3c80231409c20246a13fddb31776fb942c38553c51e871f8cbd687a4cfb5843d" checksum = "3c80231409c20246a13fddb31776fb942c38553c51e871f8cbd687a4cfb5843d"
dependencies = [ dependencies = [
"phf_shared", "phf_shared",
"rand", "rand 0.8.5",
] ]
[[package]] [[package]]
@@ -3202,9 +3207,9 @@ dependencies = [
[[package]] [[package]]
name = "r-efi" name = "r-efi"
version = "5.2.0" version = "5.3.0"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "74765f6d916ee2faa39bc8e68e4f3ed8949b48cccdac59983d287a7cb71ce9c5" checksum = "69cdb34c158ceb288df11e18b4bd39de994f6657d83847bdffdbd7f346754b0f"
[[package]] [[package]]
name = "rand" name = "rand"
@@ -3213,8 +3218,18 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "34af8d1a0e25924bc5b7c43c079c942339d8f0a8b57c39049bef581b46327404" checksum = "34af8d1a0e25924bc5b7c43c079c942339d8f0a8b57c39049bef581b46327404"
dependencies = [ dependencies = [
"libc", "libc",
"rand_chacha", "rand_chacha 0.3.1",
"rand_core", "rand_core 0.6.4",
]
[[package]]
name = "rand"
version = "0.9.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "9fbfd9d094a40bf3ae768db9361049ace4c0e04a4fd6b359518bd7b73a73dd97"
dependencies = [
"rand_chacha 0.9.0",
"rand_core 0.9.3",
] ]
[[package]] [[package]]
@@ -3224,7 +3239,17 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "e6c10a63a0fa32252be49d21e7709d4d4baf8d231c2dbce1eaa8141b9b127d88" checksum = "e6c10a63a0fa32252be49d21e7709d4d4baf8d231c2dbce1eaa8141b9b127d88"
dependencies = [ dependencies = [
"ppv-lite86", "ppv-lite86",
"rand_core", "rand_core 0.6.4",
]
[[package]]
name = "rand_chacha"
version = "0.9.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "d3022b5f1df60f26e1ffddd6c66e8aa15de382ae63b3a0c1bfc0e4d3e3f325cb"
dependencies = [
"ppv-lite86",
"rand_core 0.9.3",
] ]
[[package]] [[package]]
@@ -3236,6 +3261,15 @@ dependencies = [
"getrandom 0.2.16", "getrandom 0.2.16",
] ]
[[package]]
name = "rand_core"
version = "0.9.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "99d9a13982dcf210057a8a78572b2217b667c3beacbf3a0d8b454f6f82837d38"
dependencies = [
"getrandom 0.3.3",
]
[[package]] [[package]]
name = "rapidhash" name = "rapidhash"
version = "1.4.0" version = "1.4.0"
@@ -3368,6 +3402,44 @@ dependencies = [
"libc", "libc",
] ]
[[package]]
name = "rmcp"
version = "0.1.6"
dependencies = [
"base64 0.22.1",
"bytes",
"chrono",
"futures",
"http",
"http-body",
"http-body-util",
"paste",
"pin-project-lite",
"rand 0.9.1",
"rmcp-macros",
"schemars",
"serde",
"serde_json",
"sse-stream",
"thiserror 2.0.12",
"tokio",
"tokio-stream",
"tokio-util",
"tower-service",
"tracing",
"uuid",
]
[[package]]
name = "rmcp-macros"
version = "0.1.6"
dependencies = [
"proc-macro2",
"quote",
"serde_json",
"syn 2.0.103",
]
[[package]] [[package]]
name = "rolldown-ariadne" name = "rolldown-ariadne"
version = "0.5.2" version = "0.5.2"
@@ -3470,10 +3542,23 @@ checksum = "4cd191f9397d57d581cddd31014772520aa448f65ef991055d7f61582c65165f"
dependencies = [ dependencies = [
"dyn-clone", "dyn-clone",
"ref-cast", "ref-cast",
"schemars_derive",
"serde", "serde",
"serde_json", "serde_json",
] ]
[[package]]
name = "schemars_derive"
version = "0.9.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "5016d94c77c6d32f0b8e08b781f7dc8a90c2007d4e77472cc2807bc10a8438fe"
dependencies = [
"proc-macro2",
"quote",
"serde_derive_internals",
"syn 2.0.103",
]
[[package]] [[package]]
name = "scopeguard" name = "scopeguard"
version = "1.2.0" version = "1.2.0"
@@ -3497,7 +3582,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "9465315bc9d4566e1724f0fffcbcc446268cb522e60f9a27bcded6b19c108113" checksum = "9465315bc9d4566e1724f0fffcbcc446268cb522e60f9a27bcded6b19c108113"
dependencies = [ dependencies = [
"bitcoin_hashes", "bitcoin_hashes",
"rand", "rand 0.8.5",
"secp256k1-sys", "secp256k1-sys",
"serde", "serde",
] ]
@@ -3562,6 +3647,17 @@ dependencies = [
"syn 2.0.103", "syn 2.0.103",
] ]
[[package]]
name = "serde_derive_internals"
version = "0.29.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "18d26a20a969b9e3fdf2fc2d9f21eda6c40e2de84c9408bb5d3b05d499aae711"
dependencies = [
"proc-macro2",
"quote",
"syn 2.0.103",
]
[[package]] [[package]]
name = "serde_json" name = "serde_json"
version = "1.0.140" version = "1.0.140"
@@ -3709,6 +3805,19 @@ dependencies = [
"windows-sys 0.52.0", "windows-sys 0.52.0",
] ]
[[package]]
name = "sse-stream"
version = "0.2.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "eb4dc4d33c68ec1f27d386b5610a351922656e1fdf5c05bbaad930cd1519479a"
dependencies = [
"bytes",
"futures-util",
"http-body",
"http-body-util",
"pin-project-lite",
]
[[package]] [[package]]
name = "static_assertions" name = "static_assertions"
version = "1.1.0" version = "1.1.0"
@@ -3926,6 +4035,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "75ef51a33ef1da925cea3e4eb122833cb377c61439ca401b770f54902b806779" checksum = "75ef51a33ef1da925cea3e4eb122833cb377c61439ca401b770f54902b806779"
dependencies = [ dependencies = [
"backtrace", "backtrace",
"bytes",
"libc", "libc",
"mio", "mio",
"pin-project-lite", "pin-project-lite",
@@ -3945,6 +4055,17 @@ dependencies = [
"syn 2.0.103", "syn 2.0.103",
] ]
[[package]]
name = "tokio-stream"
version = "0.1.17"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "eca58d7bba4a75707817a2c44174253f9236b2d5fbd055602e9d5c07c139a047"
dependencies = [
"futures-core",
"pin-project-lite",
"tokio",
]
[[package]] [[package]]
name = "tokio-util" name = "tokio-util"
version = "0.7.15" version = "0.7.15"
@@ -4061,9 +4182,9 @@ dependencies = [
[[package]] [[package]]
name = "tracing-attributes" name = "tracing-attributes"
version = "0.1.29" version = "0.1.30"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "1b1ffbcf9c6f6b99d386e7444eb608ba646ae452a36b39737deb9663b610f662" checksum = "81383ab64e72a7a8b8e13130c49e3dab29def6d0c7d76a03087b3cf71c5c6903"
dependencies = [ dependencies = [
"proc-macro2", "proc-macro2",
"quote", "quote",
@@ -4244,6 +4365,7 @@ version = "1.17.0"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "3cf4199d1e5d15ddd86a694e4d0dffa9c323ce759fea589f00fef9d81cc1931d" checksum = "3cf4199d1e5d15ddd86a694e4d0dffa9c323ce759fea589f00fef9d81cc1931d"
dependencies = [ dependencies = [
"getrandom 0.3.3",
"js-sys", "js-sys",
"wasm-bindgen", "wasm-bindgen",
] ]
@@ -4499,7 +4621,7 @@ version = "0.52.0"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "282be5f36a8ce781fad8c8ae18fa3f9beff57ec1b52cb3de0789201425d9a33d" checksum = "282be5f36a8ce781fad8c8ae18fa3f9beff57ec1b52cb3de0789201425d9a33d"
dependencies = [ dependencies = [
"windows-targets", "windows-targets 0.52.6",
] ]
[[package]] [[package]]
@@ -4508,7 +4630,16 @@ version = "0.59.0"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "1e38bc4d79ed67fd075bcc251a1c39b32a1776bbe92e5bef1f0bf1f8c531853b" checksum = "1e38bc4d79ed67fd075bcc251a1c39b32a1776bbe92e5bef1f0bf1f8c531853b"
dependencies = [ dependencies = [
"windows-targets", "windows-targets 0.52.6",
]
[[package]]
name = "windows-sys"
version = "0.60.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "f2f500e4d28234f72040990ec9d39e3a6b950f9f22d3dba18416c35882612bcb"
dependencies = [
"windows-targets 0.53.2",
] ]
[[package]] [[package]]
@@ -4517,14 +4648,30 @@ version = "0.52.6"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "9b724f72796e036ab90c1021d4780d4d3d648aca59e491e6b98e725b84e99973" checksum = "9b724f72796e036ab90c1021d4780d4d3d648aca59e491e6b98e725b84e99973"
dependencies = [ dependencies = [
"windows_aarch64_gnullvm", "windows_aarch64_gnullvm 0.52.6",
"windows_aarch64_msvc", "windows_aarch64_msvc 0.52.6",
"windows_i686_gnu", "windows_i686_gnu 0.52.6",
"windows_i686_gnullvm", "windows_i686_gnullvm 0.52.6",
"windows_i686_msvc", "windows_i686_msvc 0.52.6",
"windows_x86_64_gnu", "windows_x86_64_gnu 0.52.6",
"windows_x86_64_gnullvm", "windows_x86_64_gnullvm 0.52.6",
"windows_x86_64_msvc", "windows_x86_64_msvc 0.52.6",
]
[[package]]
name = "windows-targets"
version = "0.53.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "c66f69fcc9ce11da9966ddb31a40968cad001c5bedeb5c2b82ede4253ab48aef"
dependencies = [
"windows_aarch64_gnullvm 0.53.0",
"windows_aarch64_msvc 0.53.0",
"windows_i686_gnu 0.53.0",
"windows_i686_gnullvm 0.53.0",
"windows_i686_msvc 0.53.0",
"windows_x86_64_gnu 0.53.0",
"windows_x86_64_gnullvm 0.53.0",
"windows_x86_64_msvc 0.53.0",
] ]
[[package]] [[package]]
@@ -4533,48 +4680,96 @@ version = "0.52.6"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "32a4622180e7a0ec044bb555404c800bc9fd9ec262ec147edd5989ccd0c02cd3" checksum = "32a4622180e7a0ec044bb555404c800bc9fd9ec262ec147edd5989ccd0c02cd3"
[[package]]
name = "windows_aarch64_gnullvm"
version = "0.53.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "86b8d5f90ddd19cb4a147a5fa63ca848db3df085e25fee3cc10b39b6eebae764"
[[package]] [[package]]
name = "windows_aarch64_msvc" name = "windows_aarch64_msvc"
version = "0.52.6" version = "0.52.6"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "09ec2a7bb152e2252b53fa7803150007879548bc709c039df7627cabbd05d469" checksum = "09ec2a7bb152e2252b53fa7803150007879548bc709c039df7627cabbd05d469"
[[package]]
name = "windows_aarch64_msvc"
version = "0.53.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "c7651a1f62a11b8cbd5e0d42526e55f2c99886c77e007179efff86c2b137e66c"
[[package]] [[package]]
name = "windows_i686_gnu" name = "windows_i686_gnu"
version = "0.52.6" version = "0.52.6"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "8e9b5ad5ab802e97eb8e295ac6720e509ee4c243f69d781394014ebfe8bbfa0b" checksum = "8e9b5ad5ab802e97eb8e295ac6720e509ee4c243f69d781394014ebfe8bbfa0b"
[[package]]
name = "windows_i686_gnu"
version = "0.53.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "c1dc67659d35f387f5f6c479dc4e28f1d4bb90ddd1a5d3da2e5d97b42d6272c3"
[[package]] [[package]]
name = "windows_i686_gnullvm" name = "windows_i686_gnullvm"
version = "0.52.6" version = "0.52.6"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "0eee52d38c090b3caa76c563b86c3a4bd71ef1a819287c19d586d7334ae8ed66" checksum = "0eee52d38c090b3caa76c563b86c3a4bd71ef1a819287c19d586d7334ae8ed66"
[[package]]
name = "windows_i686_gnullvm"
version = "0.53.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "9ce6ccbdedbf6d6354471319e781c0dfef054c81fbc7cf83f338a4296c0cae11"
[[package]] [[package]]
name = "windows_i686_msvc" name = "windows_i686_msvc"
version = "0.52.6" version = "0.52.6"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "240948bc05c5e7c6dabba28bf89d89ffce3e303022809e73deaefe4f6ec56c66" checksum = "240948bc05c5e7c6dabba28bf89d89ffce3e303022809e73deaefe4f6ec56c66"
[[package]]
name = "windows_i686_msvc"
version = "0.53.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "581fee95406bb13382d2f65cd4a908ca7b1e4c2f1917f143ba16efe98a589b5d"
[[package]] [[package]]
name = "windows_x86_64_gnu" name = "windows_x86_64_gnu"
version = "0.52.6" version = "0.52.6"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "147a5c80aabfbf0c7d901cb5895d1de30ef2907eb21fbbab29ca94c5b08b1a78" checksum = "147a5c80aabfbf0c7d901cb5895d1de30ef2907eb21fbbab29ca94c5b08b1a78"
[[package]]
name = "windows_x86_64_gnu"
version = "0.53.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "2e55b5ac9ea33f2fc1716d1742db15574fd6fc8dadc51caab1c16a3d3b4190ba"
[[package]] [[package]]
name = "windows_x86_64_gnullvm" name = "windows_x86_64_gnullvm"
version = "0.52.6" version = "0.52.6"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "24d5b23dc417412679681396f2b49f3de8c1473deb516bd34410872eff51ed0d" checksum = "24d5b23dc417412679681396f2b49f3de8c1473deb516bd34410872eff51ed0d"
[[package]]
name = "windows_x86_64_gnullvm"
version = "0.53.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "0a6e035dd0599267ce1ee132e51c27dd29437f63325753051e71dd9e42406c57"
[[package]] [[package]]
name = "windows_x86_64_msvc" name = "windows_x86_64_msvc"
version = "0.52.6" version = "0.52.6"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "589f6da84c646204747d1270a2a5661ea66ed1cced2631d546fdfb155959f9ec" checksum = "589f6da84c646204747d1270a2a5661ea66ed1cced2631d546fdfb155959f9ec"
[[package]]
name = "windows_x86_64_msvc"
version = "0.53.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "271414315aff87387382ec3d271b52d7ae78726f5d44ac98b4f4030c91880486"
[[package]] [[package]]
name = "winnow" name = "winnow"
version = "0.7.11" version = "0.7.11"
@@ -4607,18 +4802,18 @@ checksum = "cfe53a6657fd280eaa890a3bc59152892ffa3e30101319d168b781ed6529b049"
[[package]] [[package]]
name = "zerocopy" name = "zerocopy"
version = "0.8.25" version = "0.8.26"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "a1702d9583232ddb9174e01bb7c15a2ab8fb1bc6f227aa1233858c351a3ba0cb" checksum = "1039dd0d3c310cf05de012d8a39ff557cb0d23087fd44cad61df08fc31907a2f"
dependencies = [ dependencies = [
"zerocopy-derive", "zerocopy-derive",
] ]
[[package]] [[package]]
name = "zerocopy-derive" name = "zerocopy-derive"
version = "0.8.25" version = "0.8.26"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "28a6e20d751156648aa063f3800b706ee209a32c0b4d9f24be3d980b01be55ef" checksum = "9ecf5b4cc5364572d7f4c329661bcc82724222973f2cab6f050a4e5c22f75181"
dependencies = [ dependencies = [
"proc-macro2", "proc-macro2",
"quote", "quote",

View File

@@ -31,7 +31,7 @@ brk_fetcher = { version = "0.0.66", path = "crates/brk_fetcher" }
brk_indexer = { version = "0.0.66", path = "crates/brk_indexer" } brk_indexer = { version = "0.0.66", path = "crates/brk_indexer" }
brk_logger = { version = "0.0.66", path = "crates/brk_logger" } brk_logger = { version = "0.0.66", path = "crates/brk_logger" }
brk_parser = { version = "0.0.66", path = "crates/brk_parser" } brk_parser = { version = "0.0.66", path = "crates/brk_parser" }
brk_query = { version = "0.0.66", path = "crates/brk_query" } brk_interface = { version = "0.0.66", path = "crates/brk_interface" }
brk_server = { version = "0.0.66", path = "crates/brk_server" } brk_server = { version = "0.0.66", path = "crates/brk_server" }
brk_state = { version = "0.0.66", path = "crates/brk_state" } brk_state = { version = "0.0.66", path = "crates/brk_state" }
brk_store = { version = "0.0.66", path = "crates/brk_store" } brk_store = { version = "0.0.66", path = "crates/brk_store" }
@@ -46,14 +46,18 @@ jiff = "0.2.15"
log = { version = "0.4.27" } log = { version = "0.4.27" }
minreq = { version = "2.13.4", features = ["https", "serde_json"] } minreq = { version = "2.13.4", features = ["https", "serde_json"] }
rayon = "1.10.0" rayon = "1.10.0"
rmcp = { path = "../rust-sdk/crates/rmcp", features = ["transport-streamable-http-server", "transport-worker"] }
schemars = "0.9.0"
# rmcp = { git = "https://github.com/modelcontextprotocol/rust-sdk", branch = "main" , features = ["transport-streamable-http-server", "transport-worker"] }
# schemars = "0.8.0"
serde = { version = "1.0.219" } serde = { version = "1.0.219" }
serde_bytes = "0.11.17" serde_bytes = "0.11.17"
serde_derive = "1.0.219" serde_derive = "1.0.219"
serde_json = { version = "1.0.140", features = ["float_roundtrip"] } serde_json = { version = "1.0.140", features = ["float_roundtrip"] }
tabled = "0.20.0" tabled = "0.20.0"
tokio = { version = "1.45.1", features = ["rt-multi-thread"] } tokio = { version = "1.45.1", features = ["rt-multi-thread"] }
zerocopy = { version = "0.8.25" } zerocopy = { version = "0.8.26" }
zerocopy-derive = "0.8.25" zerocopy-derive = "0.8.26"
[workspace.metadata.release] [workspace.metadata.release]
shared-version = true shared-version = true

View File

@@ -42,7 +42,7 @@ The toolkit can be used in various ways to accommodate as many needs as possible
It has a wide range of functionalities including charts, tables and simulations which you can visit for free and without the need for an account. \ It has a wide range of functionalities including charts, tables and simulations which you can visit for free and without the need for an account. \
Also available at: [kibo.money](https://kibo.money) // [satonomics.xyz](https://satonomics.xyz) Also available at: [kibo.money](https://kibo.money) // [satonomics.xyz](https://satonomics.xyz)
- **[API](https://github.com/bitcoinresearchkit/brk/tree/main/crates/brk_server#endpoints)** \ - **[API](https://github.com/bitcoinresearchkit/brk/tree/main/crates/brk_server#endpoints)** \
Researchers and developers are free to use BRK's public API with ![Datasets variant count](https://img.shields.io/badge/dynamic/json?url=https%3A%2F%2Fbitcoinresearchkit.org%2Fapi%2Fvecs%2Fvariant-count&query=%24&style=flat&label=%20&color=white) dataset variants at your disposal. \ Researchers and developers are free to use BRK's public API with ![Datasets variant count](https://img.shields.io/badge/dynamic/json?url=https%3A%2F%2Fbitcoinresearchkit.org%2Fapi%2Fvecs%2Fvariant-count&query=%24&style=flat&label=%20&color=white) dataset variants at their disposal. \
Just like the website, it's entirely free, with no authentication or rate-limiting. Just like the website, it's entirely free, with no authentication or rate-limiting.
- **[CLI](https://crates.io/crates/brk_cli)** \ - **[CLI](https://crates.io/crates/brk_cli)** \
Node runners are strongly encouraged to try out and self-host their own instance using BRK's command line interface. \ Node runners are strongly encouraged to try out and self-host their own instance using BRK's command line interface. \
@@ -58,7 +58,7 @@ In contrast, existing alternatives tend to be either [very costly](https://studi
## Crates ## Crates
- [`brk`](https://crates.io/crates/brk): Wrapper around all other `brk-*` crates - [`brk`](https://crates.io/crates/brk): Wrapper around all other `brk-*` crates
- [`brk_cli`](https://crates.io/crates/brk_cli): A standalone command line interface to interact with the Bitcoin Research Kit - [`brk_cli`](https://crates.io/crates/brk_cli): A command line interface to run a Bitcoin Research Kit instance
- [`brk_computer`](https://crates.io/crates/brk_computer): A Bitcoin dataset computer, built on top of brk_indexer - [`brk_computer`](https://crates.io/crates/brk_computer): A Bitcoin dataset computer, built on top of brk_indexer
- [`brk_core`](https://crates.io/crates/brk_core): The Core (Structs and Errors) of the Bitcoin Research Kit - [`brk_core`](https://crates.io/crates/brk_core): The Core (Structs and Errors) of the Bitcoin Research Kit
- [`brk_exit`](https://crates.io/crates/brk_exit): An exit blocker built on top of ctrlc - [`brk_exit`](https://crates.io/crates/brk_exit): An exit blocker built on top of ctrlc
@@ -66,7 +66,7 @@ In contrast, existing alternatives tend to be either [very costly](https://studi
- [`brk_indexer`](https://crates.io/crates/brk_indexer): A Bitcoin Core indexer built on top of brk_parser - [`brk_indexer`](https://crates.io/crates/brk_indexer): A Bitcoin Core indexer built on top of brk_parser
- [`brk_logger`](https://crates.io/crates/brk_logger): A clean logger used in the Bitcoin Research Kit. - [`brk_logger`](https://crates.io/crates/brk_logger): A clean logger used in the Bitcoin Research Kit.
- [`brk_parser`](https://crates.io/crates/brk_parser): A very fast Bitcoin Core block parser and iterator built on top of bitcoin-rust - [`brk_parser`](https://crates.io/crates/brk_parser): A very fast Bitcoin Core block parser and iterator built on top of bitcoin-rust
- [`brk_query`](https://crates.io/crates/brk_query): A library that finds requested datasets. - [`brk_interface`](https://crates.io/crates/brk_interface): An interface to BRK's engine
- [`brk_server`](https://crates.io/crates/brk_server): A server that serves Bitcoin data and swappable front-ends, built on top of `brk_indexer`, `brk_fetcher` and `brk_computer` - [`brk_server`](https://crates.io/crates/brk_server): A server that serves Bitcoin data and swappable front-ends, built on top of `brk_indexer`, `brk_fetcher` and `brk_computer`
- [`brk_state`](https://crates.io/crates/brk_state): Various states used mainly by the computer - [`brk_state`](https://crates.io/crates/brk_state): Various states used mainly by the computer
- [`brk_store`](https://crates.io/crates/brk_store): A thin wrapper around [`fjall`](https://crates.io/crates/fjall) - [`brk_store`](https://crates.io/crates/brk_store): A thin wrapper around [`fjall`](https://crates.io/crates/fjall)
@@ -92,7 +92,7 @@ Pricing: `0.01 BTC / month` *or* `0.1 BTC / year`
Deepest gratitude to the [Open Sats](https://opensats.org/) public charity. Their grant — from December 2024 to the present — has been critical in sustaining this project. Deepest gratitude to the [Open Sats](https://opensats.org/) public charity. Their grant — from December 2024 to the present — has been critical in sustaining this project.
Heartfelt thanks go out to every donor on [Nostr](https://primal.net/p/npub1jagmm3x39lmwfnrtvxcs9ac7g300y3dusv9lgzhk2e4x5frpxlrqa73v44) and [Geyser.fund](https://geyser.fund/project/brk) whose support has ensured the availability of the [kibo.money](https://kibo.money) public instance. Heartfelt thanks go out to every donor on [Nostr](https://primal.net/p/npub1jagmm3x39lmwfnrtvxcs9ac7g300y3dusv9lgzhk2e4x5frpxlrqa73v44) and [Geyser.fund](https://geyser.fund/project/brk) whose support has ensured the availability of the [bitcoinresearchkit.org](https://bitcoinresearchkit.org) public instance.
## Donate ## Donate

View File

@@ -18,7 +18,7 @@ full = [
"indexer", "indexer",
"logger", "logger",
"parser", "parser",
"query", "interface",
"server", "server",
"state", "state",
"store", "store",
@@ -32,7 +32,7 @@ fetcher = ["brk_fetcher"]
indexer = ["brk_indexer"] indexer = ["brk_indexer"]
logger = ["brk_logger"] logger = ["brk_logger"]
parser = ["brk_parser"] parser = ["brk_parser"]
query = ["brk_query"] interface = ["brk_interface"]
server = ["brk_server"] server = ["brk_server"]
state = ["brk_state"] state = ["brk_state"]
store = ["brk_store"] store = ["brk_store"]
@@ -48,7 +48,7 @@ brk_fetcher = { workspace = true, optional = true }
brk_indexer = { workspace = true, optional = true } brk_indexer = { workspace = true, optional = true }
brk_logger = { workspace = true, optional = true } brk_logger = { workspace = true, optional = true }
brk_parser = { workspace = true, optional = true } brk_parser = { workspace = true, optional = true }
brk_query = { workspace = true, optional = true } brk_interface = { workspace = true, optional = true }
brk_server = { workspace = true, optional = true } brk_server = { workspace = true, optional = true }
brk_state = { workspace = true, optional = true } brk_state = { workspace = true, optional = true }
brk_store = { workspace = true, optional = true } brk_store = { workspace = true, optional = true }

View File

@@ -35,9 +35,9 @@ pub use brk_logger as logger;
#[doc(inline)] #[doc(inline)]
pub use brk_parser as parser; pub use brk_parser as parser;
#[cfg(feature = "query")] #[cfg(feature = "interface")]
#[doc(inline)] #[doc(inline)]
pub use brk_query as query; pub use brk_interface as interface;
#[cfg(feature = "server")] #[cfg(feature = "server")]
#[doc(inline)] #[doc(inline)]

View File

@@ -1,6 +1,6 @@
[package] [package]
name = "brk_cli" name = "brk_cli"
description = "A command line interface to interact with the full Bitcoin Research Kit" description = "A command line interface to run a Bitcoin Research Kit instance"
version.workspace = true version.workspace = true
edition.workspace = true edition.workspace = true
license.workspace = true license.workspace = true
@@ -16,7 +16,7 @@ brk_fetcher = { workspace = true }
brk_indexer = { workspace = true } brk_indexer = { workspace = true }
brk_logger = { workspace = true } brk_logger = { workspace = true }
brk_parser = { workspace = true } brk_parser = { workspace = true }
brk_query = { workspace = true } brk_interface = { workspace = true }
brk_server = { workspace = true } brk_server = { workspace = true }
brk_vec = { workspace = true } brk_vec = { workspace = true }
clap = { workspace = true } clap = { workspace = true }

View File

@@ -1,4 +1,4 @@
# BRK Cli # BRK CLI
<p align="left"> <p align="left">
<a href="https://github.com/bitcoinresearchkit/brk"> <a href="https://github.com/bitcoinresearchkit/brk">
@@ -31,9 +31,11 @@
</a> </a>
</p> </p>
A command line interface to interact with the full Bitcoin Research Kit. It's built on top of every other crate and gives the possibility to use BRK using the terminal instead of Rust. A command line interface to run a Bitcoin Research Kit instance.
It has 2 commands (other than `help` and `version`) which are `run` and `query`. The former is used to run the processing (indexer + computer) and/or the server. The latter uses `brk_query` as its backend just like the server to be able to get datasets via the terminal instead of the API. Both commands are customizable by supporting all the parameters of their Rust counterparts ([`run`](https://github.com/bitcoinresearchkit/brk/blob/c9c6b583338203b2b11bdf31e961b1c306f5d82b/crates/brk_cli/src/run.rs#L110-L191), and [`query`](https://github.com/bitcoinresearchkit/brk/blob/main/crates/brk_query/src/params.rs)). It's very customizable with all parameters from the underlying tools (crates) used inside.
Run `brk -h` for more information.
## Requirements ## Requirements
@@ -55,10 +57,9 @@ To be determined
- [Rust](https://www.rust-lang.org/tools/install) - [Rust](https://www.rust-lang.org/tools/install)
- Unix based operating system (Mac OS or Linux) - Unix based operating system (Mac OS or Linux)
> [!IMPORTANT] > [!IMPORTANT]
> Ubuntu users need to install `open-ssl` via `sudo apt install libssl-dev pkg-config` > Ubuntu users need to install `open-ssl` via `sudo apt install libssl-dev pkg-config`
## Download ## Download
### Binaries ### Binaries
@@ -85,12 +86,11 @@ cargo run -r
## Usage ## Usage
Run `brk -h` to view each available command and their respective description. Run `brk -h` to view each available parameter and their respective description.
`-h` works also for commands, so `brk run -h` will enumerate all the parameters of `brk run`. > [!TIP]
> Every parameter set will be saved at `~/.brk/config.toml`, which allows you to simply run `brk` next time.
> [!TIP]
> Every parameter set for `brk run` will be saved at `~/.brk/config.toml`, which allows you to simply run `brk run` next time.
## Tunnel
The easiest way to let others access your server is to use `cloudflared` which will also cache requests. For more information see [Cloudflare Tunnel](https://developers.cloudflare.com/cloudflare-one/connections/connect-networks/) documentation. The easiest way to let others access your server is to use `cloudflared` which will also cache requests. For more information see [Cloudflare Tunnel](https://developers.cloudflare.com/cloudflare-one/connections/connect-networks/) documentation.

View File

@@ -0,0 +1,362 @@
use std::{
fs,
path::{Path, PathBuf},
};
use bitcoincore_rpc::{self, Auth, Client};
use brk_core::{default_bitcoin_path, default_brk_path, default_on_error, dot_brk_path};
use brk_fetcher::Fetcher;
use brk_server::Website;
use brk_vec::{Computation, Format};
use clap::Parser;
use clap_derive::Parser;
use color_eyre::eyre::eyre;
use serde::{Deserialize, Serialize};
use crate::services::Services;
// Command-line / saved configuration for running a BRK instance.
// Every field is optional: values come from CLI flags (via `clap`) merged over
// the saved `config.toml` in the `.brk` directory (see `Config::import`), with
// CLI flags taking precedence.
//
// NOTE: the `///` doc comments below double as the clap `--help` text, and
// `default_on_error` makes a malformed saved value fall back to `None`
// instead of failing deserialization.
#[derive(Parser, Debug, Default, PartialEq, Eq, PartialOrd, Ord, Deserialize, Serialize)]
pub struct Config {
    /// Bitcoin main directory path, defaults: ~/.bitcoin, ~/Library/Application\ Support/Bitcoin, saved
    #[serde(default, deserialize_with = "default_on_error")]
    #[arg(long, value_name = "PATH")]
    bitcoindir: Option<String>,
    /// Bitcoin blocks directory path, default: --bitcoindir/blocks, saved
    #[serde(default, deserialize_with = "default_on_error")]
    #[arg(long, value_name = "PATH")]
    blocksdir: Option<String>,
    /// Bitcoin Research Kit outputs directory path, default: ~/.brk, saved
    #[serde(default, deserialize_with = "default_on_error")]
    #[arg(long, value_name = "PATH")]
    brkdir: Option<String>,
    /// Activated services, default: all, saved
    #[serde(default, deserialize_with = "default_on_error")]
    #[arg(short, long)]
    services: Option<Services>,
    /// Computation of computed datasets, `lazy` computes data whenever requested without saving it, `eager` computes the data once and saves it to disk, default: `lazy`, saved
    #[serde(default, deserialize_with = "default_on_error")]
    #[arg(short, long)]
    computation: Option<Computation>,
    /// Format of computed datasets, `compressed` to save disk space (experimental), `raw` to prioritize speed, default: `raw`, saved
    #[serde(default, deserialize_with = "default_on_error")]
    #[arg(short, long)]
    format: Option<Format>,
    /// Activate fetching prices from exchanges APIs and the computation of all related datasets, default: true, saved
    #[serde(default, deserialize_with = "default_on_error")]
    #[arg(short = 'F', long, value_name = "BOOL")]
    fetch: Option<bool>,
    /// Website served by the server (if active), default: default, saved
    #[serde(default, deserialize_with = "default_on_error")]
    #[arg(short, long)]
    website: Option<Website>,
    /// Bitcoin RPC ip, default: localhost, saved
    #[serde(default, deserialize_with = "default_on_error")]
    #[arg(long, value_name = "IP")]
    rpcconnect: Option<String>,
    /// Bitcoin RPC port, default: 8332, saved
    #[serde(default, deserialize_with = "default_on_error")]
    #[arg(long, value_name = "PORT")]
    rpcport: Option<u16>,
    /// Bitcoin RPC cookie file, default: --bitcoindir/.cookie, saved
    #[serde(default, deserialize_with = "default_on_error")]
    #[arg(long, value_name = "PATH")]
    rpccookiefile: Option<String>,
    /// Bitcoin RPC username, saved
    #[serde(default, deserialize_with = "default_on_error")]
    #[arg(long, value_name = "USERNAME")]
    rpcuser: Option<String>,
    /// Bitcoin RPC password, saved
    #[serde(default, deserialize_with = "default_on_error")]
    #[arg(long, value_name = "PASSWORD")]
    rpcpassword: Option<String>,
    /// Delay between runs, default: 0, saved
    #[serde(default, deserialize_with = "default_on_error")]
    #[arg(long, value_name = "SECONDS")]
    delay: Option<u64>,
    /// DEV: Activate to watch the selected website's folder for changes, default: false, saved
    #[serde(default, deserialize_with = "default_on_error")]
    #[arg(long, value_name = "BOOL")]
    watch: Option<bool>,
    /// DEV: Activate checking address hashes for collisions when indexing, default: false, saved
    #[serde(default, deserialize_with = "default_on_error")]
    #[arg(long, value_name = "BOOL")]
    check_collisions: Option<bool>,
}
impl Config {
    /// Load the effective configuration.
    ///
    /// Parses CLI arguments, merges them over the saved `config.toml` in the
    /// `.brk` directory (CLI flags win), validates directories and RPC auth,
    /// then writes the merged config back so a plain `brk` reuses it next run.
    pub fn import() -> color_eyre::Result<Self> {
        let mut config_args = Config::parse();

        let path = dot_brk_path();
        // Best effort: the directory may already exist.
        let _ = fs::create_dir_all(&path);
        let path = path.join("config.toml");

        let mut config_saved = Self::read(&path);

        // Overlay every CLI-provided field onto the saved config. The macro
        // keeps the field list in one place instead of 16 hand-written blocks.
        macro_rules! merge {
            ($($field:ident),* $(,)?) => {
                $(if let Some(value) = config_args.$field.take() {
                    config_saved.$field = Some(value);
                })*
            };
        }
        merge!(
            bitcoindir,
            blocksdir,
            brkdir,
            services,
            computation,
            fetch,
            format,
            website,
            rpcconnect,
            rpcport,
            rpccookiefile,
            rpcuser,
            rpcpassword,
            delay,
            check_collisions,
            watch,
        );

        // Guard: if a new field is added to `Config` without a matching
        // `merge!` entry, fail loudly instead of silently dropping it.
        if config_args != Config::default() {
            dbg!(config_args);
            panic!("Didn't consume the full config")
        }

        let config = config_saved;

        config.check();

        config.write(&path)?;

        Ok(config)
    }

    /// Validate directories and RPC credentials, printing a hint and exiting
    /// with status 1 on the first problem found.
    fn check(&self) {
        for (dir, flag) in [
            (self.bitcoindir(), "--bitcoindir"),
            (self.blocksdir(), "--blocksdir"),
            (self.brkdir(), "--brkdir"),
        ] {
            if !dir.is_dir() {
                println!("{:?} isn't a valid directory", dir);
                println!("Please use the {flag} parameter to set a valid path.");
                println!("Run the program with '-h' for help.");
                std::process::exit(1);
            }
        }

        if self.rpc_auth().is_err() {
            println!(
                "No way found to authenticate the RPC client, please either set --rpccookiefile or --rpcuser and --rpcpassword.\nRun the program with '-h' for help."
            );
            std::process::exit(1);
        }
    }

    /// Read the saved config, falling back to defaults when the file is
    /// missing or unparsable.
    fn read(path: &Path) -> Self {
        fs::read_to_string(path).map_or_else(
            |_| Config::default(),
            |contents| toml::from_str(&contents).unwrap_or_default(),
        )
    }

    /// Persist the config as TOML.
    fn write(&self, path: &Path) -> std::io::Result<()> {
        fs::write(path, toml::to_string(self).unwrap())
    }

    /// Build a leaked (`'static`) Bitcoin Core RPC client from the configured
    /// host/port/auth. Defaults to `localhost:8332`.
    pub fn rpc(&self) -> color_eyre::Result<&'static Client> {
        let url = format!(
            "http://{}:{}",
            self.rpcconnect().unwrap_or(&"localhost".to_string()),
            self.rpcport().unwrap_or(8332)
        );
        // Fix: propagate auth errors instead of panicking via `.unwrap()`.
        let client = Client::new(&url, self.rpc_auth()?)?;
        // Leaked on purpose: the client must live for the whole process.
        Ok(Box::leak(Box::new(client)))
    }

    /// Pick an RPC auth method: cookie file if present, else user/password.
    fn rpc_auth(&self) -> color_eyre::Result<Auth> {
        let cookie = self.path_cookiefile();
        if cookie.is_file() {
            Ok(Auth::CookieFile(cookie))
        } else if let (Some(user), Some(pass)) = (self.rpcuser.clone(), self.rpcpassword.clone()) {
            Ok(Auth::UserPass(user, pass))
        } else {
            Err(eyre!("Failed to find correct auth"))
        }
    }

    fn rpcconnect(&self) -> Option<&String> {
        self.rpcconnect.as_ref()
    }

    fn rpcport(&self) -> Option<u16> {
        self.rpcport
    }

    /// Seconds to wait between runs (`--delay`).
    pub fn delay(&self) -> Option<u64> {
        self.delay
    }

    /// Bitcoin main directory, with `~`/`$HOME` expanded.
    pub fn bitcoindir(&self) -> PathBuf {
        self.bitcoindir
            .as_ref()
            .map_or_else(default_bitcoin_path, |s| Self::fix_user_path(s.as_ref()))
    }

    /// Blocks directory, defaulting to `<bitcoindir>/blocks`.
    pub fn blocksdir(&self) -> PathBuf {
        self.blocksdir.as_ref().map_or_else(
            || self.bitcoindir().join("blocks"),
            |blocksdir| Self::fix_user_path(blocksdir.as_str()),
        )
    }

    /// BRK output root, with `~`/`$HOME` expanded.
    pub fn brkdir(&self) -> PathBuf {
        self.brkdir
            .as_ref()
            .map_or_else(default_brk_path, |s| Self::fix_user_path(s.as_ref()))
    }

    pub fn outputsdir(&self) -> PathBuf {
        self.brkdir().join("outputs")
    }

    pub fn harsdir(&self) -> PathBuf {
        self.outputsdir().join("hars")
    }

    /// Whether the indexing/computing pipeline should run.
    pub fn process(&self) -> bool {
        self.services
            .is_none_or(|m| m == Services::All || m == Services::Processor)
    }

    /// Whether the HTTP server should run.
    pub fn serve(&self) -> bool {
        self.services
            .is_none_or(|m| m == Services::All || m == Services::Server)
    }

    fn path_cookiefile(&self) -> PathBuf {
        self.rpccookiefile.as_ref().map_or_else(
            || self.bitcoindir().join(".cookie"),
            |p| Self::fix_user_path(p.as_str()),
        )
    }

    /// Expand a leading `~` or `$HOME` to the user's home directory.
    fn fix_user_path(path: &str) -> PathBuf {
        let fix = move |pattern: &str| {
            if path.starts_with(pattern) {
                let path = &path
                    .replace(&format!("{pattern}/"), "")
                    .replace(pattern, "");
                // NOTE(review): panics when $HOME is unset — acceptable for a
                // CLI, but worth confirming for service deployments.
                let home = std::env::var("HOME").unwrap();
                Some(Path::new(&home).join(path))
            } else {
                None
            }
        };

        fix("~").unwrap_or_else(|| fix("$HOME").unwrap_or_else(|| PathBuf::from(&path)))
    }

    /// Website to serve, defaulting to `Website::Default`.
    pub fn website(&self) -> Website {
        self.website.unwrap_or(Website::Default)
    }

    /// Whether price fetching is enabled (default: true).
    pub fn fetch(&self) -> bool {
        self.fetch.is_none_or(|b| b)
    }

    /// Build a `Fetcher` only when fetching is enabled.
    pub fn fetcher(&self) -> Option<Fetcher> {
        self.fetch()
            .then(|| Fetcher::import(Some(self.harsdir().as_path())).unwrap())
    }

    pub fn computation(&self) -> Computation {
        self.computation.unwrap_or_default()
    }

    pub fn format(&self) -> Format {
        self.format.unwrap_or_default()
    }

    /// DEV: check address hashes for collisions while indexing (default: false).
    pub fn check_collisions(&self) -> bool {
        self.check_collisions.is_some_and(|b| b)
    }

    /// DEV: watch the website folder for changes (default: false).
    pub fn watch(&self) -> bool {
        self.watch.is_some_and(|b| b)
    }
}

View File

@@ -1,30 +1,12 @@
use std::{fs, thread}; use std::{fs, thread};
use brk_core::{dot_brk_log_path, dot_brk_path}; use brk_core::{dot_brk_log_path, dot_brk_path};
use brk_query::Params as QueryArgs;
use clap::Parser;
use clap_derive::{Parser, Subcommand};
use query::query;
use run::{RunConfig, run};
mod query; mod config;
mod run; mod run;
mod services;
#[derive(Parser)] use run::*;
#[command(version, about)]
#[command(propagate_version = true)]
struct Cli {
#[command(subcommand)]
command: Commands,
}
#[derive(Subcommand, Debug)]
enum Commands {
/// Run the indexer, computer and server, use `run -h` for more information
Run(RunConfig),
/// Query generated datasets via the `run` command in a similar fashion as the server's API, use `query -h` for more information
Query(QueryArgs),
}
pub fn main() -> color_eyre::Result<()> { pub fn main() -> color_eyre::Result<()> {
color_eyre::install()?; color_eyre::install()?;
@@ -33,14 +15,9 @@ pub fn main() -> color_eyre::Result<()> {
brk_logger::init(Some(&dot_brk_log_path())); brk_logger::init(Some(&dot_brk_log_path()));
let cli = Cli::parse();
thread::Builder::new() thread::Builder::new()
.stack_size(128 * 1024 * 1024) .stack_size(256 * 1024 * 1024)
.spawn(|| match cli.command { .spawn(run)?
Commands::Run(args) => run(args),
Commands::Query(args) => query(args),
})?
.join() .join()
.unwrap() .unwrap()
} }

View File

@@ -1,55 +0,0 @@
use brk_computer::Computer;
use brk_indexer::Indexer;
use brk_query::{Index, Output, Params as QueryParams, Query, Tabled, Value};
use tabled::settings::Style;
use crate::run::RunConfig;
/// Run a one-shot dataset query from the CLI, printing the result to stdout.
///
/// Imports the indexer's and computer's vecs from the saved `run` config,
/// resolves the requested index/ids/range, and prints either the formatted
/// output (when a format is requested) or a psql-style table / bare value.
pub fn query(params: QueryParams) -> color_eyre::Result<()> {
    // Reuse the persisted `run` configuration (paths, format, fetch, ...).
    let config = RunConfig::import(None)?;

    let format = config.format();

    let mut indexer = Indexer::new(&config.outputsdir(), config.check_collisions())?;
    indexer.import_vecs()?;

    let mut computer = Computer::new(&config.outputsdir(), config.fetcher(), format);
    computer.import_vecs(&indexer, config.computation())?;

    let query = Query::build(&indexer, &computer);

    let index = Index::try_from(params.index.as_str())?;
    let ids = params.values.iter().map(|s| s.as_str()).collect::<Vec<_>>();
    let from = params.from();
    let to = params.to();
    // NOTE: shadows the dataset format above with the requested output format.
    let format = params.format();

    let res = query.search_and_format(index, &ids, from, to, format)?;

    if format.is_some() {
        // An explicit output format was requested: print it verbatim.
        println!("{}", res);
    } else {
        // No format requested: pretty-print. A single value is printed bare
        // (quotes stripped); lists/matrices are rendered as a psql-style table.
        println!(
            "{}",
            match res {
                Output::Json(v) => match v {
                    Value::Single(v) => v.to_string().replace("\"", ""),
                    v => {
                        // Normalize a list into a one-column matrix for tabling.
                        let v = match v {
                            Value::Single(_) => unreachable!("Already processed"),
                            Value::List(v) => vec![v],
                            Value::Matrix(v) => v,
                        };
                        let mut table =
                            v.to_table(ids.iter().map(|id| id.to_string()).collect::<Vec<_>>());
                        table.with(Style::psql());
                        table.to_string()
                    }
                },
                // `search_and_format` with `format == None` yields JSON only.
                _ => unreachable!(),
            }
        );
    }

    Ok(())
}

View File

@@ -1,25 +1,16 @@
use std::{ use std::{thread::sleep, time::Duration};
fs,
path::{Path, PathBuf},
thread::sleep,
time::Duration,
};
use bitcoincore_rpc::{self, Auth, Client, RpcApi}; use bitcoincore_rpc::{self, RpcApi};
use brk_computer::Computer; use brk_computer::Computer;
use brk_core::{default_bitcoin_path, default_brk_path, default_on_error, dot_brk_path};
use brk_exit::Exit; use brk_exit::Exit;
use brk_fetcher::Fetcher;
use brk_indexer::Indexer; use brk_indexer::Indexer;
use brk_server::{Server, Website}; use brk_server::Server;
use brk_vec::{Computation, Format};
use clap_derive::{Parser, ValueEnum};
use color_eyre::eyre::eyre;
use log::info; use log::info;
use serde::{Deserialize, Serialize};
pub fn run(config: RunConfig) -> color_eyre::Result<()> { use crate::config::Config;
let config = RunConfig::import(Some(config))?;
pub fn run() -> color_eyre::Result<()> {
let config = Config::import()?;
let rpc = config.rpc()?; let rpc = config.rpc()?;
@@ -29,9 +20,7 @@ pub fn run(config: RunConfig) -> color_eyre::Result<()> {
let format = config.format(); let format = config.format();
let mut indexer = Indexer::new(&config.outputsdir(), config.check_collisions())?; let mut indexer = Indexer::forced_import(&config.outputsdir())?;
indexer.import_stores()?;
indexer.import_vecs()?;
let wait_for_synced_node = || -> color_eyre::Result<()> { let wait_for_synced_node = || -> color_eyre::Result<()> {
let is_synced = || -> color_eyre::Result<bool> { let is_synced = || -> color_eyre::Result<bool> {
@@ -49,9 +38,13 @@ pub fn run(config: RunConfig) -> color_eyre::Result<()> {
Ok(()) Ok(())
}; };
let mut computer = Computer::new(&config.outputsdir(), config.fetcher(), format); let mut computer = Computer::forced_import(
computer.import_stores(&indexer)?; &config.outputsdir(),
computer.import_vecs(&indexer, config.computation())?; &indexer,
config.computation(),
config.fetcher(),
format,
)?;
tokio::runtime::Builder::new_multi_thread() tokio::runtime::Builder::new_multi_thread()
.enable_all() .enable_all()
@@ -83,7 +76,8 @@ pub fn run(config: RunConfig) -> color_eyre::Result<()> {
info!("{} blocks found.", block_count + 1); info!("{} blocks found.", block_count + 1);
let starting_indexes = indexer.index(&parser, rpc, &exit)?; let starting_indexes =
indexer.index(&parser, rpc, &exit, config.check_collisions())?;
computer.compute(&mut indexer, starting_indexes, &exit)?; computer.compute(&mut indexer, starting_indexes, &exit)?;
@@ -106,368 +100,3 @@ pub fn run(config: RunConfig) -> color_eyre::Result<()> {
Ok(()) Ok(())
}) })
} }
// Configuration for the `brk run` subcommand, merged from CLI flags and the
// saved `config.toml` (see `RunConfig::import`); CLI flags take precedence.
//
// NOTE: the `///` doc comments below double as the clap `--help` text, and
// `default_on_error` makes a malformed saved value fall back to `None`
// instead of failing deserialization.
#[derive(Parser, Debug, Default, PartialEq, Eq, PartialOrd, Ord, Deserialize, Serialize)]
pub struct RunConfig {
    /// Bitcoin main directory path, defaults: ~/.bitcoin, ~/Library/Application\ Support/Bitcoin, saved
    #[serde(default, deserialize_with = "default_on_error")]
    #[arg(long, value_name = "PATH")]
    bitcoindir: Option<String>,
    /// Bitcoin blocks directory path, default: --bitcoindir/blocks, saved
    #[serde(default, deserialize_with = "default_on_error")]
    #[arg(long, value_name = "PATH")]
    blocksdir: Option<String>,
    /// Bitcoin Research Kit outputs directory path, default: ~/.brk, saved
    #[serde(default, deserialize_with = "default_on_error")]
    #[arg(long, value_name = "PATH")]
    brkdir: Option<String>,
    /// Activated services, default: all, saved
    #[serde(default, deserialize_with = "default_on_error")]
    #[arg(short, long)]
    services: Option<Services>,
    /// Computation of computed datasets, `lazy` computes data whenever requested without saving it, `eager` computes the data once and saves it to disk, default: `lazy`, saved
    #[serde(default, deserialize_with = "default_on_error")]
    #[arg(short, long)]
    computation: Option<Computation>,
    /// Format of computed datasets, `compressed` to save disk space (experimental), `raw` to prioritize speed, default: `raw`, saved
    #[serde(default, deserialize_with = "default_on_error")]
    #[arg(short, long)]
    format: Option<Format>,
    /// Activate fetching prices from exchanges APIs and the computation of all related datasets, default: true, saved
    #[serde(default, deserialize_with = "default_on_error")]
    #[arg(short = 'F', long, value_name = "BOOL")]
    fetch: Option<bool>,
    /// Website served by the server (if active), default: default, saved
    #[serde(default, deserialize_with = "default_on_error")]
    #[arg(short, long)]
    website: Option<Website>,
    /// Bitcoin RPC ip, default: localhost, saved
    #[serde(default, deserialize_with = "default_on_error")]
    #[arg(long, value_name = "IP")]
    rpcconnect: Option<String>,
    /// Bitcoin RPC port, default: 8332, saved
    #[serde(default, deserialize_with = "default_on_error")]
    #[arg(long, value_name = "PORT")]
    rpcport: Option<u16>,
    /// Bitcoin RPC cookie file, default: --bitcoindir/.cookie, saved
    #[serde(default, deserialize_with = "default_on_error")]
    #[arg(long, value_name = "PATH")]
    rpccookiefile: Option<String>,
    /// Bitcoin RPC username, saved
    #[serde(default, deserialize_with = "default_on_error")]
    #[arg(long, value_name = "USERNAME")]
    rpcuser: Option<String>,
    /// Bitcoin RPC password, saved
    #[serde(default, deserialize_with = "default_on_error")]
    #[arg(long, value_name = "PASSWORD")]
    rpcpassword: Option<String>,
    /// Delay between runs, default: 0, saved
    #[serde(default, deserialize_with = "default_on_error")]
    #[arg(long, value_name = "SECONDS")]
    delay: Option<u64>,
    /// DEV: Activate to watch the selected website's folder for changes, default: false, saved
    #[serde(default, deserialize_with = "default_on_error")]
    #[arg(long, value_name = "BOOL")]
    watch: Option<bool>,
    /// DEV: Activate checking address hashes for collisions when indexing, default: false, saved
    #[serde(default, deserialize_with = "default_on_error")]
    #[arg(long, value_name = "BOOL")]
    check_collisions: Option<bool>,
}
impl RunConfig {
    /// Merges the CLI-provided config over the one saved on disk
    /// (`<dot_brk_path>/config.toml`), CLI values taking precedence, then
    /// validates the result, persists it back and returns it.
    pub fn import(config_args: Option<RunConfig>) -> color_eyre::Result<Self> {
        let path = dot_brk_path();
        // Best effort: the directory usually already exists.
        let _ = fs::create_dir_all(&path);
        let path = path.join("config.toml");
        let mut config_saved = Self::read(&path);
        if let Some(mut config_args) = config_args {
            // Every field is `take`n so the leftover check below can detect a
            // field that was added to the struct but not merged here.
            if let Some(bitcoindir) = config_args.bitcoindir.take() {
                config_saved.bitcoindir = Some(bitcoindir);
            }
            if let Some(blocksdir) = config_args.blocksdir.take() {
                config_saved.blocksdir = Some(blocksdir);
            }
            if let Some(brkdir) = config_args.brkdir.take() {
                config_saved.brkdir = Some(brkdir);
            }
            if let Some(services) = config_args.services.take() {
                config_saved.services = Some(services);
            }
            if let Some(computation) = config_args.computation.take() {
                config_saved.computation = Some(computation);
            }
            if let Some(fetch) = config_args.fetch.take() {
                config_saved.fetch = Some(fetch);
            }
            if let Some(format) = config_args.format.take() {
                config_saved.format = Some(format);
            }
            if let Some(website) = config_args.website.take() {
                config_saved.website = Some(website);
            }
            if let Some(rpcconnect) = config_args.rpcconnect.take() {
                config_saved.rpcconnect = Some(rpcconnect);
            }
            if let Some(rpcport) = config_args.rpcport.take() {
                config_saved.rpcport = Some(rpcport);
            }
            if let Some(rpccookiefile) = config_args.rpccookiefile.take() {
                config_saved.rpccookiefile = Some(rpccookiefile);
            }
            if let Some(rpcuser) = config_args.rpcuser.take() {
                config_saved.rpcuser = Some(rpcuser);
            }
            if let Some(rpcpassword) = config_args.rpcpassword.take() {
                config_saved.rpcpassword = Some(rpcpassword);
            }
            if let Some(delay) = config_args.delay.take() {
                config_saved.delay = Some(delay);
            }
            if let Some(check_collisions) = config_args.check_collisions.take() {
                config_saved.check_collisions = Some(check_collisions);
            }
            if let Some(watch) = config_args.watch.take() {
                config_saved.watch = Some(watch);
            }
            // Any non-default leftover means a field above was forgotten.
            if config_args != RunConfig::default() {
                dbg!(config_args);
                panic!("Didn't consume the full config")
            }
        }
        let config = config_saved;
        config.check();
        config.write(&path)?;
        Ok(config)
    }
    /// Validates directories and RPC auth; prints a hint and exits the
    /// process with status 1 on the first problem found.
    fn check(&self) {
        if !self.bitcoindir().is_dir() {
            println!("{:?} isn't a valid directory", self.bitcoindir());
            println!("Please use the --bitcoindir parameter to set a valid path.");
            println!("Run the program with '-h' for help.");
            std::process::exit(1);
        }
        if !self.blocksdir().is_dir() {
            println!("{:?} isn't a valid directory", self.blocksdir());
            println!("Please use the --blocksdir parameter to set a valid path.");
            println!("Run the program with '-h' for help.");
            std::process::exit(1);
        }
        if !self.brkdir().is_dir() {
            println!("{:?} isn't a valid directory", self.brkdir());
            println!("Please use the --brkdir parameter to set a valid path.");
            println!("Run the program with '-h' for help.");
            std::process::exit(1);
        }
        if self.rpc_auth().is_err() {
            println!(
                "No way found to authenticate the RPC client, please either set --rpccookiefile or --rpcuser and --rpcpassword.\nRun the program with '-h' for help."
            );
            std::process::exit(1);
        }
    }
    /// Reads the saved config, falling back to defaults when the file is
    /// missing or unparsable.
    fn read(path: &Path) -> Self {
        fs::read_to_string(path).map_or_else(
            |_| RunConfig::default(),
            |contents| toml::from_str(&contents).unwrap_or_default(),
        )
    }
    /// Persists the config as TOML.
    fn write(&self, path: &Path) -> std::io::Result<()> {
        // Serializing this plain field struct is not expected to fail.
        fs::write(path, toml::to_string(self).unwrap())
    }
    /// Builds a Bitcoin Core RPC client.
    ///
    /// The client is intentionally leaked (`Box::leak`) to obtain a
    /// `&'static` reference usable for the whole process lifetime.
    pub fn rpc(&self) -> color_eyre::Result<&'static Client> {
        Ok(Box::leak(Box::new(Client::new(
            &format!(
                "http://{}:{}",
                self.rpcconnect().unwrap_or(&"localhost".to_string()),
                self.rpcport().unwrap_or(8332)
            ),
            // Fixed: propagate the auth error instead of unwrapping — this
            // method returns a Result and may be called before `check()`.
            self.rpc_auth()?,
        )?)))
    }
    /// Cookie-file auth wins over user/password auth when both are available.
    fn rpc_auth(&self) -> color_eyre::Result<Auth> {
        let cookie = self.path_cookiefile();
        if cookie.is_file() {
            Ok(Auth::CookieFile(cookie))
        } else if self.rpcuser.is_some() && self.rpcpassword.is_some() {
            Ok(Auth::UserPass(
                self.rpcuser.clone().unwrap(),
                self.rpcpassword.clone().unwrap(),
            ))
        } else {
            Err(eyre!("Failed to find correct auth"))
        }
    }
    fn rpcconnect(&self) -> Option<&String> {
        self.rpcconnect.as_ref()
    }
    fn rpcport(&self) -> Option<u16> {
        self.rpcport
    }
    pub fn delay(&self) -> Option<u64> {
        self.delay
    }
    /// Bitcoin data directory, `~`/`$HOME` expanded; platform default when unset.
    pub fn bitcoindir(&self) -> PathBuf {
        self.bitcoindir
            .as_ref()
            .map_or_else(default_bitcoin_path, |s| Self::fix_user_path(s.as_ref()))
    }
    /// Blocks directory; defaults to `<bitcoindir>/blocks`.
    pub fn blocksdir(&self) -> PathBuf {
        self.blocksdir.as_ref().map_or_else(
            || self.bitcoindir().join("blocks"),
            |blocksdir| Self::fix_user_path(blocksdir.as_str()),
        )
    }
    /// BRK output root, `~`/`$HOME` expanded; default path when unset.
    pub fn brkdir(&self) -> PathBuf {
        self.brkdir
            .as_ref()
            .map_or_else(default_brk_path, |s| Self::fix_user_path(s.as_ref()))
    }
    pub fn outputsdir(&self) -> PathBuf {
        self.brkdir().join("outputs")
    }
    pub fn harsdir(&self) -> PathBuf {
        self.outputsdir().join("hars")
    }
    /// Whether the processor service should run (unset services means all).
    pub fn process(&self) -> bool {
        self.services
            .is_none_or(|m| m == Services::All || m == Services::Processor)
    }
    /// Whether the server service should run (unset services means all).
    pub fn serve(&self) -> bool {
        self.services
            .is_none_or(|m| m == Services::All || m == Services::Server)
    }
    /// RPC cookie file; defaults to `<bitcoindir>/.cookie`.
    fn path_cookiefile(&self) -> PathBuf {
        self.rpccookiefile.as_ref().map_or_else(
            || self.bitcoindir().join(".cookie"),
            |p| Self::fix_user_path(p.as_str()),
        )
    }
    /// Expands a leading `~` or `$HOME` to the user's home directory; other
    /// paths are returned unchanged.
    fn fix_user_path(path: &str) -> PathBuf {
        let expand = |pattern: &str| {
            // Fixed: only the *prefix* is expanded. The previous `replace`
            // based version also rewrote `~`/`$HOME` occurrences appearing
            // further inside the path.
            path.strip_prefix(pattern).map(|rest| {
                let home = std::env::var("HOME").unwrap();
                Path::new(&home).join(rest.strip_prefix('/').unwrap_or(rest))
            })
        };
        expand("~").or_else(|| expand("$HOME")).unwrap_or_else(|| PathBuf::from(path))
    }
    pub fn website(&self) -> Website {
        self.website.unwrap_or(Website::Default)
    }
    /// Fetching is enabled unless explicitly disabled.
    pub fn fetch(&self) -> bool {
        self.fetch.is_none_or(|b| b)
    }
    pub fn fetcher(&self) -> Option<Fetcher> {
        self.fetch()
            .then(|| Fetcher::import(Some(self.harsdir().as_path())).unwrap())
    }
    pub fn computation(&self) -> Computation {
        self.computation.unwrap_or_default()
    }
    pub fn format(&self) -> Format {
        self.format.unwrap_or_default()
    }
    /// Collision checking is disabled unless explicitly enabled.
    pub fn check_collisions(&self) -> bool {
        self.check_collisions.is_some_and(|b| b)
    }
    /// Watch mode is disabled unless explicitly enabled.
    pub fn watch(&self) -> bool {
        self.watch.is_some_and(|b| b)
    }
}
// Which long-running services the binary activates (selected via --services).
// NOTE(review): plain `//` comments are used deliberately — `///` doc comments
// on a clap `ValueEnum` would change the generated CLI help text.
#[derive(
    Default,
    Debug,
    Clone,
    Copy,
    Parser,
    ValueEnum,
    Serialize,
    Deserialize,
    PartialEq,
    Eq,
    PartialOrd,
    Ord,
)]
pub enum Services {
    // Run everything: both the processor and the server.
    #[default]
    All,
    // Processing only — presumably indexing/computing datasets; confirm against RunConfig::process().
    Processor,
    // Serving only — presumably the HTTP server; confirm against RunConfig::serve().
    Server,
}

View File

@@ -0,0 +1,23 @@
use clap_derive::{Parser, ValueEnum};
use serde::{Deserialize, Serialize};
#[derive(
Default,
Debug,
Clone,
Copy,
Parser,
ValueEnum,
Serialize,
Deserialize,
PartialEq,
Eq,
PartialOrd,
Ord,
)]
pub enum Services {
#[default]
All,
Processor,
Server,
}

View File

@@ -33,17 +33,19 @@ pub fn main() -> color_eyre::Result<()> {
let format = Format::Raw; let format = Format::Raw;
let mut indexer = Indexer::new(outputs_dir, true)?; let mut indexer = Indexer::forced_import(outputs_dir)?;
indexer.import_stores()?;
indexer.import_vecs()?;
let fetcher = Fetcher::import(None)?; let fetcher = Fetcher::import(None)?;
let mut computer = Computer::new(outputs_dir, Some(fetcher), format); let mut computer = Computer::forced_import(
computer.import_stores(&indexer)?; outputs_dir,
computer.import_vecs(&indexer, Computation::Lazy)?; &indexer,
Computation::Lazy,
Some(fetcher),
format,
)?;
let starting_indexes = indexer.index(&parser, rpc, &exit)?; let starting_indexes = indexer.index(&parser, rpc, &exit, true)?;
computer.compute(&mut indexer, starting_indexes, &exit)?; computer.compute(&mut indexer, starting_indexes, &exit)?;

View File

@@ -3,13 +3,13 @@
#![doc = include_str!("../examples/main.rs")] #![doc = include_str!("../examples/main.rs")]
#![doc = "```"] #![doc = "```"]
use std::path::{Path, PathBuf}; use std::path::Path;
use brk_core::Version; use brk_core::Version;
use brk_exit::Exit; use brk_exit::Exit;
use brk_fetcher::Fetcher; use brk_fetcher::Fetcher;
use brk_indexer::Indexer; use brk_indexer::Indexer;
use brk_vec::{AnyCollectableVec, Computation, Format}; use brk_vec::{Computation, Format};
mod stores; mod stores;
mod utils; mod utils;
@@ -21,53 +21,40 @@ use vecs::Vecs;
#[derive(Clone)] #[derive(Clone)]
pub struct Computer { pub struct Computer {
path: PathBuf,
fetcher: Option<Fetcher>, fetcher: Option<Fetcher>,
vecs: Option<Vecs>, pub vecs: Vecs,
stores: Option<Stores>, pub stores: Stores,
format: Format,
} }
const VERSION: Version = Version::ONE; const VERSION: Version = Version::ONE;
impl Computer { impl Computer {
pub fn new(outputs_dir: &Path, fetcher: Option<Fetcher>, format: Format) -> Self { /// Do NOT import multiple times or things will break !!!
Self { pub fn forced_import(
path: outputs_dir.to_owned(), outputs_dir: &Path,
fetcher,
vecs: None,
stores: None,
format,
}
}
pub fn import_vecs(
&mut self,
indexer: &Indexer, indexer: &Indexer,
computation: Computation, computation: Computation,
) -> color_eyre::Result<()> { fetcher: Option<Fetcher>,
self.vecs = Some(Vecs::import( format: Format,
// TODO: Give self.path, join inside import ) -> color_eyre::Result<Self> {
&self.path.join("vecs/computed"), Ok(Self {
VERSION + Version::ZERO, vecs: Vecs::import(
indexer, // TODO: Give self.path, join inside import
self.fetcher.is_some(), &outputs_dir.join("vecs/computed"),
computation, VERSION + Version::ZERO,
self.format, indexer,
)?); fetcher.is_some(),
Ok(()) computation,
} format,
)?,
/// Do NOT import multiple times or things will break !!! stores: Stores::import(
/// Clone struct instead // TODO: Give self.path, join inside import
pub fn import_stores(&mut self, indexer: &Indexer) -> color_eyre::Result<()> { &outputs_dir.join("stores"),
self.stores = Some(Stores::import( VERSION + Version::ZERO,
// TODO: Give self.path, join inside import &indexer.stores.keyspace,
&self.path.join("stores"), )?,
VERSION + Version::ZERO, fetcher,
indexer.keyspace(), })
)?);
Ok(())
} }
} }
@@ -80,25 +67,6 @@ impl Computer {
) -> color_eyre::Result<()> { ) -> color_eyre::Result<()> {
info!("Computing..."); info!("Computing...");
self.vecs self.vecs
.as_mut()
.unwrap()
.compute(indexer, starting_indexes, self.fetcher.as_mut(), exit) .compute(indexer, starting_indexes, self.fetcher.as_mut(), exit)
} }
pub fn vecs(&self) -> Vec<&dyn AnyCollectableVec> {
// pub fn vecs(&self) -> &Vecs {
self.vecs.as_ref().unwrap().vecs()
}
// pub fn mut_vecs(&mut self) -> &mut Vecs {
// self.vecs.as_mut().unwrap()
// }
pub fn stores(&self) -> &Stores {
self.stores.as_ref().unwrap()
}
pub fn mut_stores(&mut self) -> &mut Stores {
self.stores.as_mut().unwrap()
}
} }

View File

@@ -154,23 +154,19 @@ impl Vecs {
starting_indexes, starting_indexes,
exit, exit,
|v, indexer, _, starting_indexes, exit| { |v, indexer, _, starting_indexes, exit| {
let indexer_vecs = indexer.vecs();
v.compute_range( v.compute_range(
starting_indexes.height, starting_indexes.height,
&indexer_vecs.height_to_weight, &indexer.vecs.height_to_weight,
|h| (h, StoredU32::from(1_u32)), |h| (h, StoredU32::from(1_u32)),
exit, exit,
) )
}, },
)?; )?;
let indexer_vecs = indexer.vecs(); let mut height_to_timestamp_iter = indexer.vecs.height_to_timestamp.iter();
let mut height_to_timestamp_iter = indexer_vecs.height_to_timestamp.iter();
self.height_to_interval.compute_transform( self.height_to_interval.compute_transform(
starting_indexes.height, starting_indexes.height,
&indexer_vecs.height_to_timestamp, &indexer.vecs.height_to_timestamp,
|(height, timestamp, ..)| { |(height, timestamp, ..)| {
let interval = height.decremented().map_or(Timestamp::ZERO, |prev_h| { let interval = height.decremented().map_or(Timestamp::ZERO, |prev_h| {
let prev_timestamp = height_to_timestamp_iter.unwrap_get_inner(prev_h); let prev_timestamp = height_to_timestamp_iter.unwrap_get_inner(prev_h);
@@ -194,19 +190,19 @@ impl Vecs {
indexes, indexes,
starting_indexes, starting_indexes,
exit, exit,
Some(&indexer_vecs.height_to_weight), Some(&indexer.vecs.height_to_weight),
)?; )?;
self.indexes_to_block_size.compute_rest( self.indexes_to_block_size.compute_rest(
indexes, indexes,
starting_indexes, starting_indexes,
exit, exit,
Some(&indexer_vecs.height_to_total_size), Some(&indexer.vecs.height_to_total_size),
)?; )?;
self.height_to_vbytes.compute_transform( self.height_to_vbytes.compute_transform(
starting_indexes.height, starting_indexes.height,
&indexer_vecs.height_to_weight, &indexer.vecs.height_to_weight,
|(h, w, ..)| { |(h, w, ..)| {
( (
h, h,
@@ -223,7 +219,7 @@ impl Vecs {
Some(&self.height_to_vbytes), Some(&self.height_to_vbytes),
)?; )?;
let mut height_to_timestamp_iter = indexer_vecs.height_to_timestamp.iter(); let mut height_to_timestamp_iter = indexer.vecs.height_to_timestamp.iter();
self.difficultyepoch_to_timestamp.compute_transform( self.difficultyepoch_to_timestamp.compute_transform(
starting_indexes.difficultyepoch, starting_indexes.difficultyepoch,

View File

@@ -371,12 +371,10 @@ impl Vecs {
fetcher: &mut Fetcher, fetcher: &mut Fetcher,
exit: &Exit, exit: &Exit,
) -> color_eyre::Result<()> { ) -> color_eyre::Result<()> {
let indexer_vecs = indexer.vecs(); let mut height_to_timestamp_iter = indexer.vecs.height_to_timestamp.iter();
let mut height_to_timestamp_iter = indexer_vecs.height_to_timestamp.iter();
self.height_to_ohlc_in_cents.compute_transform( self.height_to_ohlc_in_cents.compute_transform(
starting_indexes.height, starting_indexes.height,
&indexer_vecs.height_to_timestamp, &indexer.vecs.height_to_timestamp,
|(h, t, ..)| { |(h, t, ..)| {
let ohlc = fetcher let ohlc = fetcher
.get_height( .get_height(

View File

@@ -163,7 +163,7 @@ where
self.height.compute( self.height.compute(
starting_indexes.height, starting_indexes.height,
txindex, txindex,
&indexer.vecs().height_to_first_txindex, &indexer.vecs.height_to_first_txindex,
&indexes.height_to_txindex_count, &indexes.height_to_txindex_count,
exit, exit,
)?; )?;
@@ -173,7 +173,7 @@ where
self.height.compute( self.height.compute(
starting_indexes.height, starting_indexes.height,
txindex, txindex,
&indexer.vecs().height_to_first_txindex, &indexer.vecs.height_to_first_txindex,
&indexes.height_to_txindex_count, &indexes.height_to_txindex_count,
exit, exit,
)?; )?;
@@ -289,7 +289,7 @@ impl ComputedVecsFromTxindex<Bitcoin> {
let starting_index = self.height.starting_index(starting_indexes.height); let starting_index = self.height.starting_index(starting_indexes.height);
(starting_index.unwrap_to_usize()..indexer.vecs().height_to_weight.len()) (starting_index.unwrap_to_usize()..indexer.vecs.height_to_weight.len())
.map(Height::from) .map(Height::from)
.try_for_each(|height| -> Result<()> { .try_for_each(|height| -> Result<()> {
if let Some(first) = self.height.first.as_mut() { if let Some(first) = self.height.first.as_mut() {
@@ -470,7 +470,7 @@ impl ComputedVecsFromTxindex<Dollars> {
let mut close_iter = fetched.chainindexes_to_close.height.into_iter(); let mut close_iter = fetched.chainindexes_to_close.height.into_iter();
(starting_index.unwrap_to_usize()..indexer.vecs().height_to_weight.len()) (starting_index.unwrap_to_usize()..indexer.vecs.height_to_weight.len())
.map(Height::from) .map(Height::from)
.try_for_each(|height| -> Result<()> { .try_for_each(|height| -> Result<()> {
let price = *close_iter.unwrap_get_inner(height); let price = *close_iter.unwrap_get_inner(height);

View File

@@ -28,6 +28,7 @@ pub struct ComputedRatioVecsFromDateIndex {
pub ratio_1y_sma_momentum_oscillator: ComputedVecsFromDateIndex<StoredF32>, pub ratio_1y_sma_momentum_oscillator: ComputedVecsFromDateIndex<StoredF32>,
pub ratio_sd: ComputedVecsFromDateIndex<StoredF32>, pub ratio_sd: ComputedVecsFromDateIndex<StoredF32>,
pub ratio_4y_sd: ComputedVecsFromDateIndex<StoredF32>, pub ratio_4y_sd: ComputedVecsFromDateIndex<StoredF32>,
pub ratio_1y_sd: ComputedVecsFromDateIndex<StoredF32>,
pub ratio_p99_9: ComputedVecsFromDateIndex<StoredF32>, pub ratio_p99_9: ComputedVecsFromDateIndex<StoredF32>,
pub ratio_p99_5: ComputedVecsFromDateIndex<StoredF32>, pub ratio_p99_5: ComputedVecsFromDateIndex<StoredF32>,
pub ratio_p99: ComputedVecsFromDateIndex<StoredF32>, pub ratio_p99: ComputedVecsFromDateIndex<StoredF32>,
@@ -54,6 +55,7 @@ pub struct ComputedRatioVecsFromDateIndex {
pub ratio_m3sd_as_price: ComputedVecsFromDateIndex<Dollars>, pub ratio_m3sd_as_price: ComputedVecsFromDateIndex<Dollars>,
pub ratio_zscore: ComputedVecsFromDateIndex<StoredF32>, pub ratio_zscore: ComputedVecsFromDateIndex<StoredF32>,
pub ratio_4y_zscore: ComputedVecsFromDateIndex<StoredF32>, pub ratio_4y_zscore: ComputedVecsFromDateIndex<StoredF32>,
pub ratio_1y_zscore: ComputedVecsFromDateIndex<StoredF32>,
} }
const VERSION: Version = Version::ZERO; const VERSION: Version = Version::ZERO;
@@ -152,6 +154,14 @@ impl ComputedRatioVecsFromDateIndex {
format, format,
options, options,
)?, )?,
ratio_1y_sd: ComputedVecsFromDateIndex::forced_import(
path,
&format!("{name}_ratio_1y_sd"),
true,
version + VERSION + Version::ZERO,
format,
options,
)?,
ratio_p99_9: ComputedVecsFromDateIndex::forced_import( ratio_p99_9: ComputedVecsFromDateIndex::forced_import(
path, path,
&format!("{name}_ratio_p99_9"), &format!("{name}_ratio_p99_9"),
@@ -360,6 +370,14 @@ impl ComputedRatioVecsFromDateIndex {
format, format,
options, options,
)?, )?,
ratio_1y_zscore: ComputedVecsFromDateIndex::forced_import(
path,
&format!("{name}_ratio_1y_zscore"),
true,
version + VERSION + Version::ZERO,
format,
options,
)?,
}) })
} }
@@ -571,8 +589,8 @@ impl ComputedRatioVecsFromDateIndex {
// } // }
let mut sma_iter = self.ratio_sma.dateindex.as_ref().unwrap().into_iter(); let mut sma_iter = self.ratio_sma.dateindex.as_ref().unwrap().into_iter();
let mut _4y_sma_iter = self.ratio_4y_sma.dateindex.as_ref().unwrap().into_iter(); let mut _4y_sma_iter = self.ratio_4y_sma.dateindex.as_ref().unwrap().into_iter();
let mut _1y_sma_iter = self.ratio_1y_sma.dateindex.as_ref().unwrap().into_iter();
let nan = StoredF32::from(f32::NAN); let nan = StoredF32::from(f32::NAN);
self.ratio self.ratio
@@ -622,6 +640,11 @@ impl ComputedRatioVecsFromDateIndex {
.as_mut() .as_mut()
.unwrap() .unwrap()
.forced_push_at(index, nan, exit)?; .forced_push_at(index, nan, exit)?;
self.ratio_1y_sd
.dateindex
.as_mut()
.unwrap()
.forced_push_at(index, nan, exit)?;
self.ratio_p1sd self.ratio_p1sd
.dateindex .dateindex
@@ -716,6 +739,20 @@ impl ComputedRatioVecsFromDateIndex {
.unwrap() .unwrap()
.forced_push_at(index, _4y_sd, exit)?; .forced_push_at(index, _4y_sd, exit)?;
let _1y_avg = _1y_sma_iter.unwrap_get_inner(index);
let _1y_sd = StoredF32::from(
(sorted.iter().map(|v| (**v - *_1y_avg).powi(2)).sum::<f32>()
/ (index.unwrap_to_usize() + 1) as f32)
.sqrt(),
);
self.ratio_1y_sd
.dateindex
.as_mut()
.unwrap()
.forced_push_at(index, _1y_sd, exit)?;
self.ratio_p1sd.dateindex.as_mut().unwrap().forced_push_at( self.ratio_p1sd.dateindex.as_mut().unwrap().forced_push_at(
index, index,
avg + sd, avg + sd,
@@ -803,6 +840,12 @@ impl ComputedRatioVecsFromDateIndex {
exit, exit,
None as Option<&EagerVec<_, _>>, None as Option<&EagerVec<_, _>>,
)?; )?;
self.ratio_1y_sd.compute_rest(
indexes,
starting_indexes,
exit,
None as Option<&EagerVec<_, _>>,
)?;
self.ratio_p1sd.compute_rest( self.ratio_p1sd.compute_rest(
indexes, indexes,
starting_indexes, starting_indexes,
@@ -1104,6 +1147,22 @@ impl ComputedRatioVecsFromDateIndex {
}, },
)?; )?;
self.ratio_1y_zscore.compute_all(
indexer,
indexes,
starting_indexes,
exit,
|vec, _, _, starting_indexes, exit| {
vec.compute_zscore(
starting_indexes.dateindex,
self.ratio.dateindex.as_ref().unwrap(),
self.ratio_1y_sma.dateindex.as_ref().unwrap(),
self.ratio_1y_sd.dateindex.as_ref().unwrap(),
exit,
)
},
)?;
Ok(()) Ok(())
} }
@@ -1111,6 +1170,7 @@ impl ComputedRatioVecsFromDateIndex {
vec![ vec![
self.ratio_sd.dateindex.as_mut().unwrap(), self.ratio_sd.dateindex.as_mut().unwrap(),
self.ratio_4y_sd.dateindex.as_mut().unwrap(), self.ratio_4y_sd.dateindex.as_mut().unwrap(),
self.ratio_1y_sd.dateindex.as_mut().unwrap(),
self.ratio_p99_9.dateindex.as_mut().unwrap(), self.ratio_p99_9.dateindex.as_mut().unwrap(),
self.ratio_p99_5.dateindex.as_mut().unwrap(), self.ratio_p99_5.dateindex.as_mut().unwrap(),
self.ratio_p99.dateindex.as_mut().unwrap(), self.ratio_p99.dateindex.as_mut().unwrap(),
@@ -1137,6 +1197,7 @@ impl ComputedRatioVecsFromDateIndex {
self.ratio_4y_sma.vecs(), self.ratio_4y_sma.vecs(),
self.ratio_1y_sma_momentum_oscillator.vecs(), self.ratio_1y_sma_momentum_oscillator.vecs(),
self.ratio_sd.vecs(), self.ratio_sd.vecs(),
self.ratio_1y_sd.vecs(),
self.ratio_4y_sd.vecs(), self.ratio_4y_sd.vecs(),
self.ratio_p99_9.vecs(), self.ratio_p99_9.vecs(),
self.ratio_p99_5.vecs(), self.ratio_p99_5.vecs(),
@@ -1163,6 +1224,7 @@ impl ComputedRatioVecsFromDateIndex {
self.ratio_m2sd_as_price.vecs(), self.ratio_m2sd_as_price.vecs(),
self.ratio_m3sd_as_price.vecs(), self.ratio_m3sd_as_price.vecs(),
self.ratio_zscore.vecs(), self.ratio_zscore.vecs(),
self.ratio_1y_zscore.vecs(),
self.ratio_4y_zscore.vecs(), self.ratio_4y_zscore.vecs(),
] ]
.into_iter() .into_iter()

View File

@@ -205,7 +205,7 @@ impl ComputedValueVecsFromTxindex {
dollars_txindex.compute_if_necessary( dollars_txindex.compute_if_necessary(
starting_indexes.txindex, starting_indexes.txindex,
&indexer.vecs().txindex_to_txid, &indexer.vecs.txindex_to_txid,
exit, exit,
)?; )?;

View File

@@ -102,7 +102,7 @@ impl Vecs {
"outputindex", "outputindex",
version + VERSION + Version::ZERO, version + VERSION + Version::ZERO,
format, format,
indexer.vecs().outputindex_to_value.boxed_clone(), indexer.vecs.outputindex_to_value.boxed_clone(),
|index, _| Some(index), |index, _| Some(index),
)?; )?;
@@ -112,7 +112,7 @@ impl Vecs {
"inputindex", "inputindex",
version + VERSION + Version::ZERO, version + VERSION + Version::ZERO,
format, format,
indexer.vecs().inputindex_to_outputindex.boxed_clone(), indexer.vecs.inputindex_to_outputindex.boxed_clone(),
|index, _| Some(index), |index, _| Some(index),
)?; )?;
@@ -122,7 +122,7 @@ impl Vecs {
"txindex", "txindex",
version + VERSION + Version::ZERO, version + VERSION + Version::ZERO,
format, format,
indexer.vecs().txindex_to_txid.boxed_clone(), indexer.vecs.txindex_to_txid.boxed_clone(),
|index, _| Some(index), |index, _| Some(index),
)?; )?;
@@ -132,8 +132,8 @@ impl Vecs {
"input_count", "input_count",
version + VERSION + Version::ZERO, version + VERSION + Version::ZERO,
format, format,
indexer.vecs().txindex_to_first_inputindex.boxed_clone(), indexer.vecs.txindex_to_first_inputindex.boxed_clone(),
indexer.vecs().inputindex_to_outputindex.boxed_clone(), indexer.vecs.inputindex_to_outputindex.boxed_clone(),
|index: TxIndex, txindex_to_first_inputindex_iter, inputindex_to_outputindex_iter| { |index: TxIndex, txindex_to_first_inputindex_iter, inputindex_to_outputindex_iter| {
let txindex = index.unwrap_to_usize(); let txindex = index.unwrap_to_usize();
txindex_to_first_inputindex_iter txindex_to_first_inputindex_iter
@@ -155,8 +155,8 @@ impl Vecs {
"output_count", "output_count",
version + VERSION + Version::ZERO, version + VERSION + Version::ZERO,
format, format,
indexer.vecs().txindex_to_first_outputindex.boxed_clone(), indexer.vecs.txindex_to_first_outputindex.boxed_clone(),
indexer.vecs().outputindex_to_value.boxed_clone(), indexer.vecs.outputindex_to_value.boxed_clone(),
|index: TxIndex, txindex_to_first_outputindex_iter, outputindex_to_value_iter| { |index: TxIndex, txindex_to_first_outputindex_iter, outputindex_to_value_iter| {
let txindex = index.unwrap_to_usize(); let txindex = index.unwrap_to_usize();
txindex_to_first_outputindex_iter txindex_to_first_outputindex_iter
@@ -178,7 +178,7 @@ impl Vecs {
"p2pk33index", "p2pk33index",
version + VERSION + Version::ZERO, version + VERSION + Version::ZERO,
format, format,
indexer.vecs().p2pk33index_to_p2pk33bytes.boxed_clone(), indexer.vecs.p2pk33index_to_p2pk33bytes.boxed_clone(),
|index, _| Some(index), |index, _| Some(index),
)?; )?;
let p2pk65index_to_p2pk65index = ComputedVec::forced_import_or_init_from_1( let p2pk65index_to_p2pk65index = ComputedVec::forced_import_or_init_from_1(
@@ -187,7 +187,7 @@ impl Vecs {
"p2pk65index", "p2pk65index",
version + VERSION + Version::ZERO, version + VERSION + Version::ZERO,
format, format,
indexer.vecs().p2pk65index_to_p2pk65bytes.boxed_clone(), indexer.vecs.p2pk65index_to_p2pk65bytes.boxed_clone(),
|index, _| Some(index), |index, _| Some(index),
)?; )?;
let p2pkhindex_to_p2pkhindex = ComputedVec::forced_import_or_init_from_1( let p2pkhindex_to_p2pkhindex = ComputedVec::forced_import_or_init_from_1(
@@ -196,7 +196,7 @@ impl Vecs {
"p2pkhindex", "p2pkhindex",
version + VERSION + Version::ZERO, version + VERSION + Version::ZERO,
format, format,
indexer.vecs().p2pkhindex_to_p2pkhbytes.boxed_clone(), indexer.vecs.p2pkhindex_to_p2pkhbytes.boxed_clone(),
|index, _| Some(index), |index, _| Some(index),
)?; )?;
let p2shindex_to_p2shindex = ComputedVec::forced_import_or_init_from_1( let p2shindex_to_p2shindex = ComputedVec::forced_import_or_init_from_1(
@@ -205,7 +205,7 @@ impl Vecs {
"p2shindex", "p2shindex",
version + VERSION + Version::ZERO, version + VERSION + Version::ZERO,
format, format,
indexer.vecs().p2shindex_to_p2shbytes.boxed_clone(), indexer.vecs.p2shindex_to_p2shbytes.boxed_clone(),
|index, _| Some(index), |index, _| Some(index),
)?; )?;
let p2trindex_to_p2trindex = ComputedVec::forced_import_or_init_from_1( let p2trindex_to_p2trindex = ComputedVec::forced_import_or_init_from_1(
@@ -214,7 +214,7 @@ impl Vecs {
"p2trindex", "p2trindex",
version + VERSION + Version::ZERO, version + VERSION + Version::ZERO,
format, format,
indexer.vecs().p2trindex_to_p2trbytes.boxed_clone(), indexer.vecs.p2trindex_to_p2trbytes.boxed_clone(),
|index, _| Some(index), |index, _| Some(index),
)?; )?;
let p2wpkhindex_to_p2wpkhindex = ComputedVec::forced_import_or_init_from_1( let p2wpkhindex_to_p2wpkhindex = ComputedVec::forced_import_or_init_from_1(
@@ -223,7 +223,7 @@ impl Vecs {
"p2wpkhindex", "p2wpkhindex",
version + VERSION + Version::ZERO, version + VERSION + Version::ZERO,
format, format,
indexer.vecs().p2wpkhindex_to_p2wpkhbytes.boxed_clone(), indexer.vecs.p2wpkhindex_to_p2wpkhbytes.boxed_clone(),
|index, _| Some(index), |index, _| Some(index),
)?; )?;
let p2wshindex_to_p2wshindex = ComputedVec::forced_import_or_init_from_1( let p2wshindex_to_p2wshindex = ComputedVec::forced_import_or_init_from_1(
@@ -232,7 +232,7 @@ impl Vecs {
"p2wshindex", "p2wshindex",
version + VERSION + Version::ZERO, version + VERSION + Version::ZERO,
format, format,
indexer.vecs().p2wshindex_to_p2wshbytes.boxed_clone(), indexer.vecs.p2wshindex_to_p2wshbytes.boxed_clone(),
|index, _| Some(index), |index, _| Some(index),
)?; )?;
let p2aindex_to_p2aindex = ComputedVec::forced_import_or_init_from_1( let p2aindex_to_p2aindex = ComputedVec::forced_import_or_init_from_1(
@@ -241,7 +241,7 @@ impl Vecs {
"p2aindex", "p2aindex",
version + VERSION + Version::ZERO, version + VERSION + Version::ZERO,
format, format,
indexer.vecs().p2aindex_to_p2abytes.boxed_clone(), indexer.vecs.p2aindex_to_p2abytes.boxed_clone(),
|index, _| Some(index), |index, _| Some(index),
)?; )?;
let p2msindex_to_p2msindex = ComputedVec::forced_import_or_init_from_1( let p2msindex_to_p2msindex = ComputedVec::forced_import_or_init_from_1(
@@ -250,7 +250,7 @@ impl Vecs {
"p2msindex", "p2msindex",
version + VERSION + Version::ZERO, version + VERSION + Version::ZERO,
format, format,
indexer.vecs().p2msindex_to_txindex.boxed_clone(), indexer.vecs.p2msindex_to_txindex.boxed_clone(),
|index, _| Some(index), |index, _| Some(index),
)?; )?;
let emptyoutputindex_to_emptyoutputindex = ComputedVec::forced_import_or_init_from_1( let emptyoutputindex_to_emptyoutputindex = ComputedVec::forced_import_or_init_from_1(
@@ -259,7 +259,7 @@ impl Vecs {
"emptyoutputindex", "emptyoutputindex",
version + VERSION + Version::ZERO, version + VERSION + Version::ZERO,
format, format,
indexer.vecs().emptyoutputindex_to_txindex.boxed_clone(), indexer.vecs.emptyoutputindex_to_txindex.boxed_clone(),
|index, _| Some(index), |index, _| Some(index),
)?; )?;
let unknownoutputindex_to_unknownoutputindex = ComputedVec::forced_import_or_init_from_1( let unknownoutputindex_to_unknownoutputindex = ComputedVec::forced_import_or_init_from_1(
@@ -268,7 +268,7 @@ impl Vecs {
"unknownoutputindex", "unknownoutputindex",
version + VERSION + Version::ZERO, version + VERSION + Version::ZERO,
format, format,
indexer.vecs().unknownoutputindex_to_txindex.boxed_clone(), indexer.vecs.unknownoutputindex_to_txindex.boxed_clone(),
|index, _| Some(index), |index, _| Some(index),
)?; )?;
let opreturnindex_to_opreturnindex = ComputedVec::forced_import_or_init_from_1( let opreturnindex_to_opreturnindex = ComputedVec::forced_import_or_init_from_1(
@@ -277,7 +277,7 @@ impl Vecs {
"opreturnindex", "opreturnindex",
version + VERSION + Version::ZERO, version + VERSION + Version::ZERO,
format, format,
indexer.vecs().opreturnindex_to_txindex.boxed_clone(), indexer.vecs.opreturnindex_to_txindex.boxed_clone(),
|index, _| Some(index), |index, _| Some(index),
)?; )?;
@@ -543,108 +543,106 @@ impl Vecs {
starting_indexes: brk_indexer::Indexes, starting_indexes: brk_indexer::Indexes,
exit: &Exit, exit: &Exit,
) -> color_eyre::Result<Indexes> { ) -> color_eyre::Result<Indexes> {
let indexer_vecs = indexer.vecs();
// --- // ---
// OutputIndex // OutputIndex
// --- // ---
self.outputindex_to_outputindex.compute_if_necessary( self.outputindex_to_outputindex.compute_if_necessary(
starting_indexes.outputindex, starting_indexes.outputindex,
&indexer_vecs.outputindex_to_value, &indexer.vecs.outputindex_to_value,
exit, exit,
)?; )?;
self.txindex_to_output_count.compute_if_necessary( self.txindex_to_output_count.compute_if_necessary(
starting_indexes.txindex, starting_indexes.txindex,
&indexer_vecs.txindex_to_txid, &indexer.vecs.txindex_to_txid,
exit, exit,
)?; )?;
self.txindex_to_input_count.compute_if_necessary( self.txindex_to_input_count.compute_if_necessary(
starting_indexes.txindex, starting_indexes.txindex,
&indexer_vecs.txindex_to_txid, &indexer.vecs.txindex_to_txid,
exit, exit,
)?; )?;
self.outputindex_to_txindex.compute_inverse_less_to_more( self.outputindex_to_txindex.compute_inverse_less_to_more(
starting_indexes.txindex, starting_indexes.txindex,
&indexer_vecs.txindex_to_first_outputindex, &indexer.vecs.txindex_to_first_outputindex,
&self.txindex_to_output_count, &self.txindex_to_output_count,
exit, exit,
)?; )?;
self.p2pk33index_to_p2pk33index.compute_if_necessary( self.p2pk33index_to_p2pk33index.compute_if_necessary(
starting_indexes.p2pk33index, starting_indexes.p2pk33index,
&indexer_vecs.p2pk33index_to_p2pk33bytes, &indexer.vecs.p2pk33index_to_p2pk33bytes,
exit, exit,
)?; )?;
self.p2pk65index_to_p2pk65index.compute_if_necessary( self.p2pk65index_to_p2pk65index.compute_if_necessary(
starting_indexes.p2pk65index, starting_indexes.p2pk65index,
&indexer_vecs.p2pk65index_to_p2pk65bytes, &indexer.vecs.p2pk65index_to_p2pk65bytes,
exit, exit,
)?; )?;
self.p2pkhindex_to_p2pkhindex.compute_if_necessary( self.p2pkhindex_to_p2pkhindex.compute_if_necessary(
starting_indexes.p2pkhindex, starting_indexes.p2pkhindex,
&indexer_vecs.p2pkhindex_to_p2pkhbytes, &indexer.vecs.p2pkhindex_to_p2pkhbytes,
exit, exit,
)?; )?;
self.p2shindex_to_p2shindex.compute_if_necessary( self.p2shindex_to_p2shindex.compute_if_necessary(
starting_indexes.p2shindex, starting_indexes.p2shindex,
&indexer_vecs.p2shindex_to_p2shbytes, &indexer.vecs.p2shindex_to_p2shbytes,
exit, exit,
)?; )?;
self.p2trindex_to_p2trindex.compute_if_necessary( self.p2trindex_to_p2trindex.compute_if_necessary(
starting_indexes.p2trindex, starting_indexes.p2trindex,
&indexer_vecs.p2trindex_to_p2trbytes, &indexer.vecs.p2trindex_to_p2trbytes,
exit, exit,
)?; )?;
self.p2wpkhindex_to_p2wpkhindex.compute_if_necessary( self.p2wpkhindex_to_p2wpkhindex.compute_if_necessary(
starting_indexes.p2wpkhindex, starting_indexes.p2wpkhindex,
&indexer_vecs.p2wpkhindex_to_p2wpkhbytes, &indexer.vecs.p2wpkhindex_to_p2wpkhbytes,
exit, exit,
)?; )?;
self.p2wshindex_to_p2wshindex.compute_if_necessary( self.p2wshindex_to_p2wshindex.compute_if_necessary(
starting_indexes.p2wshindex, starting_indexes.p2wshindex,
&indexer_vecs.p2wshindex_to_p2wshbytes, &indexer.vecs.p2wshindex_to_p2wshbytes,
exit, exit,
)?; )?;
self.emptyoutputindex_to_emptyoutputindex self.emptyoutputindex_to_emptyoutputindex
.compute_if_necessary( .compute_if_necessary(
starting_indexes.emptyoutputindex, starting_indexes.emptyoutputindex,
&indexer_vecs.emptyoutputindex_to_txindex, &indexer.vecs.emptyoutputindex_to_txindex,
exit, exit,
)?; )?;
self.p2msindex_to_p2msindex.compute_if_necessary( self.p2msindex_to_p2msindex.compute_if_necessary(
starting_indexes.p2msindex, starting_indexes.p2msindex,
&indexer_vecs.p2msindex_to_txindex, &indexer.vecs.p2msindex_to_txindex,
exit, exit,
)?; )?;
self.opreturnindex_to_opreturnindex.compute_if_necessary( self.opreturnindex_to_opreturnindex.compute_if_necessary(
starting_indexes.opreturnindex, starting_indexes.opreturnindex,
&indexer_vecs.opreturnindex_to_txindex, &indexer.vecs.opreturnindex_to_txindex,
exit, exit,
)?; )?;
self.p2aindex_to_p2aindex.compute_if_necessary( self.p2aindex_to_p2aindex.compute_if_necessary(
starting_indexes.p2aindex, starting_indexes.p2aindex,
&indexer_vecs.p2aindex_to_p2abytes, &indexer.vecs.p2aindex_to_p2abytes,
exit, exit,
)?; )?;
self.unknownoutputindex_to_unknownoutputindex self.unknownoutputindex_to_unknownoutputindex
.compute_if_necessary( .compute_if_necessary(
starting_indexes.unknownoutputindex, starting_indexes.unknownoutputindex,
&indexer_vecs.unknownoutputindex_to_txindex, &indexer.vecs.unknownoutputindex_to_txindex,
exit, exit,
)?; )?;
@@ -654,7 +652,7 @@ impl Vecs {
self.inputindex_to_inputindex.compute_if_necessary( self.inputindex_to_inputindex.compute_if_necessary(
starting_indexes.inputindex, starting_indexes.inputindex,
&indexer_vecs.inputindex_to_outputindex, &indexer.vecs.inputindex_to_outputindex,
exit, exit,
)?; )?;
@@ -664,20 +662,20 @@ impl Vecs {
self.txindex_to_txindex.compute_if_necessary( self.txindex_to_txindex.compute_if_necessary(
starting_indexes.txindex, starting_indexes.txindex,
&indexer_vecs.txindex_to_txid, &indexer.vecs.txindex_to_txid,
exit, exit,
)?; )?;
self.height_to_txindex_count.compute_count_from_indexes( self.height_to_txindex_count.compute_count_from_indexes(
starting_indexes.height, starting_indexes.height,
&indexer_vecs.height_to_first_txindex, &indexer.vecs.height_to_first_txindex,
&indexer_vecs.txindex_to_txid, &indexer.vecs.txindex_to_txid,
exit, exit,
)?; )?;
self.txindex_to_height.compute_inverse_less_to_more( self.txindex_to_height.compute_inverse_less_to_more(
starting_indexes.height, starting_indexes.height,
&indexer_vecs.height_to_first_txindex, &indexer.vecs.height_to_first_txindex,
&self.height_to_txindex_count, &self.height_to_txindex_count,
exit, exit,
)?; )?;
@@ -688,13 +686,13 @@ impl Vecs {
self.height_to_height.compute_from_index( self.height_to_height.compute_from_index(
starting_indexes.height, starting_indexes.height,
&indexer_vecs.height_to_weight, &indexer.vecs.height_to_weight,
exit, exit,
)?; )?;
self.height_to_date.compute_transform( self.height_to_date.compute_transform(
starting_indexes.height, starting_indexes.height,
&indexer_vecs.height_to_timestamp, &indexer.vecs.height_to_timestamp,
|(h, t, ..)| (h, Date::from(t)), |(h, t, ..)| (h, Date::from(t)),
exit, exit,
)?; )?;
@@ -702,7 +700,7 @@ impl Vecs {
let mut prev_timestamp_fixed = None; let mut prev_timestamp_fixed = None;
self.height_to_timestamp_fixed.compute_transform( self.height_to_timestamp_fixed.compute_transform(
starting_indexes.height, starting_indexes.height,
&indexer_vecs.height_to_timestamp, &indexer.vecs.height_to_timestamp,
|(h, timestamp, height_to_timestamp_fixed_iter)| { |(h, timestamp, height_to_timestamp_fixed_iter)| {
if prev_timestamp_fixed.is_none() { if prev_timestamp_fixed.is_none() {
if let Some(prev_h) = h.decremented() { if let Some(prev_h) = h.decremented() {
@@ -779,7 +777,7 @@ impl Vecs {
self.dateindex_to_height_count.compute_count_from_indexes( self.dateindex_to_height_count.compute_count_from_indexes(
starting_dateindex, starting_dateindex,
&self.dateindex_to_first_height, &self.dateindex_to_first_height,
&indexer_vecs.height_to_weight, &indexer.vecs.height_to_weight,
exit, exit,
)?; )?;
@@ -829,7 +827,7 @@ impl Vecs {
self.height_to_difficultyepoch.compute_from_index( self.height_to_difficultyepoch.compute_from_index(
starting_indexes.height, starting_indexes.height,
&indexer_vecs.height_to_weight, &indexer.vecs.height_to_weight,
exit, exit,
)?; )?;
@@ -979,7 +977,7 @@ impl Vecs {
self.height_to_halvingepoch.compute_from_index( self.height_to_halvingepoch.compute_from_index(
starting_indexes.height, starting_indexes.height,
&indexer_vecs.height_to_weight, &indexer.vecs.height_to_weight,
exit, exit,
)?; )?;

View File

@@ -116,7 +116,7 @@ impl Vecs {
indexes, indexes,
starting_indexes, starting_indexes,
exit, exit,
Some(&indexer.vecs().height_to_difficulty), Some(&indexer.vecs.height_to_difficulty),
)?; )?;
Ok(()) Ok(())

View File

@@ -1205,18 +1205,16 @@ impl Vecs {
starting_indexes: &mut Indexes, starting_indexes: &mut Indexes,
exit: &Exit, exit: &Exit,
) -> color_eyre::Result<()> { ) -> color_eyre::Result<()> {
let indexer_vecs = indexer.vecs(); let height_to_first_outputindex = &indexer.vecs.height_to_first_outputindex;
let height_to_first_inputindex = &indexer.vecs.height_to_first_inputindex;
let height_to_first_outputindex = &indexer_vecs.height_to_first_outputindex;
let height_to_first_inputindex = &indexer_vecs.height_to_first_inputindex;
let height_to_output_count = transactions.indexes_to_output_count.height.unwrap_sum(); let height_to_output_count = transactions.indexes_to_output_count.height.unwrap_sum();
let height_to_input_count = transactions.indexes_to_input_count.height.unwrap_sum(); let height_to_input_count = transactions.indexes_to_input_count.height.unwrap_sum();
let inputindex_to_outputindex = &indexer_vecs.inputindex_to_outputindex; let inputindex_to_outputindex = &indexer.vecs.inputindex_to_outputindex;
let outputindex_to_value = &indexer_vecs.outputindex_to_value; let outputindex_to_value = &indexer.vecs.outputindex_to_value;
let txindex_to_height = &indexes.txindex_to_height; let txindex_to_height = &indexes.txindex_to_height;
let height_to_timestamp_fixed = &indexes.height_to_timestamp_fixed; let height_to_timestamp_fixed = &indexes.height_to_timestamp_fixed;
let outputindex_to_txindex = &indexes.outputindex_to_txindex; let outputindex_to_txindex = &indexes.outputindex_to_txindex;
let outputindex_to_outputtype = &indexer_vecs.outputindex_to_outputtype; let outputindex_to_outputtype = &indexer.vecs.outputindex_to_outputtype;
let height_to_unclaimed_rewards = transactions let height_to_unclaimed_rewards = transactions
.indexes_to_unclaimed_rewards .indexes_to_unclaimed_rewards
.sats .sats

View File

@@ -106,8 +106,8 @@ impl Vecs {
"value", "value",
version + VERSION + Version::ZERO, version + VERSION + Version::ZERO,
format, format,
indexer.vecs().inputindex_to_outputindex.boxed_clone(), indexer.vecs.inputindex_to_outputindex.boxed_clone(),
indexer.vecs().outputindex_to_value.boxed_clone(), indexer.vecs.outputindex_to_value.boxed_clone(),
|index: InputIndex, inputindex_to_outputindex_iter, outputindex_to_value_iter| { |index: InputIndex, inputindex_to_outputindex_iter, outputindex_to_value_iter| {
inputindex_to_outputindex_iter inputindex_to_outputindex_iter
.next_at(index.unwrap_to_usize()) .next_at(index.unwrap_to_usize())
@@ -133,8 +133,8 @@ impl Vecs {
"weight", "weight",
version + VERSION + Version::ZERO, version + VERSION + Version::ZERO,
format, format,
indexer.vecs().txindex_to_base_size.boxed_clone(), indexer.vecs.txindex_to_base_size.boxed_clone(),
indexer.vecs().txindex_to_total_size.boxed_clone(), indexer.vecs.txindex_to_total_size.boxed_clone(),
|index: TxIndex, txindex_to_base_size_iter, txindex_to_total_size_iter| { |index: TxIndex, txindex_to_base_size_iter, txindex_to_total_size_iter| {
let index = index.unwrap_to_usize(); let index = index.unwrap_to_usize();
txindex_to_base_size_iter txindex_to_base_size_iter
@@ -179,7 +179,7 @@ impl Vecs {
version + VERSION + Version::ZERO, version + VERSION + Version::ZERO,
format, format,
indexes.txindex_to_height.boxed_clone(), indexes.txindex_to_height.boxed_clone(),
indexer.vecs().height_to_first_txindex.boxed_clone(), indexer.vecs.height_to_first_txindex.boxed_clone(),
|index: TxIndex, txindex_to_height_iter, height_to_first_txindex_iter| { |index: TxIndex, txindex_to_height_iter, height_to_first_txindex_iter| {
txindex_to_height_iter txindex_to_height_iter
.next_at(index.unwrap_to_usize()) .next_at(index.unwrap_to_usize())
@@ -202,7 +202,7 @@ impl Vecs {
"input_value", "input_value",
version + VERSION + Version::ZERO, version + VERSION + Version::ZERO,
format, format,
indexer.vecs().txindex_to_first_inputindex.boxed_clone(), indexer.vecs.txindex_to_first_inputindex.boxed_clone(),
indexes.txindex_to_input_count.boxed_clone(), indexes.txindex_to_input_count.boxed_clone(),
inputindex_to_value.boxed_clone(), inputindex_to_value.boxed_clone(),
|index: TxIndex, |index: TxIndex,
@@ -251,9 +251,9 @@ impl Vecs {
"output_value", "output_value",
version + VERSION + Version::ZERO, version + VERSION + Version::ZERO,
format, format,
indexer.vecs().txindex_to_first_outputindex.boxed_clone(), indexer.vecs.txindex_to_first_outputindex.boxed_clone(),
indexes.txindex_to_output_count.boxed_clone(), indexes.txindex_to_output_count.boxed_clone(),
indexer.vecs().outputindex_to_value.boxed_clone(), indexer.vecs.outputindex_to_value.boxed_clone(),
|index: TxIndex, |index: TxIndex,
txindex_to_first_outputindex_iter, txindex_to_first_outputindex_iter,
txindex_to_output_count_iter, txindex_to_output_count_iter,
@@ -688,8 +688,8 @@ impl Vecs {
|v, indexer, _, starting_indexes, exit| { |v, indexer, _, starting_indexes, exit| {
v.compute_count_from_indexes( v.compute_count_from_indexes(
starting_indexes.height, starting_indexes.height,
&indexer.vecs().height_to_first_txindex, &indexer.vecs.height_to_first_txindex,
&indexer.vecs().txindex_to_txid, &indexer.vecs.txindex_to_txid,
exit, exit,
) )
}, },
@@ -713,7 +713,7 @@ impl Vecs {
let compute_indexes_to_tx_vany = let compute_indexes_to_tx_vany =
|indexes_to_tx_vany: &mut ComputedVecsFromHeight<StoredUsize>, txversion| { |indexes_to_tx_vany: &mut ComputedVecsFromHeight<StoredUsize>, txversion| {
let mut txindex_to_txversion_iter = indexer.vecs().txindex_to_txversion.iter(); let mut txindex_to_txversion_iter = indexer.vecs.txindex_to_txversion.iter();
indexes_to_tx_vany.compute_all( indexes_to_tx_vany.compute_all(
indexer, indexer,
indexes, indexes,
@@ -722,8 +722,8 @@ impl Vecs {
|vec, indexer, _, starting_indexes, exit| { |vec, indexer, _, starting_indexes, exit| {
vec.compute_filtered_count_from_indexes( vec.compute_filtered_count_from_indexes(
starting_indexes.height, starting_indexes.height,
&indexer.vecs().height_to_first_txindex, &indexer.vecs.height_to_first_txindex,
&indexer.vecs().txindex_to_txid, &indexer.vecs.txindex_to_txid,
|txindex| { |txindex| {
let v = txindex_to_txversion_iter.unwrap_get_inner(txindex); let v = txindex_to_txversion_iter.unwrap_get_inner(txindex);
v == txversion v == txversion
@@ -739,31 +739,31 @@ impl Vecs {
self.txindex_to_is_coinbase.compute_if_necessary( self.txindex_to_is_coinbase.compute_if_necessary(
starting_indexes.txindex, starting_indexes.txindex,
&indexer.vecs().txindex_to_txid, &indexer.vecs.txindex_to_txid,
exit, exit,
)?; )?;
self.txindex_to_weight.compute_if_necessary( self.txindex_to_weight.compute_if_necessary(
starting_indexes.txindex, starting_indexes.txindex,
&indexer.vecs().txindex_to_txid, &indexer.vecs.txindex_to_txid,
exit, exit,
)?; )?;
self.txindex_to_vsize.compute_if_necessary( self.txindex_to_vsize.compute_if_necessary(
starting_indexes.txindex, starting_indexes.txindex,
&indexer.vecs().txindex_to_txid, &indexer.vecs.txindex_to_txid,
exit, exit,
)?; )?;
self.inputindex_to_value.compute_if_necessary( self.inputindex_to_value.compute_if_necessary(
starting_indexes.inputindex, starting_indexes.inputindex,
&indexer.vecs().inputindex_to_outputindex, &indexer.vecs.inputindex_to_outputindex,
exit, exit,
)?; )?;
self.txindex_to_output_value.compute_if_necessary( self.txindex_to_output_value.compute_if_necessary(
starting_indexes.txindex, starting_indexes.txindex,
&indexer.vecs().txindex_to_txid, &indexer.vecs.txindex_to_txid,
exit, exit,
)?; )?;
@@ -775,9 +775,9 @@ impl Vecs {
// |vec, indexer, _, starting_indexes, exit| { // |vec, indexer, _, starting_indexes, exit| {
// vec.compute_sum_from_indexes( // vec.compute_sum_from_indexes(
// starting_indexes.txindex, // starting_indexes.txindex,
// &indexer.vecs().txindex_to_first_outputindex, // &indexer.vecs.txindex_to_first_outputindex,
// self.indexes_to_output_count.txindex.as_ref().unwrap(), // self.indexes_to_output_count.txindex.as_ref().unwrap(),
// &indexer.vecs().outputindex_to_value, // &indexer.vecs.outputindex_to_value,
// exit, // exit,
// ) // )
// }, // },
@@ -785,7 +785,7 @@ impl Vecs {
self.txindex_to_input_value.compute_if_necessary( self.txindex_to_input_value.compute_if_necessary(
starting_indexes.txindex, starting_indexes.txindex,
&indexer.vecs().txindex_to_txid, &indexer.vecs.txindex_to_txid,
exit, exit,
)?; )?;
@@ -797,7 +797,7 @@ impl Vecs {
// |vec, indexer, _, starting_indexes, exit| { // |vec, indexer, _, starting_indexes, exit| {
// vec.compute_sum_from_indexes( // vec.compute_sum_from_indexes(
// starting_indexes.txindex, // starting_indexes.txindex,
// &indexer.vecs().txindex_to_first_inputindex, // &indexer.vecs.txindex_to_first_inputindex,
// self.indexes_to_input_count.txindex.as_ref().unwrap(), // self.indexes_to_input_count.txindex.as_ref().unwrap(),
// &self.inputindex_to_value, // &self.inputindex_to_value,
// exit, // exit,
@@ -807,13 +807,13 @@ impl Vecs {
self.txindex_to_fee.compute_if_necessary( self.txindex_to_fee.compute_if_necessary(
starting_indexes.txindex, starting_indexes.txindex,
&indexer.vecs().txindex_to_txid, &indexer.vecs.txindex_to_txid,
exit, exit,
)?; )?;
self.txindex_to_feerate.compute_if_necessary( self.txindex_to_feerate.compute_if_necessary(
starting_indexes.txindex, starting_indexes.txindex,
&indexer.vecs().txindex_to_txid, &indexer.vecs.txindex_to_txid,
exit, exit,
)?; )?;
@@ -858,12 +858,12 @@ impl Vecs {
exit, exit,
|vec, indexer, _, starting_indexes, exit| { |vec, indexer, _, starting_indexes, exit| {
let mut txindex_to_first_outputindex_iter = let mut txindex_to_first_outputindex_iter =
indexer.vecs().txindex_to_first_outputindex.iter(); indexer.vecs.txindex_to_first_outputindex.iter();
let mut txindex_to_output_count_iter = indexes.txindex_to_output_count.iter(); let mut txindex_to_output_count_iter = indexes.txindex_to_output_count.iter();
let mut outputindex_to_value_iter = indexer.vecs().outputindex_to_value.iter(); let mut outputindex_to_value_iter = indexer.vecs.outputindex_to_value.iter();
vec.compute_transform( vec.compute_transform(
starting_indexes.height, starting_indexes.height,
&indexer.vecs().height_to_first_txindex, &indexer.vecs.height_to_first_txindex,
|(height, txindex, ..)| { |(height, txindex, ..)| {
let first_outputindex = txindex_to_first_outputindex_iter let first_outputindex = txindex_to_first_outputindex_iter
.unwrap_get_inner(txindex) .unwrap_get_inner(txindex)
@@ -933,8 +933,8 @@ impl Vecs {
|v, indexer, _, starting_indexes, exit| { |v, indexer, _, starting_indexes, exit| {
v.compute_count_from_indexes( v.compute_count_from_indexes(
starting_indexes.height, starting_indexes.height,
&indexer.vecs().height_to_first_p2aindex, &indexer.vecs.height_to_first_p2aindex,
&indexer.vecs().p2aindex_to_p2abytes, &indexer.vecs.p2aindex_to_p2abytes,
exit, exit,
) )
}, },
@@ -948,8 +948,8 @@ impl Vecs {
|v, indexer, _, starting_indexes, exit| { |v, indexer, _, starting_indexes, exit| {
v.compute_count_from_indexes( v.compute_count_from_indexes(
starting_indexes.height, starting_indexes.height,
&indexer.vecs().height_to_first_p2msindex, &indexer.vecs.height_to_first_p2msindex,
&indexer.vecs().p2msindex_to_txindex, &indexer.vecs.p2msindex_to_txindex,
exit, exit,
) )
}, },
@@ -963,8 +963,8 @@ impl Vecs {
|v, indexer, _, starting_indexes, exit| { |v, indexer, _, starting_indexes, exit| {
v.compute_count_from_indexes( v.compute_count_from_indexes(
starting_indexes.height, starting_indexes.height,
&indexer.vecs().height_to_first_p2pk33index, &indexer.vecs.height_to_first_p2pk33index,
&indexer.vecs().p2pk33index_to_p2pk33bytes, &indexer.vecs.p2pk33index_to_p2pk33bytes,
exit, exit,
) )
}, },
@@ -978,8 +978,8 @@ impl Vecs {
|v, indexer, _, starting_indexes, exit| { |v, indexer, _, starting_indexes, exit| {
v.compute_count_from_indexes( v.compute_count_from_indexes(
starting_indexes.height, starting_indexes.height,
&indexer.vecs().height_to_first_p2pk65index, &indexer.vecs.height_to_first_p2pk65index,
&indexer.vecs().p2pk65index_to_p2pk65bytes, &indexer.vecs.p2pk65index_to_p2pk65bytes,
exit, exit,
) )
}, },
@@ -993,8 +993,8 @@ impl Vecs {
|v, indexer, _, starting_indexes, exit| { |v, indexer, _, starting_indexes, exit| {
v.compute_count_from_indexes( v.compute_count_from_indexes(
starting_indexes.height, starting_indexes.height,
&indexer.vecs().height_to_first_p2pkhindex, &indexer.vecs.height_to_first_p2pkhindex,
&indexer.vecs().p2pkhindex_to_p2pkhbytes, &indexer.vecs.p2pkhindex_to_p2pkhbytes,
exit, exit,
) )
}, },
@@ -1008,8 +1008,8 @@ impl Vecs {
|v, indexer, _, starting_indexes, exit| { |v, indexer, _, starting_indexes, exit| {
v.compute_count_from_indexes( v.compute_count_from_indexes(
starting_indexes.height, starting_indexes.height,
&indexer.vecs().height_to_first_p2shindex, &indexer.vecs.height_to_first_p2shindex,
&indexer.vecs().p2shindex_to_p2shbytes, &indexer.vecs.p2shindex_to_p2shbytes,
exit, exit,
) )
}, },
@@ -1023,8 +1023,8 @@ impl Vecs {
|v, indexer, _, starting_indexes, exit| { |v, indexer, _, starting_indexes, exit| {
v.compute_count_from_indexes( v.compute_count_from_indexes(
starting_indexes.height, starting_indexes.height,
&indexer.vecs().height_to_first_p2trindex, &indexer.vecs.height_to_first_p2trindex,
&indexer.vecs().p2trindex_to_p2trbytes, &indexer.vecs.p2trindex_to_p2trbytes,
exit, exit,
) )
}, },
@@ -1038,8 +1038,8 @@ impl Vecs {
|v, indexer, _, starting_indexes, exit| { |v, indexer, _, starting_indexes, exit| {
v.compute_count_from_indexes( v.compute_count_from_indexes(
starting_indexes.height, starting_indexes.height,
&indexer.vecs().height_to_first_p2wpkhindex, &indexer.vecs.height_to_first_p2wpkhindex,
&indexer.vecs().p2wpkhindex_to_p2wpkhbytes, &indexer.vecs.p2wpkhindex_to_p2wpkhbytes,
exit, exit,
) )
}, },
@@ -1053,8 +1053,8 @@ impl Vecs {
|v, indexer, _, starting_indexes, exit| { |v, indexer, _, starting_indexes, exit| {
v.compute_count_from_indexes( v.compute_count_from_indexes(
starting_indexes.height, starting_indexes.height,
&indexer.vecs().height_to_first_p2wshindex, &indexer.vecs.height_to_first_p2wshindex,
&indexer.vecs().p2wshindex_to_p2wshbytes, &indexer.vecs.p2wshindex_to_p2wshbytes,
exit, exit,
) )
}, },
@@ -1068,8 +1068,8 @@ impl Vecs {
|v, indexer, _, starting_indexes, exit| { |v, indexer, _, starting_indexes, exit| {
v.compute_count_from_indexes( v.compute_count_from_indexes(
starting_indexes.height, starting_indexes.height,
&indexer.vecs().height_to_first_opreturnindex, &indexer.vecs.height_to_first_opreturnindex,
&indexer.vecs().opreturnindex_to_txindex, &indexer.vecs.opreturnindex_to_txindex,
exit, exit,
) )
}, },
@@ -1083,8 +1083,8 @@ impl Vecs {
|v, indexer, _, starting_indexes, exit| { |v, indexer, _, starting_indexes, exit| {
v.compute_count_from_indexes( v.compute_count_from_indexes(
starting_indexes.height, starting_indexes.height,
&indexer.vecs().height_to_first_unknownoutputindex, &indexer.vecs.height_to_first_unknownoutputindex,
&indexer.vecs().unknownoutputindex_to_txindex, &indexer.vecs.unknownoutputindex_to_txindex,
exit, exit,
) )
}, },
@@ -1098,8 +1098,8 @@ impl Vecs {
|v, indexer, _, starting_indexes, exit| { |v, indexer, _, starting_indexes, exit| {
v.compute_count_from_indexes( v.compute_count_from_indexes(
starting_indexes.height, starting_indexes.height,
&indexer.vecs().height_to_first_emptyoutputindex, &indexer.vecs.height_to_first_emptyoutputindex,
&indexer.vecs().emptyoutputindex_to_txindex, &indexer.vecs.emptyoutputindex_to_txindex,
exit, exit,
) )
}, },

View File

@@ -2,8 +2,10 @@
mod enums; mod enums;
mod structs; mod structs;
mod traits;
mod utils; mod utils;
pub use enums::*; pub use enums::*;
pub use structs::*; pub use structs::*;
pub use traits::*;
pub use utils::*; pub use utils::*;

View File

@@ -7,7 +7,7 @@ use serde::Serialize;
// use color_eyre::eyre::eyre; // use color_eyre::eyre::eyre;
use zerocopy_derive::{FromBytes, Immutable, IntoBytes, KnownLayout}; use zerocopy_derive::{FromBytes, Immutable, IntoBytes, KnownLayout};
use crate::{CheckedSub, Error}; use crate::{CheckedSub, Error, Printable};
use super::Date; use super::Date;
@@ -93,3 +93,13 @@ impl fmt::Display for DateIndex {
write!(f, "{}", self.0) write!(f, "{}", self.0)
} }
} }
impl Printable for DateIndex {
fn to_string() -> &'static str {
"dateindex"
}
fn to_possible_strings() -> &'static [&'static str] {
&["d", "date", "dateindex"]
}
}

View File

@@ -3,7 +3,7 @@ use std::{fmt::Debug, ops::Add};
use serde::{Deserialize, Serialize}; use serde::{Deserialize, Serialize};
use zerocopy_derive::{FromBytes, Immutable, IntoBytes, KnownLayout}; use zerocopy_derive::{FromBytes, Immutable, IntoBytes, KnownLayout};
use crate::CheckedSub; use crate::{CheckedSub, Printable};
use super::{Date, DateIndex, YearIndex}; use super::{Date, DateIndex, YearIndex};
@@ -83,3 +83,13 @@ impl From<YearIndex> for DecadeIndex {
} }
} }
} }
impl Printable for DecadeIndex {
fn to_string() -> &'static str {
"decadeindex"
}
fn to_possible_strings() -> &'static [&'static str] {
&["decade", "decadeindex"]
}
}

View File

@@ -6,7 +6,7 @@ use std::{
use serde::{Deserialize, Serialize}; use serde::{Deserialize, Serialize};
use zerocopy_derive::{FromBytes, Immutable, IntoBytes, KnownLayout}; use zerocopy_derive::{FromBytes, Immutable, IntoBytes, KnownLayout};
use crate::CheckedSub; use crate::{CheckedSub, Printable};
use super::Height; use super::Height;
@@ -80,3 +80,13 @@ impl CheckedSub for DifficultyEpoch {
self.0.checked_sub(rhs.0).map(Self) self.0.checked_sub(rhs.0).map(Self)
} }
} }
impl Printable for DifficultyEpoch {
fn to_string() -> &'static str {
"difficultyepoch"
}
fn to_possible_strings() -> &'static [&'static str] {
&["difficulty", "difficultyepoch"]
}
}

View File

@@ -6,7 +6,7 @@ use std::{
use serde::{Deserialize, Serialize}; use serde::{Deserialize, Serialize};
use zerocopy_derive::{FromBytes, Immutable, IntoBytes, KnownLayout}; use zerocopy_derive::{FromBytes, Immutable, IntoBytes, KnownLayout};
use crate::CheckedSub; use crate::{CheckedSub, Printable};
use super::Height; use super::Height;
@@ -86,3 +86,13 @@ impl Div<usize> for HalvingEpoch {
Self::from(self.0 as usize / rhs) Self::from(self.0 as usize / rhs)
} }
} }
impl Printable for HalvingEpoch {
fn to_string() -> &'static str {
"halvingepoch"
}
fn to_possible_strings() -> &'static [&'static str] {
&["halving", "halvingepoch"]
}
}

View File

@@ -9,7 +9,7 @@ use serde::{Deserialize, Serialize};
use zerocopy::{FromBytes, IntoBytes}; use zerocopy::{FromBytes, IntoBytes};
use zerocopy_derive::{FromBytes, Immutable, IntoBytes, KnownLayout}; use zerocopy_derive::{FromBytes, Immutable, IntoBytes, KnownLayout};
use crate::CheckedSub; use crate::{CheckedSub, Printable};
use super::StoredUsize; use super::StoredUsize;
@@ -209,3 +209,13 @@ impl From<Height> for byteview::ByteView {
Self::new(value.as_bytes()) Self::new(value.as_bytes())
} }
} }
impl Printable for Height {
fn to_string() -> &'static str {
"height"
}
fn to_possible_strings() -> &'static [&'static str] {
&["h", "height"]
}
}

View File

@@ -4,7 +4,7 @@ use derive_deref::{Deref, DerefMut};
use serde::Serialize; use serde::Serialize;
use zerocopy_derive::{FromBytes, Immutable, IntoBytes, KnownLayout}; use zerocopy_derive::{FromBytes, Immutable, IntoBytes, KnownLayout};
use crate::CheckedSub; use crate::{CheckedSub, Printable};
use super::Vin; use super::Vin;
@@ -96,3 +96,13 @@ impl From<InputIndex> for usize {
value.0 as usize value.0 as usize
} }
} }
impl Printable for InputIndex {
fn to_string() -> &'static str {
"inputindex"
}
fn to_possible_strings() -> &'static [&'static str] {
&["in", "inputindex"]
}
}

View File

@@ -3,7 +3,7 @@ use std::{fmt::Debug, ops::Add};
use serde::{Deserialize, Serialize}; use serde::{Deserialize, Serialize};
use zerocopy_derive::{FromBytes, Immutable, IntoBytes, KnownLayout}; use zerocopy_derive::{FromBytes, Immutable, IntoBytes, KnownLayout};
use crate::CheckedSub; use crate::{CheckedSub, Printable};
use super::{Date, DateIndex, YearIndex}; use super::{Date, DateIndex, YearIndex};
@@ -68,3 +68,13 @@ impl CheckedSub for MonthIndex {
self.0.checked_sub(rhs.0).map(Self) self.0.checked_sub(rhs.0).map(Self)
} }
} }
impl Printable for MonthIndex {
fn to_string() -> &'static str {
"monthindex"
}
fn to_possible_strings() -> &'static [&'static str] {
&["m", "month", "monthindex"]
}
}

View File

@@ -4,7 +4,7 @@ use derive_deref::{Deref, DerefMut};
use serde::Serialize; use serde::Serialize;
use zerocopy_derive::{FromBytes, Immutable, IntoBytes, KnownLayout}; use zerocopy_derive::{FromBytes, Immutable, IntoBytes, KnownLayout};
use crate::CheckedSub; use crate::{CheckedSub, Printable};
use super::Vout; use super::Vout;
@@ -102,3 +102,13 @@ impl From<OutputIndex> for usize {
value.0 as usize value.0 as usize
} }
} }
impl Printable for OutputIndex {
fn to_string() -> &'static str {
"outputindex"
}
fn to_possible_strings() -> &'static [&'static str] {
&["out", "outputindex"]
}
}

View File

@@ -6,7 +6,7 @@ use serde::Serialize;
use zerocopy::{FromBytes, IntoBytes}; use zerocopy::{FromBytes, IntoBytes};
use zerocopy_derive::{FromBytes, Immutable, IntoBytes, KnownLayout}; use zerocopy_derive::{FromBytes, Immutable, IntoBytes, KnownLayout};
use crate::CheckedSub; use crate::{CheckedSub, Printable};
#[derive( #[derive(
Debug, Debug,
@@ -137,6 +137,15 @@ impl CheckedSub<EmptyOutputIndex> for EmptyOutputIndex {
self.0.0.checked_sub(rhs.0.0).map(OutputTypeIndex).map(Self) self.0.0.checked_sub(rhs.0.0).map(OutputTypeIndex).map(Self)
} }
} }
impl Printable for EmptyOutputIndex {
fn to_string() -> &'static str {
"emptyoutputindex"
}
fn to_possible_strings() -> &'static [&'static str] {
&["empty", "emptyoutputindex"]
}
}
#[derive( #[derive(
Debug, Debug,
@@ -183,6 +192,16 @@ impl CheckedSub<P2MSIndex> for P2MSIndex {
} }
} }
impl Printable for P2MSIndex {
fn to_string() -> &'static str {
"p2msindex"
}
fn to_possible_strings() -> &'static [&'static str] {
&["ms", "p2ms", "p2msindex"]
}
}
#[derive( #[derive(
Debug, Debug,
PartialEq, PartialEq,
@@ -228,6 +247,16 @@ impl CheckedSub<P2AIndex> for P2AIndex {
} }
} }
impl Printable for P2AIndex {
fn to_string() -> &'static str {
"p2aindex"
}
fn to_possible_strings() -> &'static [&'static str] {
&["a", "p2a", "p2aindex"]
}
}
#[derive( #[derive(
Debug, Debug,
PartialEq, PartialEq,
@@ -273,6 +302,16 @@ impl CheckedSub<OpReturnIndex> for OpReturnIndex {
} }
} }
impl Printable for OpReturnIndex {
fn to_string() -> &'static str {
"opreturnindex"
}
fn to_possible_strings() -> &'static [&'static str] {
&["op", "opreturn", "opreturnindex"]
}
}
#[derive( #[derive(
Debug, Debug,
PartialEq, PartialEq,
@@ -318,6 +357,16 @@ impl CheckedSub<UnknownOutputIndex> for UnknownOutputIndex {
} }
} }
impl Printable for UnknownOutputIndex {
fn to_string() -> &'static str {
"unknownoutputindex"
}
fn to_possible_strings() -> &'static [&'static str] {
&["unknown", "unknownoutputindex"]
}
}
#[derive( #[derive(
Debug, Debug,
PartialEq, PartialEq,
@@ -363,6 +412,16 @@ impl CheckedSub<P2PK33Index> for P2PK33Index {
} }
} }
impl Printable for P2PK33Index {
fn to_string() -> &'static str {
"p2pk33index"
}
fn to_possible_strings() -> &'static [&'static str] {
&["pk33", "p2pk33", "p2pk33index"]
}
}
#[derive( #[derive(
Debug, Debug,
PartialEq, PartialEq,
@@ -408,6 +467,15 @@ impl CheckedSub<P2PK65Index> for P2PK65Index {
} }
} }
impl Printable for P2PK65Index {
fn to_string() -> &'static str {
"p2pk65index"
}
fn to_possible_strings() -> &'static [&'static str] {
&["pk65", "p2pk65", "p2pk65index"]
}
}
#[derive( #[derive(
Debug, Debug,
PartialEq, PartialEq,
@@ -453,6 +521,16 @@ impl CheckedSub<P2PKHIndex> for P2PKHIndex {
} }
} }
impl Printable for P2PKHIndex {
fn to_string() -> &'static str {
"p2pkhindex"
}
fn to_possible_strings() -> &'static [&'static str] {
&["pkh", "p2pkh", "p2pkhindex"]
}
}
#[derive( #[derive(
Debug, Debug,
PartialEq, PartialEq,
@@ -498,6 +576,16 @@ impl CheckedSub<P2SHIndex> for P2SHIndex {
} }
} }
impl Printable for P2SHIndex {
fn to_string() -> &'static str {
"p2shindex"
}
fn to_possible_strings() -> &'static [&'static str] {
&["sh", "p2sh", "p2shindex"]
}
}
#[derive( #[derive(
Debug, Debug,
PartialEq, PartialEq,
@@ -543,6 +631,16 @@ impl CheckedSub<P2TRIndex> for P2TRIndex {
} }
} }
impl Printable for P2TRIndex {
fn to_string() -> &'static str {
"p2trindex"
}
fn to_possible_strings() -> &'static [&'static str] {
&["tr", "p2tr", "p2trindex"]
}
}
#[derive( #[derive(
Debug, Debug,
PartialEq, PartialEq,
@@ -588,6 +686,16 @@ impl CheckedSub<P2WPKHIndex> for P2WPKHIndex {
} }
} }
impl Printable for P2WPKHIndex {
fn to_string() -> &'static str {
"p2wpkhindex"
}
fn to_possible_strings() -> &'static [&'static str] {
&["wpkh", "p2wpkh", "p2wpkhindex"]
}
}
#[derive( #[derive(
Debug, Debug,
PartialEq, PartialEq,
@@ -632,3 +740,13 @@ impl CheckedSub<P2WSHIndex> for P2WSHIndex {
self.0.0.checked_sub(rhs.0.0).map(OutputTypeIndex).map(Self) self.0.0.checked_sub(rhs.0.0).map(OutputTypeIndex).map(Self)
} }
} }
impl Printable for P2WSHIndex {
fn to_string() -> &'static str {
"p2wshindex"
}
fn to_possible_strings() -> &'static [&'static str] {
&["wsh", "p2wsh", "p2wshindex"]
}
}

View File

@@ -3,7 +3,7 @@ use std::{fmt::Debug, ops::Add};
use serde::{Deserialize, Serialize}; use serde::{Deserialize, Serialize};
use zerocopy_derive::{FromBytes, Immutable, IntoBytes, KnownLayout}; use zerocopy_derive::{FromBytes, Immutable, IntoBytes, KnownLayout};
use crate::CheckedSub; use crate::{CheckedSub, Printable};
use super::MonthIndex; use super::MonthIndex;
@@ -62,3 +62,13 @@ impl CheckedSub for QuarterIndex {
self.0.checked_sub(rhs.0).map(Self) self.0.checked_sub(rhs.0).map(Self)
} }
} }
impl Printable for QuarterIndex {
fn to_string() -> &'static str {
"quarterindex"
}
fn to_possible_strings() -> &'static [&'static str] {
&["q", "quarter", "quarterindex"]
}
}

View File

@@ -8,7 +8,7 @@ use derive_deref::Deref;
use serde::Serialize; use serde::Serialize;
use zerocopy_derive::{FromBytes, Immutable, IntoBytes, KnownLayout}; use zerocopy_derive::{FromBytes, Immutable, IntoBytes, KnownLayout};
use crate::CheckedSub; use crate::{CheckedSub, Printable};
use super::{Dollars, StoredF64}; use super::{Dollars, StoredF64};
@@ -148,3 +148,13 @@ impl Ord for StoredF32 {
} }
} }
} }
impl Printable for StoredF32 {
fn to_string() -> &'static str {
"f32"
}
fn to_possible_strings() -> &'static [&'static str] {
&["f32"]
}
}

View File

@@ -8,7 +8,7 @@ use derive_deref::Deref;
use serde::Serialize; use serde::Serialize;
use zerocopy_derive::{FromBytes, Immutable, IntoBytes, KnownLayout}; use zerocopy_derive::{FromBytes, Immutable, IntoBytes, KnownLayout};
use crate::{Bitcoin, CheckedSub, Dollars}; use crate::{Bitcoin, CheckedSub, Dollars, Printable};
#[derive( #[derive(
Debug, Deref, Default, Clone, Copy, FromBytes, Immutable, IntoBytes, KnownLayout, Serialize, Debug, Deref, Default, Clone, Copy, FromBytes, Immutable, IntoBytes, KnownLayout, Serialize,
@@ -120,3 +120,13 @@ impl From<Bitcoin> for StoredF64 {
Self(f64::from(value)) Self(f64::from(value))
} }
} }
impl Printable for StoredF64 {
fn to_string() -> &'static str {
"f64"
}
fn to_possible_strings() -> &'static [&'static str] {
&["f64"]
}
}

View File

@@ -4,7 +4,7 @@ use derive_deref::Deref;
use serde::Serialize; use serde::Serialize;
use zerocopy_derive::{FromBytes, Immutable, IntoBytes, KnownLayout}; use zerocopy_derive::{FromBytes, Immutable, IntoBytes, KnownLayout};
use crate::CheckedSub; use crate::{CheckedSub, Printable};
use super::{ use super::{
EmptyOutputIndex, OpReturnIndex, P2AIndex, P2MSIndex, P2PK33Index, P2PK65Index, P2PKHIndex, EmptyOutputIndex, OpReturnIndex, P2AIndex, P2MSIndex, P2PK33Index, P2PK65Index, P2PKHIndex,
@@ -164,3 +164,13 @@ impl From<EmptyOutputIndex> for StoredU32 {
Self::from(usize::from(value)) Self::from(usize::from(value))
} }
} }
impl Printable for StoredU32 {
fn to_string() -> &'static str {
"u32"
}
fn to_possible_strings() -> &'static [&'static str] {
&["u32"]
}
}

View File

@@ -4,7 +4,7 @@ use derive_deref::Deref;
use serde::Serialize; use serde::Serialize;
use zerocopy_derive::{FromBytes, Immutable, IntoBytes, KnownLayout}; use zerocopy_derive::{FromBytes, Immutable, IntoBytes, KnownLayout};
use crate::CheckedSub; use crate::{CheckedSub, Printable};
use super::{InputIndex, OutputIndex, TxIndex}; use super::{InputIndex, OutputIndex, TxIndex};
@@ -97,3 +97,13 @@ impl From<OutputIndex> for StoredU64 {
Self(*value) Self(*value)
} }
} }
impl Printable for StoredU64 {
fn to_string() -> &'static str {
"u64"
}
fn to_possible_strings() -> &'static [&'static str] {
&["u64"]
}
}

View File

@@ -4,7 +4,7 @@ use derive_deref::Deref;
use serde::Serialize; use serde::Serialize;
use zerocopy_derive::{FromBytes, Immutable, IntoBytes, KnownLayout}; use zerocopy_derive::{FromBytes, Immutable, IntoBytes, KnownLayout};
use crate::CheckedSub; use crate::{CheckedSub, Printable};
pub type StoredPhantom = StoredU8; pub type StoredPhantom = StoredU8;
@@ -93,3 +93,13 @@ impl From<StoredU8> for usize {
value.0 as usize value.0 as usize
} }
} }
impl Printable for StoredU8 {
fn to_string() -> &'static str {
"u8"
}
fn to_possible_strings() -> &'static [&'static str] {
&["u8"]
}
}

View File

@@ -4,7 +4,7 @@ use derive_deref::{Deref, DerefMut};
use serde::Serialize; use serde::Serialize;
use zerocopy_derive::{FromBytes, Immutable, IntoBytes, KnownLayout}; use zerocopy_derive::{FromBytes, Immutable, IntoBytes, KnownLayout};
use crate::CheckedSub; use crate::{CheckedSub, Printable};
use super::{ use super::{
DateIndex, EmptyOutputIndex, Height, InputIndex, MonthIndex, OpReturnIndex, OutputIndex, DateIndex, EmptyOutputIndex, Height, InputIndex, MonthIndex, OpReturnIndex, OutputIndex,
@@ -193,3 +193,13 @@ impl From<EmptyOutputIndex> for StoredUsize {
Self::from(usize::from(value)) Self::from(usize::from(value))
} }
} }
impl Printable for StoredUsize {
fn to_string() -> &'static str {
"usize"
}
fn to_possible_strings() -> &'static [&'static str] {
&["usize"]
}
}

View File

@@ -6,7 +6,7 @@ use serde::Serialize;
use zerocopy::{FromBytes, IntoBytes}; use zerocopy::{FromBytes, IntoBytes};
use zerocopy_derive::{FromBytes, Immutable, IntoBytes, KnownLayout}; use zerocopy_derive::{FromBytes, Immutable, IntoBytes, KnownLayout};
use crate::CheckedSub; use crate::{CheckedSub, Printable};
use super::StoredU32; use super::StoredU32;
@@ -111,3 +111,13 @@ impl From<TxIndex> for StoredU32 {
Self::from(value.0) Self::from(value.0)
} }
} }
impl Printable for TxIndex {
fn to_string() -> &'static str {
"txindex"
}
fn to_possible_strings() -> &'static [&'static str] {
&["tx", "txindex"]
}
}

View File

@@ -3,7 +3,7 @@ use std::{fmt::Debug, ops::Add};
use serde::{Deserialize, Serialize}; use serde::{Deserialize, Serialize};
use zerocopy_derive::{FromBytes, Immutable, IntoBytes, KnownLayout}; use zerocopy_derive::{FromBytes, Immutable, IntoBytes, KnownLayout};
use crate::CheckedSub; use crate::{CheckedSub, Printable};
use super::{Date, DateIndex}; use super::{Date, DateIndex};
@@ -86,3 +86,13 @@ impl CheckedSub for WeekIndex {
self.0.checked_sub(rhs.0).map(Self) self.0.checked_sub(rhs.0).map(Self)
} }
} }
impl Printable for WeekIndex {
fn to_string() -> &'static str {
"weekindex"
}
fn to_possible_strings() -> &'static [&'static str] {
&["w", "week", "weekindex"]
}
}

View File

@@ -3,7 +3,7 @@ use std::{fmt::Debug, ops::Add};
use serde::{Deserialize, Serialize}; use serde::{Deserialize, Serialize};
use zerocopy_derive::{FromBytes, Immutable, IntoBytes, KnownLayout}; use zerocopy_derive::{FromBytes, Immutable, IntoBytes, KnownLayout};
use crate::CheckedSub; use crate::{CheckedSub, Printable};
use super::{Date, DateIndex, MonthIndex}; use super::{Date, DateIndex, MonthIndex};
@@ -80,3 +80,13 @@ impl From<MonthIndex> for YearIndex {
Self((usize::from(value) / 12) as u8) Self((usize::from(value) / 12) as u8)
} }
} }
impl Printable for YearIndex {
fn to_string() -> &'static str {
"yearindex"
}
fn to_possible_strings() -> &'static [&'static str] {
&["y", "year", "yearindex"]
}
}

View File

@@ -0,0 +1,3 @@
mod printable;
pub use printable::*;

View File

@@ -0,0 +1,4 @@
pub trait Printable {
fn to_string() -> &'static str;
fn to_possible_strings() -> &'static [&'static str];
}

View File

@@ -11,7 +11,6 @@ repository.workspace = true
brk_core = { workspace = true } brk_core = { workspace = true }
brk_logger = { workspace = true } brk_logger = { workspace = true }
color-eyre = { workspace = true } color-eyre = { workspace = true }
jiff = { workspace = true }
log = { workspace = true } log = { workspace = true }
minreq = { workspace = true } minreq = { workspace = true }
serde_json = { workspace = true } serde_json = { workspace = true }

View File

@@ -31,4 +31,4 @@
</a> </a>
</p> </p>
A crate that can fetch the Bitcoin price, either by date or height, from Binance and Kibo. A crate that can fetch the Bitcoin price, either by date or height, from Binance, Kraken and the main instance of BRK.

View File

@@ -13,10 +13,8 @@ pub struct BRK {
dateindex_to_ohlc: BTreeMap<DateIndex, Vec<OHLCCents>>, dateindex_to_ohlc: BTreeMap<DateIndex, Vec<OHLCCents>>,
} }
const API_URL: &str = "https://bitcoinresearchkit.org/api"; const API_URL: &str = "https://bitcoinresearchkit.org/api/vecs";
const RETRIES: usize = 10; const RETRIES: usize = 10;
const CHUNK_SIZE: usize = 10_000; const CHUNK_SIZE: usize = 10_000;
impl BRK { impl BRK {
@@ -40,11 +38,11 @@ impl BRK {
.unwrap() .unwrap()
.get(usize::from(height.checked_sub(key).unwrap())) .get(usize::from(height.checked_sub(key).unwrap()))
.cloned() .cloned()
.ok_or(eyre!("Couldn't find height in kibo")) .ok_or(eyre!("Couldn't find height in BRK"))
} }
fn fetch_height_prices(height: Height) -> color_eyre::Result<Vec<OHLCCents>> { fn fetch_height_prices(height: Height) -> color_eyre::Result<Vec<OHLCCents>> {
info!("Fetching Kibo height {height} prices..."); info!("Fetching BRK height {height} prices...");
retry( retry(
|_| { |_| {
@@ -89,11 +87,11 @@ impl BRK {
.unwrap() .unwrap()
.get(usize::from(dateindex.checked_sub(key).unwrap())) .get(usize::from(dateindex.checked_sub(key).unwrap()))
.cloned() .cloned()
.ok_or(eyre!("Couldn't find date in kibo")) .ok_or(eyre!("Couldn't find date in BRK"))
} }
fn fetch_date_prices(dateindex: DateIndex) -> color_eyre::Result<Vec<OHLCCents>> { fn fetch_date_prices(dateindex: DateIndex) -> color_eyre::Result<Vec<OHLCCents>> {
info!("Fetching Kibo dateindex {dateindex} prices..."); info!("Fetching BRK dateindex {dateindex} prices...");
retry( retry(
|_| { |_| {

View File

@@ -24,12 +24,9 @@ fn main() -> color_eyre::Result<()> {
let outputs = Path::new("../../_outputs"); let outputs = Path::new("../../_outputs");
let mut indexer = Indexer::new(outputs, false)?; let mut indexer = Indexer::forced_import(outputs)?;
indexer.import_stores()?; indexer.index(&parser, rpc, &exit, false)?;
indexer.import_vecs()?;
indexer.index(&parser, rpc, &exit)?;
dbg!(i.elapsed()); dbg!(i.elapsed());

View File

@@ -3,12 +3,7 @@
#![doc = include_str!("../examples/main.rs")] #![doc = include_str!("../examples/main.rs")]
#![doc = "```"] #![doc = "```"]
use std::{ use std::{collections::BTreeMap, path::Path, str::FromStr, thread};
collections::BTreeMap,
path::{Path, PathBuf},
str::FromStr,
thread,
};
use brk_core::{ use brk_core::{
AddressBytes, AddressBytesHash, BlockHash, BlockHashPrefix, Height, InputIndex, OutputIndex, AddressBytes, AddressBytesHash, BlockHash, BlockHashPrefix, Height, InputIndex, OutputIndex,
@@ -21,7 +16,6 @@ use brk_exit::Exit;
use brk_parser::Parser; use brk_parser::Parser;
use brk_vec::{AnyVec, VecIterator}; use brk_vec::{AnyVec, VecIterator};
use color_eyre::eyre::{ContextCompat, eyre}; use color_eyre::eyre::{ContextCompat, eyre};
use fjall::TransactionalKeyspace;
use log::{error, info}; use log::{error, info};
use rayon::prelude::*; use rayon::prelude::*;
mod indexes; mod indexes;
@@ -38,71 +32,41 @@ const VERSION: Version = Version::ONE;
#[derive(Clone)] #[derive(Clone)]
pub struct Indexer { pub struct Indexer {
path: PathBuf, pub vecs: Vecs,
vecs: Option<Vecs>, pub stores: Stores,
stores: Option<Stores>,
check_collisions: bool,
} }
impl Indexer { impl Indexer {
pub fn new(outputs_dir: &Path, check_collisions: bool) -> color_eyre::Result<Self> { pub fn forced_import(outputs_dir: &Path) -> color_eyre::Result<Self> {
setrlimit()?; setrlimit()?;
Ok(Self { Ok(Self {
path: outputs_dir.to_owned(), vecs: Vecs::forced_import(&outputs_dir.join("vecs/indexed"), VERSION + Version::ZERO)?,
vecs: None, stores: Stores::forced_import(&outputs_dir.join("stores"), VERSION + Version::ZERO)?,
stores: None,
check_collisions,
}) })
} }
pub fn import_vecs(&mut self) -> color_eyre::Result<()> {
self.vecs = Some(Vecs::forced_import(
&self.path.join("vecs/indexed"),
VERSION + Version::ZERO,
)?);
Ok(())
}
/// Do NOT import multiple times are things will break !!!
/// Clone struct instead
pub fn import_stores(&mut self) -> color_eyre::Result<()> {
self.stores = Some(Stores::forced_import(
&self.path.join("stores"),
VERSION + Version::ZERO,
)?);
Ok(())
}
pub fn index( pub fn index(
&mut self, &mut self,
parser: &Parser, parser: &Parser,
rpc: &'static bitcoincore_rpc::Client, rpc: &'static bitcoincore_rpc::Client,
exit: &Exit, exit: &Exit,
check_collisions: bool,
) -> color_eyre::Result<Indexes> { ) -> color_eyre::Result<Indexes> {
let starting_indexes = Indexes::try_from(( let starting_indexes = Indexes::try_from((&mut self.vecs, &self.stores, rpc))
self.vecs.as_mut().unwrap(), .unwrap_or_else(|_report| {
self.stores.as_ref().unwrap(), let indexes = Indexes::default();
rpc, indexes.push_if_needed(&mut self.vecs).unwrap();
)) indexes
.unwrap_or_else(|_report| { });
let indexes = Indexes::default();
indexes.push_if_needed(self.vecs.as_mut().unwrap()).unwrap();
indexes
});
exit.block(); exit.block();
self.stores self.stores
.as_mut() .rollback_if_needed(&mut self.vecs, &starting_indexes)?;
.unwrap() self.vecs.rollback_if_needed(&starting_indexes)?;
.rollback_if_needed(self.vecs.as_mut().unwrap(), &starting_indexes)?;
self.vecs
.as_mut()
.unwrap()
.rollback_if_needed(&starting_indexes)?;
exit.release(); exit.release();
let vecs = self.vecs.as_mut().unwrap(); let vecs = &mut self.vecs;
let stores = self.stores.as_mut().unwrap(); let stores = &mut self.stores;
// Cloned because we want to return starting indexes for the computer // Cloned because we want to return starting indexes for the computer
let mut idxs = starting_indexes.clone(); let mut idxs = starting_indexes.clone();
@@ -144,7 +108,7 @@ impl Indexer {
idxs.height = height; idxs.height = height;
// Used to check rapidhash collisions // Used to check rapidhash collisions
let check_collisions = self.check_collisions && height > Height::new(COLLISIONS_CHECKED_UP_TO); let check_collisions = check_collisions && height > Height::new(COLLISIONS_CHECKED_UP_TO);
let blockhash = BlockHash::from(blockhash); let blockhash = BlockHash::from(blockhash);
let blockhash_prefix = BlockHashPrefix::from(&blockhash); let blockhash_prefix = BlockHashPrefix::from(&blockhash);
@@ -739,18 +703,6 @@ impl Indexer {
Ok(starting_indexes) Ok(starting_indexes)
} }
pub fn vecs(&self) -> &Vecs {
self.vecs.as_ref().unwrap()
}
pub fn stores(&self) -> &Stores {
self.stores.as_ref().unwrap()
}
pub fn keyspace(&self) -> &TransactionalKeyspace {
&self.stores().keyspace
}
} }
#[derive(Debug)] #[derive(Debug)]

View File

@@ -1,6 +1,6 @@
[package] [package]
name = "brk_query" name = "brk_interface"
description = "A library that finds requested datasets" description = "An interface to BRK's engine"
license.workspace = true license.workspace = true
edition.workspace = true edition.workspace = true
version.workspace = true version.workspace = true
@@ -12,10 +12,10 @@ brk_core = { workspace = true }
brk_computer = { workspace = true } brk_computer = { workspace = true }
brk_indexer = { workspace = true } brk_indexer = { workspace = true }
brk_vec = { workspace = true } brk_vec = { workspace = true }
clap = { workspace = true }
clap_derive = { workspace = true }
color-eyre = { workspace = true } color-eyre = { workspace = true }
derive_deref = { workspace = true } derive_deref = { workspace = true }
rmcp = { workspace = true }
schemars = { workspace = true }
serde = { workspace = true } serde = { workspace = true }
serde_json = { workspace = true } serde_json = { workspace = true }
serde_with = "3.13.0" serde_with = "3.13.0"

View File

@@ -1,4 +1,4 @@
# BRK Query # BRK Interface
<p align="left"> <p align="left">
<a href="https://github.com/bitcoinresearchkit/brk"> <a href="https://github.com/bitcoinresearchkit/brk">
@@ -7,15 +7,15 @@
<a href="https://github.com/bitcoinresearchkit/brk/blob/main/LICENSE.md"> <a href="https://github.com/bitcoinresearchkit/brk/blob/main/LICENSE.md">
<img src="https://img.shields.io/crates/l/brk" alt="License" /> <img src="https://img.shields.io/crates/l/brk" alt="License" />
</a> </a>
<a href="https://crates.io/crates/brk_query"> <a href="https://crates.io/crates/brk_interface">
<img src="https://img.shields.io/crates/v/brk_query" alt="Version" /> <img src="https://img.shields.io/crates/v/brk_interface" alt="Version" />
</a> </a>
<a href="https://docs.rs/brk_query"> <a href="https://docs.rs/brk_interface">
<img src="https://img.shields.io/docsrs/brk_query" alt="Documentation" /> <img src="https://img.shields.io/docsrs/brk_interface" alt="Documentation" />
</a> </a>
<img src="https://img.shields.io/crates/size/brk_query" alt="Size" /> <img src="https://img.shields.io/crates/size/brk_interface" alt="Size" />
<a href="https://deps.rs/crate/brk_query"> <a href="https://deps.rs/crate/brk_interface">
<img src="https://deps.rs/crate/brk_query/latest/status.svg" alt="Dependency status"> <img src="https://deps.rs/crate/brk_interface/latest/status.svg" alt="Dependency status">
</a> </a>
<a href="https://discord.gg/HaR3wpH3nr"> <a href="https://discord.gg/HaR3wpH3nr">
<img src="https://img.shields.io/discord/1350431684562124850?label=discord" alt="Discord" /> <img src="https://img.shields.io/discord/1350431684562124850?label=discord" alt="Discord" />

View File

@@ -0,0 +1,33 @@
use std::path::Path;
use brk_computer::Computer;
use brk_indexer::Indexer;
use brk_interface::{Index, Interface, Params, ParamsOpt};
use brk_vec::{Computation, Format};
pub fn main() -> color_eyre::Result<()> {
color_eyre::install()?;
let outputs_dir = Path::new("../../_outputs");
let format = Format::Compressed;
let indexer = Indexer::forced_import(outputs_dir)?;
let computer = Computer::forced_import(outputs_dir, &indexer, Computation::Lazy, None, format)?;
let interface = Interface::build(&indexer, &computer);
dbg!(interface.search_and_format(Params {
index: Index::Height,
ids: vec!["date"].into(),
rest: ParamsOpt::default().set_from(-1),
})?);
dbg!(interface.search_and_format(Params {
index: Index::Height,
ids: vec!["date", "timestamp"].into(),
rest: ParamsOpt::default().set_from(-10).set_count(5),
})?);
Ok(())
}

View File

@@ -1,9 +1,9 @@
use clap_derive::ValueEnum;
use color_eyre::eyre::eyre; use color_eyre::eyre::eyre;
use rmcp::schemars::JsonSchema;
use serde::Deserialize; use serde::Deserialize;
#[allow(clippy::upper_case_acronyms)] #[allow(clippy::upper_case_acronyms)]
#[derive(Debug, Clone, Copy, PartialEq, Eq, ValueEnum, Deserialize)] #[derive(Debug, Clone, Copy, PartialEq, Eq, Deserialize, JsonSchema)]
pub enum Format { pub enum Format {
#[serde(alias = "json")] #[serde(alias = "json")]
JSON, JSON,
@@ -12,7 +12,6 @@ pub enum Format {
#[serde(alias = "tsv")] #[serde(alias = "tsv")]
TSV, TSV,
#[serde(alias = "md", alias = "markdown")] #[serde(alias = "md", alias = "markdown")]
#[value(alias("markdown"))]
MD, MD,
} }

View File

@@ -1,8 +1,16 @@
use std::fmt::{self, Debug}; use std::fmt::{self, Debug};
use brk_core::{
DateIndex, DecadeIndex, DifficultyEpoch, EmptyOutputIndex, HalvingEpoch, Height, InputIndex,
MonthIndex, OpReturnIndex, OutputIndex, P2AIndex, P2MSIndex, P2PK33Index, P2PK65Index,
P2PKHIndex, P2SHIndex, P2TRIndex, P2WPKHIndex, P2WSHIndex, Printable, QuarterIndex, TxIndex,
UnknownOutputIndex, WeekIndex, YearIndex,
};
use color_eyre::eyre::eyre; use color_eyre::eyre::eyre;
use schemars::JsonSchema;
use serde::{Deserialize, de::Error};
#[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord)] #[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord, JsonSchema)]
pub enum Index { pub enum Index {
DateIndex, DateIndex,
DecadeIndex, DecadeIndex,
@@ -60,53 +68,51 @@ impl Index {
] ]
} }
pub fn possible_values(&self) -> &[&str] { pub fn possible_values(&self) -> &'static [&'static str] {
// Always have the "correct" id at the end
match self { match self {
Self::DateIndex => &["d", "date", "dateindex"], Self::DateIndex => DateIndex::to_possible_strings(),
Self::DecadeIndex => &["decade", "decadeindex"], Self::DecadeIndex => DecadeIndex::to_possible_strings(),
Self::DifficultyEpoch => &["difficulty", "difficultyepoch"], Self::DifficultyEpoch => DifficultyEpoch::to_possible_strings(),
Self::EmptyOutputIndex => &["empty", "emptyoutputindex"], Self::EmptyOutputIndex => EmptyOutputIndex::to_possible_strings(),
Self::HalvingEpoch => &["halving", "halvingepoch"], Self::HalvingEpoch => HalvingEpoch::to_possible_strings(),
Self::Height => &["h", "height"], Self::Height => Height::to_possible_strings(),
Self::InputIndex => &["txin", "inputindex"], Self::InputIndex => InputIndex::to_possible_strings(),
Self::MonthIndex => &["m", "month", "monthindex"], Self::MonthIndex => MonthIndex::to_possible_strings(),
Self::OpReturnIndex => &["opreturn", "opreturnindex"], Self::OpReturnIndex => OpReturnIndex::to_possible_strings(),
Self::OutputIndex => &["txout", "outputindex"], Self::OutputIndex => OutputIndex::to_possible_strings(),
Self::P2AIndex => &["p2a", "p2aindex"], Self::P2AIndex => P2AIndex::to_possible_strings(),
Self::P2MSIndex => &["p2ms", "p2msindex"], Self::P2MSIndex => P2MSIndex::to_possible_strings(),
Self::P2PK33Index => &["p2pk33", "p2pk33index"], Self::P2PK33Index => P2PK33Index::to_possible_strings(),
Self::P2PK65Index => &["p2pk65", "p2pk65index"], Self::P2PK65Index => P2PK65Index::to_possible_strings(),
Self::P2PKHIndex => &["p2pkh", "p2pkhindex"], Self::P2PKHIndex => P2PKHIndex::to_possible_strings(),
Self::P2SHIndex => &["p2sh", "p2shindex"], Self::P2SHIndex => P2SHIndex::to_possible_strings(),
Self::P2TRIndex => &["p2tr", "p2trindex"], Self::P2TRIndex => P2TRIndex::to_possible_strings(),
Self::P2WPKHIndex => &["p2wpkh", "p2wpkhindex"], Self::P2WPKHIndex => P2WPKHIndex::to_possible_strings(),
Self::P2WSHIndex => &["p2wsh", "p2wshindex"], Self::P2WSHIndex => P2WSHIndex::to_possible_strings(),
Self::QuarterIndex => &["q", "quarter", "quarterindex"], Self::QuarterIndex => QuarterIndex::to_possible_strings(),
Self::TxIndex => &["tx", "txindex"], Self::TxIndex => TxIndex::to_possible_strings(),
Self::UnknownOutputIndex => &["unknown", "unknownoutputindex"], Self::UnknownOutputIndex => UnknownOutputIndex::to_possible_strings(),
Self::WeekIndex => &["w", "week", "weekindex"], Self::WeekIndex => WeekIndex::to_possible_strings(),
Self::YearIndex => &["y", "year", "yearindex"], Self::YearIndex => YearIndex::to_possible_strings(),
} }
} }
pub fn all_possible_values() -> Vec<String> { pub fn all_possible_values() -> Vec<&'static str> {
Self::all() Self::all()
.iter() .into_iter()
.flat_map(|i| i.possible_values().iter().map(|s| s.to_string())) .flat_map(|i| i.possible_values().iter().cloned())
.collect::<Vec<_>>() .collect::<Vec<_>>()
} }
pub fn serialize_short(&self) -> String { pub fn serialize_short(&self) -> &'static str {
self.possible_values() self.possible_values()
.iter() .iter()
.find(|str| str.len() > 1) .find(|str| str.len() > 1)
.unwrap() .unwrap()
.to_string()
} }
pub fn serialize_long(&self) -> String { pub fn serialize_long(&self) -> &'static str {
self.possible_values().last().unwrap().to_string() self.possible_values().last().unwrap()
} }
} }
@@ -148,6 +154,20 @@ impl TryFrom<&str> for Index {
impl fmt::Display for Index { impl fmt::Display for Index {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
Debug::fmt(self, f) write!(f, "{:?}", self)
}
}
impl<'de> Deserialize<'de> for Index {
fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
where
D: serde::Deserializer<'de>,
{
let str = String::deserialize(deserializer)?;
if let Ok(index) = Index::try_from(str.as_str()) {
Ok(index)
} else {
Err(Error::custom("Bad index"))
}
} }
} }

View File

@@ -3,6 +3,8 @@
#![doc = include_str!("../examples/main.rs")] #![doc = include_str!("../examples/main.rs")]
#![doc = "```"] #![doc = "```"]
use std::collections::BTreeMap;
use brk_computer::Computer; use brk_computer::Computer;
use brk_core::Result; use brk_core::Result;
use brk_indexer::Indexer; use brk_indexer::Indexer;
@@ -11,52 +13,43 @@ use tabled::settings::Style;
mod format; mod format;
mod index; mod index;
mod maybe_ids;
mod output; mod output;
mod params; mod params;
mod table; mod table;
mod vec_trees; mod vecs;
pub use format::Format; pub use format::Format;
pub use index::Index; pub use index::Index;
pub use output::{Output, Value}; pub use output::{Output, Value};
pub use params::{Params, ParamsOpt}; pub use params::{Pagination, Params, ParamsOpt};
pub use table::Tabled; pub use table::Tabled;
use vec_trees::VecTrees; use vecs::Vecs;
pub struct Query<'a> { use crate::vecs::{IdToVec, IndexToVec};
pub vec_trees: VecTrees<'a>,
pub struct Interface<'a> {
vecs: Vecs<'a>,
_indexer: &'a Indexer, _indexer: &'a Indexer,
_computer: &'a Computer, _computer: &'a Computer,
} }
impl<'a> Query<'a> { impl<'a> Interface<'a> {
pub fn build(indexer: &'a Indexer, computer: &'a Computer) -> Self { pub fn build(indexer: &'a Indexer, computer: &'a Computer) -> Self {
let mut vec_trees = VecTrees::default();
indexer
.vecs()
.vecs()
.into_iter()
.for_each(|vec| vec_trees.insert(vec));
computer
.vecs()
.into_iter()
.for_each(|vec| vec_trees.insert(vec));
Self { Self {
vec_trees, vecs: Vecs::build(indexer, computer),
_indexer: indexer, _indexer: indexer,
_computer: computer, _computer: computer,
} }
} }
pub fn search(&self, index: Index, ids: &[&str]) -> Vec<(String, &&dyn AnyCollectableVec)> { pub fn search(&self, params: &Params) -> Vec<(String, &&dyn AnyCollectableVec)> {
let tuples = ids let tuples = params
.ids
.iter() .iter()
.flat_map(|s| { .flat_map(|s| {
s.to_lowercase() s.to_lowercase()
.replace("_", "-") .replace("-", "_")
.split_whitespace() .split_whitespace()
.flat_map(|s| { .flat_map(|s| {
s.split(',') s.split(',')
@@ -65,11 +58,11 @@ impl<'a> Query<'a> {
.collect::<Vec<_>>() .collect::<Vec<_>>()
}) })
.map(|mut id| { .map(|mut id| {
let mut res = self.vec_trees.id_to_index_to_vec.get(&id); let mut res = self.vecs.id_to_index_to_vec.get(id.as_str());
if res.is_none() { if res.is_none() {
if let Ok(index) = Index::try_from(id.as_str()) { if let Ok(index) = Index::try_from(id.as_str()) {
id = index.possible_values().last().unwrap().to_string(); id = index.possible_values().last().unwrap().to_string();
res = self.vec_trees.id_to_index_to_vec.get(&id) res = self.vecs.id_to_index_to_vec.get(id.as_str())
} }
} }
(id, res) (id, res)
@@ -80,17 +73,19 @@ impl<'a> Query<'a> {
tuples tuples
.iter() .iter()
.flat_map(|(str, i_to_v)| i_to_v.get(&index).map(|vec| (str.to_owned(), vec))) .flat_map(|(str, i_to_v)| i_to_v.get(&params.index).map(|vec| (str.to_owned(), vec)))
.collect::<Vec<_>>() .collect::<Vec<_>>()
} }
pub fn format( pub fn format(
&self, &self,
vecs: Vec<(String, &&dyn AnyCollectableVec)>, vecs: Vec<(String, &&dyn AnyCollectableVec)>,
from: Option<i64>, params: &ParamsOpt,
to: Option<i64>,
format: Option<Format>,
) -> color_eyre::Result<Output> { ) -> color_eyre::Result<Output> {
let from = params.from();
let to = params.to();
let format = params.format();
let mut values = vecs let mut values = vecs
.iter() .iter()
.map(|(_, vec)| -> Result<Vec<serde_json::Value>> { .map(|(_, vec)| -> Result<Vec<serde_json::Value>> {
@@ -165,14 +160,53 @@ impl<'a> Query<'a> {
}) })
} }
pub fn search_and_format( pub fn search_and_format(&self, params: Params) -> color_eyre::Result<Output> {
self.format(self.search(&params), &params.rest)
}
pub fn id_to_index_to_vec(&self) -> &BTreeMap<&str, IndexToVec<'_>> {
&self.vecs.id_to_index_to_vec
}
pub fn index_to_id_to_vec(&self) -> &BTreeMap<Index, IdToVec<'_>> {
&self.vecs.index_to_id_to_vec
}
pub fn get_vecid_count(&self) -> usize {
self.vecs.id_count
}
pub fn get_index_count(&self) -> usize {
self.vecs.index_count
}
pub fn get_vec_count(&self) -> usize {
self.vecs.vec_count
}
pub fn get_indexes(&self) -> &[&'static str] {
&self.vecs.indexes
}
pub fn get_accepted_indexes(&self) -> &BTreeMap<&'static str, &'static [&'static str]> {
&self.vecs.accepted_indexes
}
pub fn get_vecids(&self, pagination: Pagination) -> &[&str] {
self.vecs.ids(pagination)
}
pub fn get_indexes_to_vecids(
&self, &self,
index: Index, pagination: Pagination,
ids: &[&str], ) -> BTreeMap<&'static str, Vec<&str>> {
from: Option<i64>, self.vecs.indexes_to_ids(pagination)
to: Option<i64>, }
format: Option<Format>,
) -> color_eyre::Result<Output> { pub fn get_vecids_to_indexes(
self.format(self.search(index, ids), from, to, format) &self,
pagination: Pagination,
) -> BTreeMap<&str, Vec<&'static str>> {
self.vecs.ids_to_indexes(pagination)
} }
} }

View File

@@ -0,0 +1,30 @@
use derive_deref::Deref;
use schemars::JsonSchema;
use serde::Deserialize;
#[derive(Debug, Deref, JsonSchema)]
pub struct MaybeIds(Vec<String>);
impl From<String> for MaybeIds {
fn from(value: String) -> Self {
Self(vec![value])
}
}
impl<'a> From<Vec<&'a str>> for MaybeIds {
fn from(value: Vec<&'a str>) -> Self {
Self(value.iter().map(|s| s.to_string()).collect::<Vec<_>>())
}
}
impl<'de> Deserialize<'de> for MaybeIds {
fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
where
D: serde::Deserializer<'de>,
{
let str = String::deserialize(deserializer)?;
Ok(MaybeIds(
str.split(",").map(|s| s.to_string()).collect::<Vec<_>>(),
))
}
}

View File

@@ -5,7 +5,7 @@ use tabled::Tabled as TabledTabled;
use crate::Format; use crate::Format;
#[derive(Debug)] #[derive(Debug, Serialize)]
pub enum Output { pub enum Output {
Json(Value), Json(Value),
CSV(String), CSV(String),

View File

@@ -0,0 +1,175 @@
use std::ops::Deref;
use rmcp::schemars::{self, JsonSchema};
use serde::{Deserialize, Deserializer};
use crate::{Format, Index, maybe_ids::MaybeIds};
#[derive(Debug, Deserialize, JsonSchema)]
pub struct Params {
#[serde(alias = "i")]
#[schemars(description = "Index of the values requested")]
pub index: Index,
#[serde(alias = "v")]
#[schemars(description = "Ids of the requested vecs")]
pub ids: MaybeIds,
#[serde(flatten)]
pub rest: ParamsOpt,
}
serde_with::flattened_maybe!(deserialize_rest, "rest");
impl Deref for Params {
type Target = ParamsOpt;
fn deref(&self) -> &Self::Target {
&self.rest
}
}
impl From<((Index, String), ParamsOpt)> for Params {
fn from(((index, id), rest): ((Index, String), ParamsOpt)) -> Self {
Self {
index,
ids: MaybeIds::from(id),
rest,
}
}
}
#[derive(Default, Debug, Deserialize, JsonSchema)]
pub struct ParamsOpt {
#[serde(default, alias = "f", deserialize_with = "de_unquote_i64")]
/// Inclusive starting index, if negative will be from the end
from: Option<i64>,
#[serde(default, alias = "t", deserialize_with = "de_unquote_i64")]
/// Exclusive ending index, if negative will be from the end, overrides 'count'
to: Option<i64>,
#[serde(default, alias = "c", deserialize_with = "de_unquote_usize")]
/// Number of values
count: Option<usize>,
/// Format of the output
format: Option<Format>,
}
impl ParamsOpt {
pub fn set_from(mut self, from: i64) -> Self {
self.from.replace(from);
self
}
pub fn set_to(mut self, to: i64) -> Self {
self.to.replace(to);
self
}
pub fn set_count(mut self, count: usize) -> Self {
self.count.replace(count);
self
}
pub fn set_format(mut self, format: Format) -> Self {
self.format.replace(format);
self
}
pub fn from(&self) -> Option<i64> {
self.from
}
pub fn to(&self) -> Option<i64> {
if self.to.is_none() {
if let Some(c) = self.count {
let c = c as i64;
if let Some(f) = self.from {
if f.is_positive() || f.abs() > c {
return Some(f + c);
}
} else {
return Some(c);
}
}
}
self.to
}
pub fn format(&self) -> Option<Format> {
self.format
}
}
fn de_unquote_i64<'de, D>(deserializer: D) -> Result<Option<i64>, D::Error>
where
D: Deserializer<'de>,
{
let value: Option<serde_json::Value> = Option::deserialize(deserializer)?;
match value {
None => Ok(None),
Some(serde_json::Value::String(mut s)) => {
if s.starts_with('"') && s.ends_with('"') && s.len() >= 2 {
s = s[1..s.len() - 1].to_string();
}
s.parse::<i64>().map(Some).map_err(serde::de::Error::custom)
}
Some(serde_json::Value::Number(n)) => {
// If it's a number, convert it to i64
n.as_i64()
.ok_or_else(|| serde::de::Error::custom("number out of range"))
.map(Some)
}
_ => Err(serde::de::Error::custom("expected a string or number")),
}
}
fn de_unquote_usize<'de, D>(deserializer: D) -> Result<Option<usize>, D::Error>
where
D: Deserializer<'de>,
{
let value: Option<serde_json::Value> = Option::deserialize(deserializer)?;
match value {
None => Ok(None),
Some(serde_json::Value::String(mut s)) => {
if s.starts_with('"') && s.ends_with('"') && s.len() >= 2 {
s = s[1..s.len() - 1].to_string();
}
s.parse::<usize>()
.map(Some)
.map_err(serde::de::Error::custom)
}
Some(serde_json::Value::Number(n)) => {
// If it's a number, convert it to usize
n.as_u64()
.ok_or_else(|| serde::de::Error::custom("number out of range"))
.map(|v| v as usize)
.map(Some)
}
_ => {
dbg!(value);
Err(serde::de::Error::custom("expected a string or number"))
}
}
}
#[derive(Debug, Default, Deserialize, JsonSchema)]
pub struct Pagination {
#[serde(alias = "p")]
#[schemars(description = "Pagination index")]
pub page: usize,
}
impl Pagination {
const PER_PAGE: usize = 1_000;
pub fn start(&self, len: usize) -> usize {
(self.page * Self::PER_PAGE).clamp(0, len)
}
pub fn end(&self, len: usize) -> usize {
((self.page + 1) * Self::PER_PAGE).clamp(0, len)
}
}

View File

@@ -0,0 +1,189 @@
use std::collections::BTreeMap;
use brk_computer::Computer;
use brk_indexer::Indexer;
use brk_vec::AnyCollectableVec;
use derive_deref::{Deref, DerefMut};
use crate::params::Pagination;
use super::index::Index;
#[derive(Default)]
pub struct Vecs<'a> {
pub id_to_index_to_vec: BTreeMap<&'a str, IndexToVec<'a>>,
pub index_to_id_to_vec: BTreeMap<Index, IdToVec<'a>>,
pub ids: Vec<&'a str>,
pub indexes: Vec<&'static str>,
pub accepted_indexes: BTreeMap<&'static str, &'static [&'static str]>,
pub index_count: usize,
pub id_count: usize,
pub vec_count: usize,
serialized_id_to_indexes: BTreeMap<&'a str, Vec<&'static str>>,
serialized_indexes_to_ids: BTreeMap<&'static str, Vec<&'a str>>,
}
impl<'a> Vecs<'a> {
pub fn build(indexer: &'a Indexer, computer: &'a Computer) -> Self {
let mut this = Vecs::default();
indexer
.vecs
.vecs()
.into_iter()
.for_each(|vec| this.insert(vec));
computer
.vecs
.vecs()
.into_iter()
.for_each(|vec| this.insert(vec));
let mut ids = this.id_to_index_to_vec.keys().cloned().collect::<Vec<_>>();
ids.sort_unstable_by(|a, b| {
let len_cmp = a.len().cmp(&b.len());
if len_cmp == std::cmp::Ordering::Equal {
a.cmp(b)
} else {
len_cmp
}
});
this.ids = ids;
this.id_count = this.index_to_id_to_vec.keys().count();
this.index_count = this.index_to_id_to_vec.keys().count();
this.vec_count = this
.index_to_id_to_vec
.values()
.map(|tree| tree.len())
.sum::<usize>();
this.indexes = this
.index_to_id_to_vec
.keys()
.map(|i| i.serialize_long())
.collect::<Vec<_>>();
this.accepted_indexes = this
.index_to_id_to_vec
.keys()
.map(|i| (i.serialize_long(), i.possible_values()))
.collect::<BTreeMap<_, _>>();
this.serialized_id_to_indexes = this
.id_to_index_to_vec
.iter()
.map(|(id, index_to_vec)| {
(
*id,
index_to_vec
.keys()
.map(|i| i.serialize_long())
.collect::<Vec<_>>(),
)
})
.collect();
this.serialized_indexes_to_ids = this
.index_to_id_to_vec
.iter()
.map(|(index, id_to_vec)| {
(
index.serialize_long(),
id_to_vec.keys().cloned().collect::<Vec<_>>(),
)
})
.collect();
this
}
// Not the most performant or type safe but only built once so that's okay
// Registers `vec` under both lookup trees: (id -> index -> vec) and
// (index -> id -> vec). Runs once at startup; any violation of the naming
// or uniqueness invariants below is a programming error, hence the panics.
fn insert(&mut self, vec: &'a dyn AnyCollectableVec) {
let name = vec.name();
let serialized_index = vec.index_type_to_string();
// A vec name may contain "_to_" only in a small set of whitelisted shapes
// (e.g. "up_to_*", "*relative_to_*", "cumulative_from_*"); anything else
// means the vec was misnamed.
let split = name.split("_to_").collect::<Vec<_>>();
if split.len() != 1
&& !(split.len() == 2
&& split.first().is_some_and(|s| {
s == &"up"
|| s == &"start"
|| s.ends_with("relative")
|| s.starts_with("from")
|| s == &"cumulative_up"
|| s.starts_with("cumulative_start")
|| s.starts_with("cumulative_from")
|| s == &"activity"
}))
&& !(split.len() == 3
&& split.first().is_some_and(|s| {
s == &"up"
|| s == &"start"
|| s.starts_with("from")
|| s == &"cumulative_up"
|| s == &"cumulative_start"
|| s.starts_with("cumulative_from")
})
&& split.get(1).is_some_and(|s| s.ends_with("relative")))
{
dbg!((&serialized_index, &name, &split));
unreachable!();
}
// Map the serialized index type to its `Index` variant; an unknown type
// string is also a startup bug.
let index = Index::try_from(serialized_index)
.inspect_err(|_| {
dbg!(&serialized_index);
})
.unwrap();
// Each (name, index) pair must be registered at most once.
let prev = self
.id_to_index_to_vec
.entry(name)
.or_default()
.insert(index, vec);
if prev.is_some() {
dbg!(serialized_index, name);
panic!()
}
// Mirror insertion into the index-first tree; same uniqueness invariant.
let prev = self
.index_to_id_to_vec
.entry(index)
.or_default()
.insert(name, vec);
if prev.is_some() {
dbg!(serialized_index, name);
panic!()
}
}
/// Returns the page of vec ids selected by `pagination`,
/// as a `[start, end)` slice of the sorted id list.
pub fn ids(&self, pagination: Pagination) -> &[&'_ str] {
    let total = self.ids.len();
    let page = pagination.start(total)..pagination.end(total);
    &self.ids[page]
}
/// Returns a page of (vec id -> supported indexes), using the same
/// `[start, end)` pagination semantics as `ids`.
pub fn ids_to_indexes(&self, pagination: Pagination) -> BTreeMap<&'_ str, Vec<&'static str>> {
    let len = self.serialized_id_to_indexes.len();
    let start = pagination.start(len);
    let end = pagination.end(len);
    self.serialized_id_to_indexes
        .iter()
        .skip(start)
        // `end` is an exclusive index, not a count: take the page length.
        // The previous `.take(end)` over-returned whenever `start > 0`.
        .take(end.saturating_sub(start))
        .map(|(ids, indexes)| (*ids, indexes.clone()))
        .collect()
}
/// Returns a page of (index -> supported vec ids), using the same
/// `[start, end)` pagination semantics as `ids`.
pub fn indexes_to_ids(&self, pagination: Pagination) -> BTreeMap<&'static str, Vec<&'a str>> {
    let len = self.serialized_indexes_to_ids.len();
    let start = pagination.start(len);
    let end = pagination.end(len);
    self.serialized_indexes_to_ids
        .iter()
        .skip(start)
        // `end` is an exclusive index, not a count: take the page length.
        // The previous `.take(end)` over-returned whenever `start > 0`.
        .take(end.saturating_sub(start))
        .map(|(index, ids)| (*index, ids.clone()))
        .collect()
}
}
// For one vec id: the vec registered under each supported `Index`.
#[derive(Default, Deref, DerefMut)]
pub struct IndexToVec<'a>(BTreeMap<Index, &'a dyn AnyCollectableVec>);
// For one `Index`: the vec registered under each vec id.
#[derive(Default, Deref, DerefMut)]
pub struct IdToVec<'a>(BTreeMap<&'a str, &'a dyn AnyCollectableVec>);

View File

@@ -1,28 +0,0 @@
use std::path::Path;
use brk_computer::Computer;
use brk_indexer::Indexer;
use brk_query::{Index, Query};
use brk_vec::{Computation, Format};
/// Example/debug entry point: imports the indexer and computer vecs, then
/// dumps a few height-indexed queries via `dbg!`.
pub fn main() -> color_eyre::Result<()> {
color_eyre::install()?;
// NOTE(review): relative path — assumes the binary is run from its crate
// directory; confirm the intended working directory.
let outputs_dir = Path::new("../../_outputs");
let format = Format::Compressed;
let mut indexer = Indexer::new(outputs_dir, true)?;
indexer.import_vecs()?;
// No fetcher (`None`): works purely from the already-indexed outputs.
let mut computer = Computer::new(outputs_dir, None, format);
computer.import_vecs(&indexer, Computation::Lazy)?;
let query = Query::build(&indexer, &computer);
// Negative `from` counts from the end: last 1, then last 10 values.
dbg!(query.search_and_format(Index::Height, &["date"], Some(-1), None, None)?);
dbg!(query.search_and_format(Index::Height, &["date"], Some(-10), None, None)?);
dbg!(query.search_and_format(Index::Height, &["date", "timestamp"], Some(-10), None, None)?);
Ok(())
}

View File

@@ -1,129 +0,0 @@
use std::{fmt::Display, ops::Deref, str::FromStr};
use clap::builder::PossibleValuesParser;
use clap_derive::Parser;
use serde::{Deserialize, Deserializer};
use serde_with::{OneOrMany, formats::PreferOne, serde_as};
use crate::{Format, Index};
// Query parameters shared by the CLI (via clap) and the HTTP API (via serde).
// The `///` doc comments double as clap help text, so they are left as-is.
#[serde_as]
#[derive(Debug, Deserialize, Parser)]
pub struct Params {
#[clap(short, long, value_parser = PossibleValuesParser::new(Index::all_possible_values()))]
#[serde(alias = "i")]
/// Index of the values requested
pub index: String,
// Accepts either a single string or a list (`OneOrMany`).
#[clap(short, long, value_delimiter = ' ', num_args = 1..)]
#[serde(alias = "v")]
#[serde_as(as = "OneOrMany<_, PreferOne>")]
/// Names of the values requested
pub values: Vec<String>,
// Optional range/format parameters, flattened into the same namespace.
#[clap(flatten)]
#[serde(flatten)]
pub rest: ParamsOpt,
}
// The macro creates custom deserialization code.
// You need to specify a function name and the field name of the flattened field.
serde_with::flattened_maybe!(deserialize_rest, "rest");
// Lets callers read the optional parameters (`from`, `to`, `count`,
// `format`) directly on `Params`.
impl Deref for Params {
type Target = ParamsOpt;
fn deref(&self) -> &Self::Target {
&self.rest
}
}
impl From<((String, String), ParamsOpt)> for Params {
fn from(((index, id), rest): ((String, String), ParamsOpt)) -> Self {
Self {
index,
values: vec![id],
rest,
}
}
}
// Optional range/format parameters; every field defaults to `None`.
// The `///` doc comments double as clap help text, so they are left as-is.
// Numeric fields accept quoted JSON values (e.g. "\"42\"") via the
// `de_unquote_*` deserializers.
#[serde_as]
#[derive(Debug, Deserialize, Parser)]
pub struct ParamsOpt {
#[clap(short, long, allow_hyphen_values = true)]
#[serde(default, alias = "f", deserialize_with = "de_unquote_i64")]
/// Inclusive starting index, if negative will be from the end
from: Option<i64>,
#[clap(short, long, allow_hyphen_values = true)]
#[serde(default, alias = "t", deserialize_with = "de_unquote_i64")]
/// Exclusive ending index, if negative will be from the end, overrides 'count'
to: Option<i64>,
#[clap(short, long, allow_hyphen_values = true)]
#[serde(default, alias = "c", deserialize_with = "de_unquote_usize")]
/// Number of values
count: Option<usize>,
#[clap(short = 'F', long)]
/// Format of the output
format: Option<Format>,
}
impl ParamsOpt {
    /// Inclusive starting index; negative counts from the end.
    pub fn from(&self) -> Option<i64> {
        self.from
    }

    /// Exclusive ending index.
    ///
    /// `to` always wins when set. Otherwise, when `count` is set, the end is
    /// `from + count` — except when `from` is negative and fewer than `count`
    /// items remain until the end of the vec, in which case `None` is
    /// returned so the range is implicitly capped at the end.
    pub fn to(&self) -> Option<i64> {
        if self.to.is_none() {
            if let Some(c) = self.count {
                let c = c as i64;
                if let Some(f) = self.from {
                    // Was `f.is_positive()`, which skipped `from == 0` and
                    // made `?from=0&count=N` return the whole vec instead of
                    // N values (inconsistent with the `from == None` branch).
                    if !f.is_negative() || f.abs() > c {
                        return Some(f + c);
                    }
                } else {
                    return Some(c);
                }
            }
        }
        self.to
    }

    /// Requested output format, if any.
    pub fn format(&self) -> Option<Format> {
        self.format
    }
}
// Thin typed wrapper so serde's `deserialize_with` attribute can name a
// concrete (non-generic-over-F) function.
fn de_unquote_i64<'de, D>(deserializer: D) -> Result<Option<i64>, D::Error>
where
D: Deserializer<'de>,
{
de_unquote(deserializer)
}
// Thin typed wrapper so serde's `deserialize_with` attribute can name a
// concrete (non-generic-over-F) function.
fn de_unquote_usize<'de, D>(deserializer: D) -> Result<Option<usize>, D::Error>
where
D: Deserializer<'de>,
{
de_unquote(deserializer)
}
fn de_unquote<'de, D, F>(deserializer: D) -> Result<Option<F>, D::Error>
where
D: Deserializer<'de>,
F: FromStr + Display,
<F as std::str::FromStr>::Err: Display,
{
let opt: Option<String> = Option::deserialize(deserializer)?;
let s = match opt {
None => return Ok(None),
Some(mut s) => {
// strip any leading/trailing quotes
if s.starts_with('"') && s.ends_with('"') && s.len() >= 2 {
s = s[1..s.len() - 1].to_string();
}
s
}
};
s.parse::<F>()
.map(Some)
.map_err(|e| serde::de::Error::custom(format!("cannot parse `{}` as type: {}", s, e)))
}

View File

@@ -1,116 +0,0 @@
use std::collections::BTreeMap;
use brk_vec::AnyCollectableVec;
use derive_deref::{Deref, DerefMut};
use super::index::Index;
// Double-keyed registry of every collectable vec: reachable by id first
// (`id_to_index_to_vec`) or by index first (`index_to_id_to_vec`).
// Both trees reference the same vecs.
#[derive(Default)]
pub struct VecTrees<'a> {
pub id_to_index_to_vec: BTreeMap<String, IndexToVec<'a>>,
pub index_to_id_to_vec: BTreeMap<Index, IdToVec<'a>>,
}
impl<'a> VecTrees<'a> {
// Not the most performant or type safe but only built once so that's okay
// Registers `vec` under both trees after validating its name. Runs once at
// startup; any violation panics because it indicates a programming error.
pub fn insert(&mut self, vec: &'a dyn AnyCollectableVec) {
let name = vec.name();
// Names follow "{index}_to_{id...}"; extra "_to_" segments are allowed
// only in a few whitelisted shapes (e.g. "*_up_to_*", "*relative*").
let split = name.split("_to_").collect::<Vec<_>>();
if split.len() != 2
&& !(split.len() == 3
&& split.get(1).is_some_and(|s| {
s == &"up"
|| s == &"start"
|| s.ends_with("relative")
|| s.starts_with("from")
|| s == &"cumulative_up"
|| s.starts_with("cumulative_start")
|| s.starts_with("cumulative_from")
|| s == &"activity"
}))
&& !(split.len() == 4
&& split.get(1).is_some_and(|s| {
s == &"up"
|| s == &"start"
|| s.starts_with("from")
|| s == &"cumulative_up"
|| s == &"cumulative_start"
|| s.starts_with("cumulative_from")
})
&& split.get(2).is_some_and(|s| s.ends_with("relative")))
{
dbg!(&name, &split);
panic!();
}
// Derive the index from the vec's index type: last "::" path segment,
// lowercased (e.g. "brk_core::Height" -> "height").
let str = vec
.index_type_to_string()
.split("::")
.last()
.unwrap()
.to_lowercase();
let index = Index::try_from(str.as_str())
.inspect_err(|_| {
dbg!(&str);
})
.unwrap();
// The name's first segment must match the index it is stored under.
if split[0] != index.to_string().to_lowercase() {
dbg!(&name, split[0], index.to_string());
panic!();
}
// The id is everything after the index, hyphenated for URL use.
let key = split[1..].join("_to_").to_string().replace("_", "-");
// Each (id, index) pair must be registered at most once.
let prev = self
.id_to_index_to_vec
.entry(key.clone())
.or_default()
.insert(index, vec);
if prev.is_some() {
dbg!(&key, str, name);
panic!()
}
// Mirror insertion into the index-first tree; same uniqueness invariant.
let prev = self
.index_to_id_to_vec
.entry(index)
.or_default()
.insert(key.clone(), vec);
if prev.is_some() {
dbg!(&key, str, name);
panic!()
}
}
// Serializes the id-first tree as (id -> list of supported index names).
pub fn serialize_id_to_index_to_vec(&self) -> BTreeMap<String, Vec<String>> {
self.id_to_index_to_vec
.iter()
.map(|(id, index_to_vec)| {
(
id.to_string(),
index_to_vec
.keys()
.map(|i| i.serialize_long())
.collect::<Vec<_>>(),
)
})
.collect()
}
// Serializes the index-first tree as (index name -> list of supported ids).
pub fn serialize_index_to_id_to_vec(&self) -> BTreeMap<String, Vec<String>> {
self.index_to_id_to_vec
.iter()
.map(|(index, id_to_vec)| {
(
index.serialize_long(),
id_to_vec
.keys()
.map(|id| id.to_string())
.collect::<Vec<_>>(),
)
})
.collect()
}
}
// For one vec id: the vec registered under each supported `Index`.
#[derive(Default, Deref, DerefMut)]
pub struct IndexToVec<'a>(BTreeMap<Index, &'a dyn AnyCollectableVec>);
// For one `Index`: the vec registered under each vec id.
#[derive(Default, Deref, DerefMut)]
pub struct IdToVec<'a>(BTreeMap<String, &'a dyn AnyCollectableVec>);

View File

@@ -1,6 +1,6 @@
[package] [package]
name = "brk_server" name = "brk_server"
description = "A crate that serves Bitcoin data and swappable front-ends, built on top of brk_indexer, brk_computer and brk_query" description = "A crate that serves Bitcoin data and swappable front-ends, built on top of brk_indexer, brk_computer and brk_interface"
version.workspace = true version.workspace = true
edition.workspace = true edition.workspace = true
license.workspace = true license.workspace = true
@@ -12,13 +12,13 @@ axum = { workspace = true }
bitcoincore-rpc = { workspace = true } bitcoincore-rpc = { workspace = true }
brk_bundler = { workspace = true } brk_bundler = { workspace = true }
brk_computer = { workspace = true } brk_computer = { workspace = true }
brk_exit = { workspace = true }
brk_core = { workspace = true } brk_core = { workspace = true }
brk_exit = { workspace = true }
brk_fetcher = { workspace = true } brk_fetcher = { workspace = true }
brk_indexer = { workspace = true } brk_indexer = { workspace = true }
brk_logger = { workspace = true } brk_logger = { workspace = true }
brk_parser = { workspace = true } brk_parser = { workspace = true }
brk_query = { workspace = true } brk_interface = { workspace = true }
brk_vec = { workspace = true } brk_vec = { workspace = true }
clap = { workspace = true } clap = { workspace = true }
clap_derive = { workspace = true } clap_derive = { workspace = true }
@@ -26,12 +26,12 @@ color-eyre = { workspace = true }
jiff = { workspace = true } jiff = { workspace = true }
log = { workspace = true } log = { workspace = true }
minreq = { workspace = true } minreq = { workspace = true }
oxc = { version = "0.73.0", features = ["codegen", "minifier"] } rmcp = { workspace = true }
serde = { workspace = true } serde = { workspace = true }
tokio = { workspace = true } tokio = { workspace = true }
tower-http = { version = "0.6.6", features = ["compression-full", "trace"] } tower-http = { version = "0.6.6", features = ["compression-full", "trace"] }
zip = "4.1.0"
tracing = "0.1.41" tracing = "0.1.41"
zip = "4.1.0"
[package.metadata.cargo-machete] [package.metadata.cargo-machete]
ignored = ["clap"] ignored = ["clap"]

View File

@@ -31,11 +31,11 @@
</a> </a>
</p> </p>
A crate that serves Bitcoin data and swappable front-ends, built on top of `brk_indexer`, `brk_computer` and `brk_query`. A crate that serves Bitcoin data and swappable front-ends, built on top of `brk_indexer`, `brk_computer` and `brk_interface`.
The file handler, will serve the website specified by the user if any, which can be *no website*, *default* or *custom* (which is a blank folder for people to experiment). If a website is specified and the server is ran outside of the brk project and thus can't find the requested website, it will download the whole project with the correct version from Github and store it in `.brk` to be able to serve to website. This is due to the crate size limit on [crates.io](https://crates.io) and the various shenanigans that need to be done to have a website in a crate. The file handler, will serve the website specified by the user if any, which can be *no website*, *default* or *custom* (which is a blank folder for people to experiment). If a website is specified and the server is ran outside of the brk project and thus can't find the requested website, it will download the whole project with the correct version from Github and store it in `.brk` to be able to serve to website. This is due to the crate size limit on [crates.io](https://crates.io) and the various shenanigans that need to be done to have a website in a crate.
The API uses `brk_query` and so inherites all of its features including formats. The API uses `brk_interface` and so inherites all of its features including formats.
## Endpoints ## Endpoints
@@ -55,7 +55,11 @@ Count of all possible variants
#### [`GET /api/vecs/indexes`](https://bitcoinresearchkit.org/api/vecs/indexes) #### [`GET /api/vecs/indexes`](https://bitcoinresearchkit.org/api/vecs/indexes)
A list of all possible vec indexes and their accepted variants Get a list of all possible indexes
#### [`GET /api/vecs/accepted-indexes`](https://bitcoinresearchkit.org/api/vecs/accepted-indexes)
Get a list of possible indexes and all their accepted variants
#### [`GET /api/vecs/ids`](https://bitcoinresearchkit.org/api/vecs/ids) #### [`GET /api/vecs/ids`](https://bitcoinresearchkit.org/api/vecs/ids)
@@ -73,7 +77,7 @@ A list of all possible vec ids and their supported vec indexes
A list of all possible vec indexes and their supported vec ids A list of all possible vec indexes and their supported vec ids
#### `GET /api/{INDEX}-to-{ID}` #### `GET /api/vecs/{INDEX}-to-{ID}`
This endpoint retrieves data based on the specified vector index and id. This endpoint retrieves data based on the specified vector index and id.
@@ -88,22 +92,27 @@ This endpoint retrieves data based on the specified vector index and id.
**Examples:** **Examples:**
``` ```sh
GET /api/date-to-close # GET /api/vecs/date-to-close
GET /date-to-close?from=-100 curl https://bitcoinresearchkit.org/api/vecs/date-to-close
GET /date-to-close?count=100&format=csv
# GET /api/vecs/date-to-close?from=-100
curl https://bitcoinresearchkit.org/api/vecs/date-to-close?from=-100
# GET /api/vecs/date-to-close?count=100&format=csv
curl https://bitcoinresearchkit.org/api/vecs/date-to-close?count=100&format=csv
``` ```
#### `GET /api/query` #### `GET /api/vecs/query`
This endpoint retrieves data based on the specified vector index and values. This endpoint retrieves data based on the specified vector index and ids.
**Parameters:** **Parameters:**
| Parameter | Type | Required | Description | | Parameter | Type | Required | Description |
| --- | --- | --- | --- | | --- | --- | --- | --- |
| `index` | `VecIndex` | Yes | The vector index to query. | | `index` | `VecIndex` | Yes | The vector index to query. |
| `values` | `VecId[]` | Yes | A comma or space-separated list of vector IDs to retrieve. | | `ids` | `VecId[]` | Yes | A comma or space-separated list of vector IDs to retrieve. |
| `from` | `signed int` | No | Inclusive starting index for pagination (default is 0). | | `from` | `signed int` | No | Inclusive starting index for pagination (default is 0). |
| `to` | `signed int` | No | Exclusive ending index for pagination (default is the total number of results). Overrides `count` | | `to` | `signed int` | No | Exclusive ending index for pagination (default is the total number of results). Overrides `count` |
| `count` | `unsigned int` | No | The number of values requested | | `count` | `unsigned int` | No | The number of values requested |
@@ -111,9 +120,12 @@ This endpoint retrieves data based on the specified vector index and values.
**Examples:** **Examples:**
``` ```sh
GET /api/query?index=date&values=ohlc # GET /api/vecs/query?index=date&ids=ohlc
GET /api/query?index=week&values=ohlc,block-interval-average&from=0&to=20&format=md curl https://bitcoinresearchkit.org/api/vecs/query?index=date&ids=ohlc
# GET /api/vecs/query?index=week&ids=ohlc,block-interval-average&from=0&to=20&format=md
curl https://bitcoinresearchkit.org/api/vecs/query?index=week&ids=ohlc,block-interval-average&from=0&to=20&format=md
``` ```
### Meta ### Meta

View File

@@ -31,15 +31,12 @@ pub fn main() -> color_eyre::Result<()> {
let format = Format::Compressed; let format = Format::Compressed;
let mut indexer = Indexer::new(outputs_dir, true)?; let mut indexer = Indexer::forced_import(outputs_dir)?;
indexer.import_stores()?;
indexer.import_vecs()?;
let fetcher = Some(Fetcher::import(None)?); let fetcher = Some(Fetcher::import(None)?);
let mut computer = Computer::new(outputs_dir, fetcher, format); let mut computer =
computer.import_stores(&indexer)?; Computer::forced_import(outputs_dir, &indexer, Computation::Lazy, fetcher, format)?;
computer.import_vecs(&indexer, Computation::Lazy)?;
tokio::runtime::Builder::new_multi_thread() tokio::runtime::Builder::new_multi_thread()
.enable_all() .enable_all()
@@ -58,7 +55,7 @@ pub fn main() -> color_eyre::Result<()> {
loop { loop {
let block_count = rpc.get_block_count()?; let block_count = rpc.get_block_count()?;
let starting_indexes = indexer.index(&parser, rpc, &exit)?; let starting_indexes = indexer.index(&parser, rpc, &exit, true)?;
computer.compute(&mut indexer, starting_indexes, &exit)?; computer.compute(&mut indexer, starting_indexes, &exit)?;

View File

@@ -1,6 +1,6 @@
use std::{fs, io, path::Path}; use std::{fs, io, path::Path};
use brk_query::{Index, Query}; use brk_interface::{Index, Interface};
use crate::{VERSION, Website}; use crate::{VERSION, Website};
@@ -11,7 +11,7 @@ pub trait Bridge {
fn generate_bridge_file(&self, website: Website, websites_path: &Path) -> io::Result<()>; fn generate_bridge_file(&self, website: Website, websites_path: &Path) -> io::Result<()>;
} }
impl Bridge for Query<'static> { impl Bridge for Interface<'static> {
fn generate_bridge_file(&self, website: Website, websites_path: &Path) -> io::Result<()> { fn generate_bridge_file(&self, website: Website, websites_path: &Path) -> io::Result<()> {
if website.is_none() { if website.is_none() {
return Ok(()); return Ok(());
@@ -68,8 +68,7 @@ export const VERSION = \"v{}\";
contents += " return {\n"; contents += " return {\n";
self.vec_trees self.id_to_index_to_vec()
.id_to_index_to_vec
.iter() .iter()
.for_each(|(id, index_to_vec)| { .for_each(|(id, index_to_vec)| {
let indexes = index_to_vec let indexes = index_to_vec

View File

@@ -1,10 +1,11 @@
use axum::{ use axum::{
Json, Json,
extract::{Query as AxumQuery, State}, extract::{Query, State},
http::{HeaderMap, StatusCode}, http::{HeaderMap, StatusCode},
response::{IntoResponse, Response}, response::{IntoResponse, Response},
}; };
use brk_query::{Format, Index, Output, Params}; use brk_core::DateIndex;
use brk_interface::{Format, Output, Params};
use brk_vec::{CollectableVec, StoredVec}; use brk_vec::{CollectableVec, StoredVec};
use color_eyre::eyre::eyre; use color_eyre::eyre::eyre;
@@ -20,7 +21,7 @@ const MAX_WEIGHT: usize = 320_000;
pub async fn handler( pub async fn handler(
headers: HeaderMap, headers: HeaderMap,
query: AxumQuery<Params>, query: Query<Params>,
State(app_state): State<AppState>, State(app_state): State<AppState>,
) -> Response { ) -> Response {
match req_to_response_res(headers, query, app_state) { match req_to_response_res(headers, query, app_state) {
@@ -36,33 +37,24 @@ pub async fn handler(
fn req_to_response_res( fn req_to_response_res(
headers: HeaderMap, headers: HeaderMap,
AxumQuery(Params { Query(params): Query<Params>,
index, AppState { interface, .. }: AppState,
values,
rest,
}): AxumQuery<Params>,
AppState { query, .. }: AppState,
) -> color_eyre::Result<Response> { ) -> color_eyre::Result<Response> {
let index = Index::try_from(index.as_str())?; let vecs = interface.search(&params);
let vecs = query.search(
index,
&values.iter().map(|v| v.as_str()).collect::<Vec<_>>(),
);
if vecs.is_empty() { if vecs.is_empty() {
return Ok(Json(vec![] as Vec<usize>).into_response()); return Ok(Json(vec![] as Vec<usize>).into_response());
} }
let from = rest.from(); let from = params.from();
let to = rest.to(); let to = params.to();
let format = rest.format(); let format = params.format();
let weight = vecs let weight = vecs
.iter() .iter()
.map(|(_, v)| { .map(|(_, v)| {
let len = v.len(); let len = v.len();
let count = StoredVec::<usize, usize>::range_count(from, to, len); let count = StoredVec::<DateIndex, usize>::range_count(from, to, len);
count * v.value_type_to_size_of() count * v.value_type_to_size_of()
}) })
.sum::<usize>(); .sum::<usize>();
@@ -91,15 +83,15 @@ fn req_to_response_res(
} }
} }
let output = query.format(vecs, from, to, format)?; let output = interface.format(vecs, &params.rest)?;
let mut response = match output { let mut response = match output {
Output::CSV(s) => s.into_response(), Output::CSV(s) => s.into_response(),
Output::TSV(s) => s.into_response(), Output::TSV(s) => s.into_response(),
Output::Json(v) => match v { Output::Json(v) => match v {
brk_query::Value::Single(v) => Json(v).into_response(), brk_interface::Value::Single(v) => Json(v).into_response(),
brk_query::Value::List(v) => Json(v).into_response(), brk_interface::Value::List(v) => Json(v).into_response(),
brk_query::Value::Matrix(v) => Json(v).into_response(), brk_interface::Value::Matrix(v) => Json(v).into_response(),
}, },
Output::MD(s) => s.into_response(), Output::MD(s) => s.into_response(),
}; };

View File

@@ -1,5 +1,3 @@
use std::collections::BTreeMap;
use axum::{ use axum::{
Json, Router, Json, Router,
extract::{Path, Query, State}, extract::{Path, Query, State},
@@ -7,131 +5,132 @@ use axum::{
response::{IntoResponse, Redirect, Response}, response::{IntoResponse, Redirect, Response},
routing::get, routing::get,
}; };
use brk_query::{Params, ParamsOpt}; use brk_interface::{Index, Pagination, Params, ParamsOpt};
use super::AppState; use super::AppState;
mod explorer; mod explorer;
mod query; mod interface;
pub use query::Bridge; pub use interface::Bridge;
pub trait ApiRoutes { pub trait ApiRoutes {
fn add_api_routes(self) -> Self; fn add_api_routes(self) -> Self;
} }
const TO_SEPARATOR: &str = "-to-";
impl ApiRoutes for Router<AppState> { impl ApiRoutes for Router<AppState> {
fn add_api_routes(self) -> Self { fn add_api_routes(self) -> Self {
self.route("/api/query", get(query::handler)) self.route(
.route("/api/vecs/id-count", get(id_count_handler)) "/api/vecs/index-count",
.route("/api/vecs/index-count", get(index_count_handler)) get(async |State(app_state): State<AppState>| -> Response {
.route("/api/vecs/variant-count", get(variant_count_handler)) Json(app_state.interface.get_index_count()).into_response()
.route("/api/vecs/ids", get(ids_handler)) }),
.route("/api/vecs/indexes", get(indexes_handler)) )
.route("/api/vecs/variants", get(variants_handler)) .route(
.route("/api/vecs/id-to-indexes", get(id_to_indexes_handler)) "/api/vecs/id-count",
.route("/api/vecs/index-to-ids", get(index_to_ids_handler)) get(async |State(app_state): State<AppState>| -> Response {
.route("/api/{variant}", get(variant_handler)) Json(app_state.interface.get_vecid_count()).into_response()
.route( }),
"/api", )
get(|| async { .route(
Redirect::temporary( "/api/vecs/vec-count",
"https://github.com/bitcoinresearchkit/brk/tree/main/crates/brk_server#api", get(async |State(app_state): State<AppState>| -> Response {
) Json(app_state.interface.get_vec_count()).into_response()
}), }),
) )
.route(
"/api/vecs/indexes",
get(async |State(app_state): State<AppState>| -> Response {
Json(app_state.interface.get_indexes()).into_response()
}),
)
.route(
"/api/vecs/accepted-indexes",
get(async |State(app_state): State<AppState>| -> Response {
Json(app_state.interface.get_accepted_indexes()).into_response()
}),
)
.route(
"/api/vecs/ids",
get(
async |State(app_state): State<AppState>,
Query(pagination): Query<Pagination>|
-> Response {
Json(app_state.interface.get_vecids(pagination)).into_response()
},
),
)
.route(
"/api/vecs/indexes-to-ids",
get(
async |State(app_state): State<AppState>,
Query(pagination): Query<Pagination>|
-> Response {
Json(app_state.interface.get_indexes_to_vecids(pagination)).into_response()
},
),
)
.route(
"/api/vecs/ids-to-indexes",
get(
async |State(app_state): State<AppState>,
Query(pagination): Query<Pagination>|
-> Response {
Json(app_state.interface.get_vecids_to_indexes(pagination)).into_response()
},
),
)
// .route("/api/vecs/variants", get(variants_handler))
.route("/api/vecs/query", get(interface::handler))
.route(
"/api/vecs/{variant}",
get(
async |headers: HeaderMap,
Path(variant): Path<String>,
Query(params_opt): Query<ParamsOpt>,
state: State<AppState>|
-> Response {
let variant = variant.replace("_", "-");
let mut split = variant.split(TO_SEPARATOR);
let params = Params::from((
(
Index::try_from(split.next().unwrap()).unwrap(),
split.collect::<Vec<_>>().join(TO_SEPARATOR),
),
params_opt,
));
interface::handler(headers, Query(params), state).await
},
),
)
.route(
"/api",
get(|| async {
Redirect::temporary(
"https://github.com/bitcoinresearchkit/brk/tree/main/crates/brk_server#api",
)
}),
)
} }
} }
pub async fn ids_handler(State(app_state): State<AppState>) -> Response { // pub async fn variants_handler(State(app_state): State<AppState>) -> Response {
Json( // Json(
app_state // app_state
.query // .query
.vec_trees // .vec_trees
.id_to_index_to_vec // .index_to_id_to_vec
.keys() // .iter()
.collect::<Vec<_>>(), // .flat_map(|(index, id_to_vec)| {
) // let index_ser = index.serialize_long();
.into_response() // id_to_vec
} // .keys()
// .map(|id| format!("{}-to-{}", index_ser, id))
pub async fn variant_count_handler(State(app_state): State<AppState>) -> Response { // .collect::<Vec<_>>()
Json( // })
app_state // .collect::<Vec<_>>(),
.query // )
.vec_trees // .into_response()
.index_to_id_to_vec // }
.values()
.map(|tree| tree.len())
.sum::<usize>(),
)
.into_response()
}
pub async fn id_count_handler(State(app_state): State<AppState>) -> Response {
Json(app_state.query.vec_trees.id_to_index_to_vec.keys().count()).into_response()
}
pub async fn index_count_handler(State(app_state): State<AppState>) -> Response {
Json(app_state.query.vec_trees.index_to_id_to_vec.keys().count()).into_response()
}
pub async fn indexes_handler(State(app_state): State<AppState>) -> Response {
Json(
app_state
.query
.vec_trees
.index_to_id_to_vec
.keys()
.map(|i| (i.to_string().to_lowercase(), i.possible_values()))
.collect::<BTreeMap<_, _>>(),
)
.into_response()
}
pub async fn variants_handler(State(app_state): State<AppState>) -> Response {
Json(
app_state
.query
.vec_trees
.index_to_id_to_vec
.iter()
.flat_map(|(index, id_to_vec)| {
let index_ser = index.serialize_long();
id_to_vec
.keys()
.map(|id| format!("{}-to-{}", index_ser, id))
.collect::<Vec<_>>()
})
.collect::<Vec<_>>(),
)
.into_response()
}
pub async fn id_to_indexes_handler(State(app_state): State<AppState>) -> Response {
Json(app_state.query.vec_trees.serialize_id_to_index_to_vec()).into_response()
}
pub async fn index_to_ids_handler(State(app_state): State<AppState>) -> Response {
Json(app_state.query.vec_trees.serialize_index_to_id_to_vec()).into_response()
}
const TO_SEPARATOR: &str = "-to-";
pub async fn variant_handler(
headers: HeaderMap,
Path(variant): Path<String>,
Query(params_opt): Query<ParamsOpt>,
state: State<AppState>,
) -> Response {
let variant = variant.replace("_", "-");
let mut split = variant.split(TO_SEPARATOR);
let params = Params::from((
(
split.next().unwrap().to_string(),
split.collect::<Vec<_>>().join(TO_SEPARATOR),
),
params_opt,
));
query::handler(headers, Query(params), state).await
}

View File

@@ -23,7 +23,7 @@ use brk_bundler::bundle;
use brk_computer::Computer; use brk_computer::Computer;
use brk_core::dot_brk_path; use brk_core::dot_brk_path;
use brk_indexer::Indexer; use brk_indexer::Indexer;
use brk_query::Query; use brk_interface::Interface;
use color_eyre::owo_colors::OwoColorize; use color_eyre::owo_colors::OwoColorize;
use files::FilesRoutes; use files::FilesRoutes;
use log::{error, info}; use log::{error, info};
@@ -32,16 +32,16 @@ use tower_http::{compression::CompressionLayer, trace::TraceLayer};
mod api; mod api;
mod files; mod files;
mod mcp;
mod traits; mod traits;
pub use files::Website; pub use files::Website;
use mcp::*;
use tracing::Span; use tracing::Span;
#[derive(Clone)] #[derive(Clone)]
pub struct AppState { pub struct AppState {
// indexer: &'static Indexer, interface: &'static Interface<'static>,
// computer: &'static Computer,
query: &'static Query<'static>,
website: Website, website: Website,
websites_path: Option<PathBuf>, websites_path: Option<PathBuf>,
} }
@@ -67,7 +67,7 @@ impl Server {
pub fn new(indexer: Indexer, computer: Computer, website: Website) -> color_eyre::Result<Self> { pub fn new(indexer: Indexer, computer: Computer, website: Website) -> color_eyre::Result<Self> {
let indexer = Box::leak(Box::new(indexer)); let indexer = Box::leak(Box::new(indexer));
let computer = Box::leak(Box::new(computer)); let computer = Box::leak(Box::new(computer));
let query = Box::leak(Box::new(Query::build(indexer, computer))); let interface = Box::leak(Box::new(Interface::build(indexer, computer)));
let websites_path = if website.is_some() { let websites_path = if website.is_some() {
let websites_dev_path = Path::new(DEV_PATH).join(WEBSITES); let websites_dev_path = Path::new(DEV_PATH).join(WEBSITES);
@@ -99,7 +99,7 @@ impl Server {
downloaded_websites_path downloaded_websites_path
}; };
query.generate_bridge_file(website, websites_path.as_path())?; interface.generate_bridge_file(website, websites_path.as_path())?;
Some(websites_path) Some(websites_path)
} else { } else {
@@ -107,7 +107,7 @@ impl Server {
}; };
Ok(Self(AppState { Ok(Self(AppState {
query, interface,
website, website,
websites_path, websites_path,
})) }))
@@ -162,6 +162,7 @@ impl Server {
let router = Router::new() let router = Router::new()
.add_api_routes() .add_api_routes()
.add_website_routes(state.website) .add_website_routes(state.website)
.add_mcp_routes(state.interface)
.route("/version", get(Json(VERSION))) .route("/version", get(Json(VERSION)))
.with_state(state) .with_state(state)
.layer(compression_layer) .layer(compression_layer)

View File

@@ -0,0 +1,153 @@
use brk_interface::{Interface, Pagination, Params};
use rmcp::{
Error as McpError, RoleServer, ServerHandler,
model::{
CallToolResult, Content, Implementation, InitializeRequestParam, InitializeResult,
ProtocolVersion, ServerCapabilities, ServerInfo,
},
service::RequestContext,
tool,
};
// MCP tool server exposing the BRK `Interface` over rmcp.
#[derive(Clone)]
pub struct API {
interface: &'static Interface<'static>,
}
// Crate version reported by the `get_version` tool.
const VERSION: &str = env!("CARGO_PKG_VERSION");
#[tool(tool_box)]
impl API {
pub fn new(interface: &'static Interface<'static>) -> Self {
Self { interface }
}
#[tool(description = "
Get the count of all existing indexes
")]
async fn get_index_count(&self) -> Result<CallToolResult, McpError> {
Ok(CallToolResult::success(vec![
Content::json(self.interface.get_index_count()).unwrap(),
]))
}
#[tool(description = "
Get the count of all existing vec ids
")]
async fn get_vecid_count(&self) -> Result<CallToolResult, McpError> {
Ok(CallToolResult::success(vec![
Content::json(self.interface.get_vecid_count()).unwrap(),
]))
}
#[tool(description = "
Get the count of all existing vecs
")]
async fn get_variant_count(&self) -> Result<CallToolResult, McpError> {
Ok(CallToolResult::success(vec![
Content::json(self.interface.get_vec_count()).unwrap(),
]))
}
#[tool(description = "
Get the list of all existing indexes
")]
async fn get_indexes(&self) -> Result<CallToolResult, McpError> {
Ok(CallToolResult::success(vec![
Content::json(self.interface.get_indexes()).unwrap(),
]))
}
#[tool(description = "
Get an object which has all existing indexes as keys and a list of their accepted variants as values
")]
async fn get_accepted_indexes(&self) -> Result<CallToolResult, McpError> {
Ok(CallToolResult::success(vec![
Content::json(self.interface.get_accepted_indexes()).unwrap(),
]))
}
#[tool(description = "
Get the list of all existing vec ids
")]
async fn get_vecids(
&self,
#[tool(aggr)] pagination: Pagination,
) -> Result<CallToolResult, McpError> {
Ok(CallToolResult::success(vec![
Content::json(self.interface.get_vecids(pagination)).unwrap(),
]))
}
#[tool(description = "
Get an object which has all existing indexes as keys and a list of ids of vecs which support that index as values
")]
async fn get_indexes_to_vecids(
&self,
#[tool(aggr)] pagination: Pagination,
) -> Result<CallToolResult, McpError> {
Ok(CallToolResult::success(vec![
Content::json(self.interface.get_indexes_to_vecids(pagination)).unwrap(),
]))
}
#[tool(description = "
Get an object which has all existing vec ids as keys and a list of indexes supported by that vec id as values
")]
async fn get_vecids_to_indexes(
&self,
#[tool(aggr)] pagination: Pagination,
) -> Result<CallToolResult, McpError> {
Ok(CallToolResult::success(vec![
Content::json(self.interface.get_vecids_to_indexes(pagination)).unwrap(),
]))
}
#[tool(description = "Get one or multiple vecs depending on given parameters")]
fn get_vecs(&self, #[tool(aggr)] params: Params) -> Result<CallToolResult, McpError> {
Ok(CallToolResult::success(vec![
Content::json(self.interface.search_and_format(params).unwrap()).unwrap(),
]))
}
#[tool(description = "
Get the running version of the Bitcoin Research Kit
")]
async fn get_version(&self) -> Result<CallToolResult, McpError> {
    // VERSION is prefixed with "v" for display (e.g. "v1.2.3").
    let text = format!("v{VERSION}");
    Ok(CallToolResult::success(vec![Content::text(text)]))
}
}
#[tool(tool_box)]
impl ServerHandler for API {
    /// Static server metadata advertised to MCP clients.
    fn get_info(&self) -> ServerInfo {
        ServerInfo {
            protocol_version: ProtocolVersion::V_2025_03_26,
            // Only the `tools` capability is enabled.
            capabilities: ServerCapabilities::builder().enable_tools().build(),
            // Name/version derived from build-time environment.
            server_info: Implementation::from_build_env(),
            instructions: Some(
                "
This server provides an interface to communicate with a running instance of the Bitcoin Research Kit (brk).
Multiple tools are at your disposal including ones to fetch all sorts of Bitcoin on-chain data.
Arrays are also called Vectors (or Vecs).
"
                .to_string(),
            ),
        }
    }
    /// Handles the MCP `initialize` request.
    ///
    /// When served over HTTP, axum request parts are available in the request
    /// context extensions; the headers and URI are logged for debugging.
    async fn initialize(
        &self,
        _request: InitializeRequestParam,
        context: RequestContext<RoleServer>,
    ) -> Result<InitializeResult, McpError> {
        if let Some(http_request_part) = context.extensions.get::<axum::http::request::Parts>() {
            let initialize_headers = &http_request_part.headers;
            let initialize_uri = &http_request_part.uri;
            tracing::info!(?initialize_headers, %initialize_uri, "initialize from http server");
        }
        // Reply with the same static info as `get_info`.
        Ok(self.get_info())
    }
}

View File

@@ -0,0 +1,32 @@
use axum::Router;
use brk_interface::Interface;
use rmcp::transport::{
StreamableHttpServerConfig,
streamable_http_server::{StreamableHttpService, session::local::LocalSessionManager},
};
mod api;
use api::*;
use crate::AppState;
/// Extension trait for mounting the MCP HTTP endpoint on an axum `Router`.
pub trait MCPRoutes {
    // `interface` must be `'static` because the service factory outlives the call.
    fn add_mcp_routes(self, interface: &'static Interface<'static>) -> Self;
}
impl MCPRoutes for Router<AppState> {
fn add_mcp_routes(self, interface: &'static Interface<'static>) -> Self {
let config = StreamableHttpServerConfig {
// stateful_mode: false,
..Default::default()
};
let service = StreamableHttpService::new(
move || Ok(API::new(interface)),
LocalSessionManager::default().into(),
config,
);
self.nest_service("/mcp", service)
}
}

View File

@@ -10,9 +10,7 @@ repository.workspace = true
[dependencies] [dependencies]
bincode = { workspace = true } bincode = { workspace = true }
brk_core = { workspace = true } brk_core = { workspace = true }
brk_store = { workspace = true }
derive_deref = { workspace = true } derive_deref = { workspace = true }
fjall = { workspace = true }
serde = { workspace = true } serde = { workspace = true }
zerocopy = { workspace = true } zerocopy = { workspace = true }
zerocopy-derive = { workspace = true } zerocopy-derive = { workspace = true }

View File

@@ -1,6 +1,6 @@
use std::{fs, path::Path}; use std::{fs, path::Path};
use brk_core::Version; use brk_core::{DateIndex, Version};
use brk_vec::{AnyVec, CollectableVec, Format, GenericStoredVec, StoredVec, VecIterator}; use brk_vec::{AnyVec, CollectableVec, Format, GenericStoredVec, StoredVec, VecIterator};
fn main() -> Result<(), Box<dyn std::error::Error>> { fn main() -> Result<(), Box<dyn std::error::Error>> {
@@ -10,7 +10,7 @@ fn main() -> Result<(), Box<dyn std::error::Error>> {
let format = Format::Compressed; let format = Format::Compressed;
{ {
let mut vec: StoredVec<usize, u32> = let mut vec: StoredVec<DateIndex, u32> =
StoredVec::forced_import(Path::new("."), "vec", version, format)?; StoredVec::forced_import(Path::new("."), "vec", version, format)?;
(0..21_u32).for_each(|v| { (0..21_u32).for_each(|v| {
@@ -18,56 +18,56 @@ fn main() -> Result<(), Box<dyn std::error::Error>> {
}); });
let mut iter = vec.into_iter(); let mut iter = vec.into_iter();
dbg!(iter.get(0)); dbg!(iter.get(0.into()));
dbg!(iter.get(20)); dbg!(iter.get(20.into()));
dbg!(iter.get(21)); dbg!(iter.get(21.into()));
vec.flush()?; vec.flush()?;
} }
{ {
let mut vec: StoredVec<usize, u32> = let mut vec: StoredVec<DateIndex, u32> =
StoredVec::forced_import(Path::new("."), "vec", version, format)?; StoredVec::forced_import(Path::new("."), "vec", version, format)?;
let mut iter = vec.into_iter(); let mut iter = vec.into_iter();
dbg!(iter.get(0)); dbg!(iter.get(0.into()));
dbg!(iter.get(0)); dbg!(iter.get(0.into()));
dbg!(iter.get(1)); dbg!(iter.get(1.into()));
dbg!(iter.get(2)); dbg!(iter.get(2.into()));
dbg!(iter.get(20)); dbg!(iter.get(20.into()));
dbg!(iter.get(20)); dbg!(iter.get(20.into()));
dbg!(iter.get(0)); dbg!(iter.get(0.into()));
vec.push(21); vec.push(21);
vec.push(22); vec.push(22);
let mut iter = vec.into_iter(); let mut iter = vec.into_iter();
dbg!(iter.get(20)); dbg!(iter.get(20.into()));
dbg!(iter.get(21)); dbg!(iter.get(21.into()));
dbg!(iter.get(22)); dbg!(iter.get(22.into()));
dbg!(iter.get(23)); dbg!(iter.get(23.into()));
vec.flush()?; vec.flush()?;
} }
{ {
let mut vec: StoredVec<usize, u32> = let mut vec: StoredVec<DateIndex, u32> =
StoredVec::forced_import(Path::new("."), "vec", version, format)?; StoredVec::forced_import(Path::new("."), "vec", version, format)?;
let mut iter = vec.into_iter(); let mut iter = vec.into_iter();
dbg!(iter.get(0)); dbg!(iter.get(0.into()));
dbg!(iter.get(20)); dbg!(iter.get(20.into()));
dbg!(iter.get(21)); dbg!(iter.get(21.into()));
dbg!(iter.get(22)); dbg!(iter.get(22.into()));
vec.truncate_if_needed(14)?; vec.truncate_if_needed(14.into())?;
let mut iter = vec.into_iter(); let mut iter = vec.into_iter();
iter.get(0); iter.get(0.into());
iter.get(5); iter.get(5.into());
dbg!(iter.get(20)); dbg!(iter.get(20.into()));
dbg!(vec.collect_signed_range(Some(-5), None)?); dbg!(vec.collect_signed_range(Some(-5), None)?);

View File

@@ -6,13 +6,13 @@ use super::{BoxedVecIterator, StoredIndex, StoredType};
pub trait AnyVec: Send + Sync { pub trait AnyVec: Send + Sync {
fn version(&self) -> Version; fn version(&self) -> Version;
fn name(&self) -> String; fn name(&self) -> &str;
fn len(&self) -> usize; fn len(&self) -> usize;
fn is_empty(&self) -> bool { fn is_empty(&self) -> bool {
self.len() == 0 self.len() == 0
} }
fn modified_time(&self) -> Result<Duration>; fn modified_time(&self) -> Result<Duration>;
fn index_type_to_string(&self) -> String; fn index_type_to_string(&self) -> &'static str;
fn value_type_to_size_of(&self) -> usize; fn value_type_to_size_of(&self) -> usize;
} }

View File

@@ -66,7 +66,7 @@ where
self.mut_pushed().push(value) self.mut_pushed().push(value)
} }
fn path(&self) -> &Path; fn path(&self) -> PathBuf;
// --- // ---
@@ -141,7 +141,7 @@ where
#[inline] #[inline]
fn path_vec(&self) -> PathBuf { fn path_vec(&self) -> PathBuf {
Self::path_vec_(self.path()) Self::path_vec_(&self.path())
} }
#[inline] #[inline]
fn path_vec_(path: &Path) -> PathBuf { fn path_vec_(path: &Path) -> PathBuf {
@@ -158,16 +158,6 @@ where
path.join("compressed") path.join("compressed")
} }
#[inline]
fn name_(&self) -> String {
self.path()
.file_name()
.unwrap()
.to_str()
.unwrap()
.to_owned()
}
fn modified_time_(&self) -> Result<Duration> { fn modified_time_(&self) -> Result<Duration> {
Ok(self Ok(self
.path_vec() .path_vec()

View File

@@ -1,10 +1,6 @@
use std::{ use std::{fmt::Debug, ops::Add};
fmt::Debug,
ops::Add,
path::{Path, PathBuf},
};
use brk_core::{Error, Result}; use brk_core::{Error, Printable, Result};
pub trait StoredIndex pub trait StoredIndex
where where
@@ -20,18 +16,12 @@ where
+ From<usize> + From<usize>
+ Add<usize, Output = Self> + Add<usize, Output = Self>
+ Send + Send
+ Sync, + Sync
+ Printable,
{ {
fn unwrap_to_usize(self) -> usize; fn unwrap_to_usize(self) -> usize;
fn to_usize(self) -> Result<usize>; fn to_usize(self) -> Result<usize>;
fn to_string() -> String;
fn decremented(self) -> Option<Self>; fn decremented(self) -> Option<Self>;
fn to_folder_name(value_name: &str) -> String {
format!("{}_to_{value_name}", Self::to_string().to_lowercase())
}
fn path(path: &Path, value_name: &str) -> PathBuf {
path.join(Self::to_folder_name(value_name))
}
} }
impl<I> StoredIndex for I impl<I> StoredIndex for I
@@ -48,7 +38,8 @@ where
+ From<usize> + From<usize>
+ Add<usize, Output = Self> + Add<usize, Output = Self>
+ Send + Send
+ Sync, + Sync
+ Printable,
{ {
#[inline] #[inline]
fn unwrap_to_usize(self) -> usize { fn unwrap_to_usize(self) -> usize {
@@ -60,15 +51,6 @@ where
self.try_into().map_err(|_| Error::FailedKeyTryIntoUsize) self.try_into().map_err(|_| Error::FailedKeyTryIntoUsize)
} }
#[inline]
fn to_string() -> String {
std::any::type_name::<I>()
.split("::")
.last()
.unwrap()
.to_lowercase()
}
#[inline] #[inline]
fn decremented(self) -> Option<Self> { fn decremented(self) -> Option<Self> {
self.unwrap_to_usize().checked_sub(1).map(Self::from) self.unwrap_to_usize().checked_sub(1).map(Self::from)

View File

@@ -1,6 +1,6 @@
use std::{iter::Skip, path::Path}; use std::iter::Skip;
use brk_core::Value; use brk_core::{Printable, Value};
use super::{StoredIndex, StoredType}; use super::{StoredIndex, StoredType};
@@ -20,7 +20,7 @@ pub trait BaseVecIterator: Iterator {
fn len(&self) -> usize; fn len(&self) -> usize;
fn path(&self) -> &Path; fn name(&self) -> &str;
fn is_empty(&self) -> bool { fn is_empty(&self) -> bool {
self.len() == 0 self.len() == 0
@@ -62,7 +62,7 @@ pub trait VecIterator<'a>: BaseVecIterator<Item = (Self::I, Value<'a, Self::T>)>
fn unwrap_get_inner_(&mut self, i: usize) -> Self::T { fn unwrap_get_inner_(&mut self, i: usize) -> Self::T {
self.get_(i) self.get_(i)
.unwrap_or_else(|| { .unwrap_or_else(|| {
dbg!(self.path(), i, self.len()); dbg!(self.name(), i, self.len());
panic!("unwrap_get_inner_") panic!("unwrap_get_inner_")
}) })
.into_inner() .into_inner()
@@ -86,7 +86,7 @@ pub trait VecIterator<'a>: BaseVecIterator<Item = (Self::I, Value<'a, Self::T>)>
self.next().map(|(i, v)| (i, Value::Owned(v.into_inner()))) self.next().map(|(i, v)| (i, Value::Owned(v.into_inner())))
} }
fn index_type_to_string(&self) -> String { fn index_type_to_string(&self) -> &'static str {
Self::I::to_string() Self::I::to_string()
} }
} }

View File

@@ -1,7 +1,7 @@
use std::{ use std::{
fs::{self, File}, fs::{self, File},
mem, mem,
path::Path, path::{Path, PathBuf},
sync::Arc, sync::Arc,
time::Duration, time::Duration,
}; };
@@ -41,39 +41,46 @@ where
pub const CACHE_LENGTH: usize = MAX_CACHE_SIZE / Self::PAGE_SIZE; pub const CACHE_LENGTH: usize = MAX_CACHE_SIZE / Self::PAGE_SIZE;
/// Same as import but will reset the folder under certain errors, so be careful ! /// Same as import but will reset the folder under certain errors, so be careful !
pub fn forced_import(path: &Path, mut version: Version) -> Result<Self> { pub fn forced_import(path: &Path, name: &str, mut version: Version) -> Result<Self> {
version = version + VERSION; version = version + VERSION;
let res = Self::import(path, version); let res = Self::import(path, name, version);
match res { match res {
Err(Error::WrongEndian) Err(Error::WrongEndian)
| Err(Error::DifferentVersion { .. }) | Err(Error::DifferentVersion { .. })
| Err(Error::DifferentCompressionMode) => { | Err(Error::DifferentCompressionMode) => {
fs::remove_dir_all(path)?; fs::remove_dir_all(path)?;
Self::import(path, version) Self::import(path, name, version)
} }
_ => res, _ => res,
} }
} }
pub fn import(path: &Path, version: Version) -> Result<Self> { pub fn import(path: &Path, name: &str, version: Version) -> Result<Self> {
fs::create_dir_all(path)?; let pages_meta = {
let path = path.join(name).join(I::to_string());
let vec_exists = fs::exists(Self::path_vec_(path)).is_ok_and(|b| b); let vec_exists = fs::exists(Self::path_vec_(&path)).is_ok_and(|b| b);
let compressed_path = Self::path_compressed_(path); let compressed_path = Self::path_compressed_(&path);
let compressed_exists = fs::exists(&compressed_path).is_ok_and(|b| b); let compressed_exists = fs::exists(&compressed_path).is_ok_and(|b| b);
if vec_exists && !compressed_exists { if vec_exists && !compressed_exists {
return Err(Error::DifferentCompressionMode); return Err(Error::DifferentCompressionMode);
} }
if !vec_exists && !compressed_exists { if !vec_exists && !compressed_exists {
File::create(&compressed_path)?; fs::create_dir_all(&path)?;
} File::create(&compressed_path)?;
}
Arc::new(ArcSwap::new(Arc::new(CompressedPagesMetadata::read(
&path,
)?)))
};
Ok(Self { Ok(Self {
inner: RawVec::import(path, version)?, inner: RawVec::import(path, name, version)?,
pages_meta: Arc::new(ArcSwap::new(Arc::new(CompressedPagesMetadata::read(path)?))), pages_meta,
}) })
} }
@@ -199,7 +206,7 @@ where
} }
#[inline] #[inline]
fn path(&self) -> &Path { fn path(&self) -> PathBuf {
self.inner.path() self.inner.path()
} }
@@ -351,8 +358,8 @@ where
} }
#[inline] #[inline]
fn name(&self) -> String { fn name(&self) -> &str {
self.name_() self.inner.name()
} }
#[inline] #[inline]
@@ -366,7 +373,7 @@ where
} }
#[inline] #[inline]
fn index_type_to_string(&self) -> String { fn index_type_to_string(&self) -> &'static str {
I::to_string() I::to_string()
} }
@@ -421,8 +428,8 @@ where
} }
#[inline] #[inline]
fn path(&self) -> &Path { fn name(&self) -> &str {
self.vec.path() self.vec.name()
} }
} }

View File

@@ -84,7 +84,7 @@ where
pub fn forced_import_or_init_from_1( pub fn forced_import_or_init_from_1(
mode: Computation, mode: Computation,
path: &Path, path: &Path,
value_name: &str, name: &str,
version: Version, version: Version,
format: Format, format: Format,
source: BoxedAnyIterableVec<S1I, S1T>, source: BoxedAnyIterableVec<S1I, S1T>,
@@ -92,12 +92,12 @@ where
) -> Result<Self> { ) -> Result<Self> {
Ok(match mode { Ok(match mode {
Computation::Eager => Self::Eager { Computation::Eager => Self::Eager {
vec: EagerVec::forced_import(path, value_name, version, format)?, vec: EagerVec::forced_import(path, name, version, format)?,
deps: Dependencies::From1(source, compute), deps: Dependencies::From1(source, compute),
}, },
Computation::Lazy => { Computation::Lazy => {
let _ = fs::remove_dir_all(I::path(path, value_name)); let _ = fs::remove_dir_all(path.join(name).join(I::to_string()));
Self::LazyFrom1(LazyVecFrom1::init(value_name, version, source, compute)) Self::LazyFrom1(LazyVecFrom1::init(name, version, source, compute))
} }
}) })
} }
@@ -106,7 +106,7 @@ where
pub fn forced_import_or_init_from_2( pub fn forced_import_or_init_from_2(
mode: Computation, mode: Computation,
path: &Path, path: &Path,
value_name: &str, name: &str,
version: Version, version: Version,
format: Format, format: Format,
source1: BoxedAnyIterableVec<S1I, S1T>, source1: BoxedAnyIterableVec<S1I, S1T>,
@@ -115,14 +115,12 @@ where
) -> Result<Self> { ) -> Result<Self> {
Ok(match mode { Ok(match mode {
Computation::Eager => Self::Eager { Computation::Eager => Self::Eager {
vec: EagerVec::forced_import(path, value_name, version, format)?, vec: EagerVec::forced_import(path, name, version, format)?,
deps: Dependencies::From2((source1, source2), compute), deps: Dependencies::From2((source1, source2), compute),
}, },
Computation::Lazy => { Computation::Lazy => {
let _ = fs::remove_dir_all(I::path(path, value_name)); let _ = fs::remove_dir_all(path.join(name).join(I::to_string()));
Self::LazyFrom2(LazyVecFrom2::init( Self::LazyFrom2(LazyVecFrom2::init(name, version, source1, source2, compute))
value_name, version, source1, source2, compute,
))
} }
}) })
} }
@@ -131,7 +129,7 @@ where
pub fn forced_import_or_init_from_3( pub fn forced_import_or_init_from_3(
mode: Computation, mode: Computation,
path: &Path, path: &Path,
value_name: &str, name: &str,
version: Version, version: Version,
format: Format, format: Format,
source1: BoxedAnyIterableVec<S1I, S1T>, source1: BoxedAnyIterableVec<S1I, S1T>,
@@ -141,13 +139,13 @@ where
) -> Result<Self> { ) -> Result<Self> {
Ok(match mode { Ok(match mode {
Computation::Eager => Self::Eager { Computation::Eager => Self::Eager {
vec: EagerVec::forced_import(path, value_name, version, format)?, vec: EagerVec::forced_import(path, name, version, format)?,
deps: Dependencies::From3((source1, source2, source3), compute), deps: Dependencies::From3((source1, source2, source3), compute),
}, },
Computation::Lazy => { Computation::Lazy => {
let _ = fs::remove_dir_all(I::path(path, value_name)); let _ = fs::remove_dir_all(path.join(name).join(I::to_string()));
Self::LazyFrom3(LazyVecFrom3::init( Self::LazyFrom3(LazyVecFrom3::init(
value_name, version, source1, source2, source3, compute, name, version, source1, source2, source3, compute,
)) ))
} }
}) })
@@ -225,7 +223,7 @@ where
} }
} }
fn name(&self) -> String { fn name(&self) -> &str {
match self { match self {
ComputedVec::Eager { vec, .. } => vec.name(), ComputedVec::Eager { vec, .. } => vec.name(),
ComputedVec::LazyFrom1(v) => v.name(), ComputedVec::LazyFrom1(v) => v.name(),
@@ -234,7 +232,7 @@ where
} }
} }
fn index_type_to_string(&self) -> String { fn index_type_to_string(&self) -> &'static str {
I::to_string() I::to_string()
} }
@@ -324,12 +322,12 @@ where
} }
#[inline] #[inline]
fn path(&self) -> &Path { fn name(&self) -> &str {
match self { match self {
Self::Eager(i) => i.path(), Self::Eager(i) => i.name(),
Self::LazyFrom1(i) => i.path(), Self::LazyFrom1(i) => i.name(),
Self::LazyFrom2(i) => i.path(), Self::LazyFrom2(i) => i.name(),
Self::LazyFrom3(i) => i.path(), Self::LazyFrom3(i) => i.name(),
} }
} }
} }

View File

@@ -106,7 +106,7 @@ where
Ok(()) Ok(())
} }
pub fn path(&self) -> &Path { pub fn path(&self) -> PathBuf {
self.inner.path() self.inner.path()
} }
@@ -136,7 +136,11 @@ where
self.computed_version.store(Arc::new(Some(version))); self.computed_version.store(Arc::new(Some(version)));
if self.is_empty() { if self.is_empty() {
info!("Computing {}...", self.name()) info!(
"Computing {}_to_{}...",
self.index_type_to_string(),
self.name()
)
} }
Ok(()) Ok(())
@@ -1306,7 +1310,7 @@ where
} }
#[inline] #[inline]
fn name(&self) -> String { fn name(&self) -> &str {
self.inner.name() self.inner.name()
} }
@@ -1321,7 +1325,7 @@ where
} }
#[inline] #[inline]
fn index_type_to_string(&self) -> String { fn index_type_to_string(&self) -> &'static str {
I::to_string() I::to_string()
} }

View File

@@ -35,7 +35,7 @@ where
let inner = StoredVec::forced_import(path, value_name, version, format)?; let inner = StoredVec::forced_import(path, value_name, version, format)?;
Ok(Self { Ok(Self {
height: Height::try_from(Self::path_height_(inner.path()).as_path()).ok(), height: Height::try_from(Self::path_height_(&inner.path()).as_path()).ok(),
inner, inner,
}) })
} }
@@ -91,7 +91,7 @@ where
Height::try_from(self.path_height().as_path()) Height::try_from(self.path_height().as_path())
} }
fn path_height(&self) -> PathBuf { fn path_height(&self) -> PathBuf {
Self::path_height_(self.inner.path()) Self::path_height_(&self.inner.path())
} }
fn path_height_(path: &Path) -> PathBuf { fn path_height_(path: &Path) -> PathBuf {
path.join("height") path.join("height")
@@ -109,7 +109,7 @@ where
} }
#[inline] #[inline]
fn name(&self) -> String { fn name(&self) -> &str {
self.inner.name() self.inner.name()
} }
@@ -124,7 +124,7 @@ where
} }
#[inline] #[inline]
fn index_type_to_string(&self) -> String { fn index_type_to_string(&self) -> &'static str {
I::to_string() I::to_string()
} }

View File

@@ -1,4 +1,4 @@
use std::{marker::PhantomData, path::Path}; use std::marker::PhantomData;
use brk_core::{Result, Value, Version}; use brk_core::{Result, Value, Version};
@@ -27,7 +27,7 @@ where
S1T: StoredType, S1T: StoredType,
{ {
pub fn init( pub fn init(
value_name: &str, name: &str,
version: Version, version: Version,
source: BoxedAnyIterableVec<S1I, S1T>, source: BoxedAnyIterableVec<S1I, S1T>,
compute: ComputeFrom1<I, T, S1I, S1T>, compute: ComputeFrom1<I, T, S1I, S1T>,
@@ -37,7 +37,7 @@ where
} }
Self { Self {
name: I::to_folder_name(value_name), name: name.to_string(),
version, version,
source, source,
compute, compute,
@@ -96,8 +96,8 @@ where
} }
#[inline] #[inline]
fn path(&self) -> &Path { fn name(&self) -> &str {
self.source.path() self.source.name()
} }
} }
@@ -131,11 +131,11 @@ where
self.version() self.version()
} }
fn name(&self) -> String { fn name(&self) -> &str {
self.name.clone() self.name.as_str()
} }
fn index_type_to_string(&self) -> String { fn index_type_to_string(&self) -> &'static str {
I::to_string() I::to_string()
} }

View File

@@ -1,4 +1,4 @@
use std::{marker::PhantomData, path::Path}; use std::marker::PhantomData;
use brk_core::{Result, Value, Version}; use brk_core::{Result, Value, Version};
@@ -33,7 +33,7 @@ where
S2T: StoredType, S2T: StoredType,
{ {
pub fn init( pub fn init(
value_name: &str, name: &str,
version: Version, version: Version,
source1: BoxedAnyIterableVec<S1I, S1T>, source1: BoxedAnyIterableVec<S1I, S1T>,
source2: BoxedAnyIterableVec<S2I, S2T>, source2: BoxedAnyIterableVec<S2I, S2T>,
@@ -52,7 +52,7 @@ where
} }
Self { Self {
name: I::to_folder_name(value_name), name: name.to_string(),
version, version,
source1, source1,
source2, source2,
@@ -126,8 +126,8 @@ where
} }
#[inline] #[inline]
fn path(&self) -> &Path { fn name(&self) -> &str {
self.source1.path() self.source1.name()
} }
} }
@@ -166,11 +166,11 @@ where
self.version() self.version()
} }
fn name(&self) -> String { fn name(&self) -> &str {
self.name.clone() self.name.as_str()
} }
fn index_type_to_string(&self) -> String { fn index_type_to_string(&self) -> &'static str {
I::to_string() I::to_string()
} }

View File

@@ -1,4 +1,4 @@
use std::{marker::PhantomData, path::Path}; use std::marker::PhantomData;
use brk_core::{Result, Value, Version}; use brk_core::{Result, Value, Version};
@@ -37,7 +37,7 @@ where
S3T: StoredType, S3T: StoredType,
{ {
pub fn init( pub fn init(
value_name: &str, name: &str,
version: Version, version: Version,
source1: BoxedAnyIterableVec<S1I, S1T>, source1: BoxedAnyIterableVec<S1I, S1T>,
source2: BoxedAnyIterableVec<S2I, S2T>, source2: BoxedAnyIterableVec<S2I, S2T>,
@@ -58,7 +58,7 @@ where
} }
Self { Self {
name: I::to_folder_name(value_name), name: name.to_string(),
version, version,
source1, source1,
source2, source2,
@@ -149,8 +149,8 @@ where
} }
#[inline] #[inline]
fn path(&self) -> &Path { fn name(&self) -> &str {
self.source1.path() self.source1.name()
} }
} }
@@ -195,11 +195,11 @@ where
self.version() self.version()
} }
fn name(&self) -> String { fn name(&self) -> &str {
self.name.clone() self.name.as_str()
} }
fn index_type_to_string(&self) -> String { fn index_type_to_string(&self) -> &'static str {
I::to_string() I::to_string()
} }

View File

@@ -20,7 +20,8 @@ use crate::{
#[derive(Debug)] #[derive(Debug)]
pub struct RawVec<I, T> { pub struct RawVec<I, T> {
version: Version, version: Version,
pathbuf: PathBuf, parent: PathBuf,
name: String,
// Consider Arc<ArcSwap<Option<Mmap>>> for dataraces when reorg ? // Consider Arc<ArcSwap<Option<Mmap>>> for dataraces when reorg ?
mmap: Arc<ArcSwap<Mmap>>, mmap: Arc<ArcSwap<Mmap>>,
pushed: Vec<T>, pushed: Vec<T>,
@@ -33,33 +34,40 @@ where
T: StoredType, T: StoredType,
{ {
/// Same as import but will reset the folder under certain errors, so be careful ! /// Same as import but will reset the folder under certain errors, so be careful !
pub fn forced_import(path: &Path, version: Version) -> Result<Self> { pub fn forced_import(path: &Path, name: &str, version: Version) -> Result<Self> {
let res = Self::import(path, version); let res = Self::import(path, name, version);
match res { match res {
Err(Error::WrongEndian) | Err(Error::DifferentVersion { .. }) => { Err(Error::WrongEndian) | Err(Error::DifferentVersion { .. }) => {
fs::remove_dir_all(path)?; fs::remove_dir_all(path)?;
Self::import(path, version) Self::import(path, name, version)
} }
_ => res, _ => res,
} }
} }
pub fn import(path: &Path, version: Version) -> Result<Self> { pub fn import(path: &Path, name: &str, version: Version) -> Result<Self> {
fs::create_dir_all(path)?; let (version, mmap) = {
let path = path.join(name).join(I::to_string());
let version_path = Self::path_version_(path); fs::create_dir_all(&path)?;
if !version.validate(version_path.as_ref())? { let version_path = Self::path_version_(&path);
version.write(version_path.as_ref())?;
}
let file = Self::open_file_(Self::path_vec_(path).as_path())?; if !version.validate(version_path.as_ref())? {
let mmap = Arc::new(ArcSwap::new(Self::new_mmap(file)?)); version.write(version_path.as_ref())?;
}
let file = Self::open_file_(Self::path_vec_(&path).as_path())?;
let mmap = Arc::new(ArcSwap::new(Self::new_mmap(file)?));
(version, mmap)
};
Ok(Self { Ok(Self {
mmap, mmap,
version, version,
pathbuf: path.to_owned(), name: name.to_string(),
parent: path.to_owned(),
pushed: vec![], pushed: vec![],
phantom: PhantomData, phantom: PhantomData,
}) })
@@ -121,8 +129,8 @@ where
} }
#[inline] #[inline]
fn path(&self) -> &Path { fn path(&self) -> PathBuf {
self.pathbuf.as_path() self.parent.join(self.name()).join(I::to_string())
} }
fn flush(&mut self) -> Result<()> { fn flush(&mut self) -> Result<()> {
@@ -183,8 +191,8 @@ where
} }
#[inline] #[inline]
fn name(&self) -> String { fn name(&self) -> &str {
self.name_() self.name.as_str()
} }
#[inline] #[inline]
@@ -198,7 +206,7 @@ where
} }
#[inline] #[inline]
fn index_type_to_string(&self) -> String { fn index_type_to_string(&self) -> &'static str {
I::to_string() I::to_string()
} }
@@ -212,7 +220,8 @@ impl<I, T> Clone for RawVec<I, T> {
fn clone(&self) -> Self { fn clone(&self) -> Self {
Self { Self {
version: self.version, version: self.version,
pathbuf: self.pathbuf.clone(), parent: self.parent.clone(),
name: self.name.clone(),
mmap: self.mmap.clone(), mmap: self.mmap.clone(),
pushed: vec![], pushed: vec![],
phantom: PhantomData, phantom: PhantomData,
@@ -243,8 +252,8 @@ where
} }
#[inline] #[inline]
fn path(&self) -> &Path { fn name(&self) -> &str {
self.vec.path() self.vec.name()
} }
} }

View File

@@ -1,4 +1,7 @@
use std::{path::Path, time::Duration}; use std::{
path::{Path, PathBuf},
time::Duration,
};
use arc_swap::ArcSwap; use arc_swap::ArcSwap;
use brk_core::{Result, Value, Version}; use brk_core::{Result, Value, Version};
@@ -24,23 +27,23 @@ where
{ {
pub fn forced_import( pub fn forced_import(
path: &Path, path: &Path,
value_name: &str, name: &str,
version: Version, version: Version,
format: Format, format: Format,
) -> Result<Self> { ) -> Result<Self> {
let path = I::path(path, value_name); // let path = I::path(path, value_name);
if version == Version::ZERO { if version == Version::ZERO {
dbg!(path, value_name); dbg!(path, name);
panic!("Version must be at least 1, can't verify endianess otherwise"); panic!("Version must be at least 1, can't verify endianess otherwise");
} }
if format.is_compressed() { if format.is_compressed() {
Ok(Self::Compressed(CompressedVec::forced_import( Ok(Self::Compressed(CompressedVec::forced_import(
&path, version, path, name, version,
)?)) )?))
} else { } else {
Ok(Self::Raw(RawVec::forced_import(&path, version)?)) Ok(Self::Raw(RawVec::forced_import(path, name, version)?))
} }
} }
} }
@@ -97,7 +100,7 @@ where
} }
#[inline] #[inline]
fn path(&self) -> &Path { fn path(&self) -> PathBuf {
match self { match self {
StoredVec::Raw(v) => v.path(), StoredVec::Raw(v) => v.path(),
StoredVec::Compressed(v) => v.path(), StoredVec::Compressed(v) => v.path(),
@@ -133,7 +136,7 @@ where
} }
#[inline] #[inline]
fn index_type_to_string(&self) -> String { fn index_type_to_string(&self) -> &'static str {
I::to_string() I::to_string()
} }
@@ -150,7 +153,7 @@ where
} }
} }
fn name(&self) -> String { fn name(&self) -> &str {
match self { match self {
StoredVec::Raw(v) => v.name(), StoredVec::Raw(v) => v.name(),
StoredVec::Compressed(v) => v.name(), StoredVec::Compressed(v) => v.name(),
@@ -204,10 +207,10 @@ where
} }
#[inline] #[inline]
fn path(&self) -> &Path { fn name(&self) -> &str {
match self { match self {
Self::Compressed(i) => i.path(), Self::Compressed(i) => i.name(),
Self::Raw(i) => i.path(), Self::Raw(i) => i.name(),
} }
} }
} }

View File

@@ -6,6 +6,10 @@ const AUTO = "auto";
const LINE = "line"; const LINE = "line";
const CANDLE = "candle"; const CANDLE = "candle";
/**
* @typedef {"timestamp" | "date" | "week" | "diff. epoch" | "month" | "quarter" | "year" | "decade" } SerializedChartableIndex
*/
/** /**
* @param {Object} args * @param {Object} args
* @param {Colors} args.colors * @param {Colors} args.colors
@@ -186,7 +190,9 @@ export function init({
const date = new Date(latest.time * 1000); const date = new Date(latest.time * 1000);
switch (index) { switch (index) {
case /** @satisfies {Height} */ (5): { case /** @satisfies {Height} */ (5):
case /** @satisfies {DifficultyEpoch} */ (2):
case /** @satisfies {HalvingEpoch} */ (4): {
if ("close" in last) { if ("close" in last) {
last.low = Math.min(last.low, latest.close); last.low = Math.min(last.low, latest.close);
last.high = Math.max(last.high, latest.close); last.high = Math.max(last.high, latest.close);
@@ -314,7 +320,7 @@ export function init({
switch (topSeriesType) { switch (topSeriesType) {
case CANDLE: { case CANDLE: {
series = chart.addCandlestickSeries({ series = chart.addCandlestickSeries({
vecId: "ohlc-in-sats", vecId: "ohlc_in_sats",
name: "Price", name: "Price",
unit: topUnit, unit: topUnit,
inverse: true, inverse: true,
@@ -325,7 +331,7 @@ export function init({
} }
case LINE: { case LINE: {
series = chart.addLineSeries({ series = chart.addLineSeries({
vecId: "close-in-sats", vecId: "close_in_sats",
name: "Price", name: "Price",
unit: topUnit, unit: topUnit,
color: colors.default, color: colors.default,
@@ -459,11 +465,11 @@ export function init({
* @param {Utilities} args.utils * @param {Utilities} args.utils
*/ */
function createIndexSelector({ option, vecIdToIndexes, signals, utils }) { function createIndexSelector({ option, vecIdToIndexes, signals, utils }) {
const choices_ = /** @type {const} */ ([ const choices_ = /** @satisfies {SerializedChartableIndex[]} */ ([
"timestamp", "timestamp",
"date", "date",
"week", "week",
// "difficulty epoch", "diff. epoch",
"month", "month",
"quarter", "quarter",
"year", "year",

View File

@@ -4,6 +4,7 @@
* @import { Option, PartialChartOption, ChartOption, AnyPartialOption, ProcessedOptionAddons, OptionsTree, SimulationOption, AnySeriesBlueprint, SeriesType } from "./options" * @import { Option, PartialChartOption, ChartOption, AnyPartialOption, ProcessedOptionAddons, OptionsTree, SimulationOption, AnySeriesBlueprint, SeriesType } from "./options"
* @import { Valued, SingleValueData, CandlestickData, OHLCTuple, Series, ISeries, LineData, BaselineData, PartialLineStyleOptions, PartialBaselineStyleOptions, PartialCandlestickStyleOptions } from "../packages/lightweight-charts/wrapper" * @import { Valued, SingleValueData, CandlestickData, OHLCTuple, Series, ISeries, LineData, BaselineData, PartialLineStyleOptions, PartialBaselineStyleOptions, PartialCandlestickStyleOptions } from "../packages/lightweight-charts/wrapper"
* @import * as _ from "../packages/ufuzzy/v1.0.18/types" * @import * as _ from "../packages/ufuzzy/v1.0.18/types"
* @import { SerializedChartableIndex } from "./chart";
* @import { Signal, Signals, Accessor } from "../packages/solid-signals/wrapper"; * @import { Signal, Signals, Accessor } from "../packages/solid-signals/wrapper";
* @import { DateIndex, DecadeIndex, DifficultyEpoch, Index, HalvingEpoch, Height, MonthIndex, P2PK33Index, P2PK65Index, P2PKHIndex, P2SHIndex, P2MSIndex, P2AIndex, P2TRIndex, P2WPKHIndex, P2WSHIndex, TxIndex, InputIndex, OutputIndex, VecId, WeekIndex, YearIndex, VecIdToIndexes, QuarterIndex, EmptyOutputIndex, OpReturnIndex, UnknownOutputIndex } from "./vecid-to-indexes" * @import { DateIndex, DecadeIndex, DifficultyEpoch, Index, HalvingEpoch, Height, MonthIndex, P2PK33Index, P2PK65Index, P2PKHIndex, P2SHIndex, P2MSIndex, P2AIndex, P2TRIndex, P2WPKHIndex, P2WSHIndex, TxIndex, InputIndex, OutputIndex, VecId, WeekIndex, YearIndex, VecIdToIndexes, QuarterIndex, EmptyOutputIndex, OpReturnIndex, UnknownOutputIndex } from "./vecid-to-indexes"
*/ */
@@ -710,12 +711,12 @@ function createUtils() {
if ( if (
(!unit || thoroughUnitCheck) && (!unit || thoroughUnitCheck) &&
(id.includes("in-sats") || (id.includes("in_sats") ||
(id.endsWith("supply") && (id.endsWith("supply") &&
!(id.endsWith("circulating-supply") || id.endsWith("-own-supply"))) || !(id.endsWith("circulating_supply") || id.endsWith("_own_supply"))) ||
id.endsWith("supply-even") || id.endsWith("supply_even") ||
id.endsWith("supply-in-profit") || id.endsWith("supply_in_profit") ||
id.endsWith("supply-in-loss") || id.endsWith("supply_in_loss") ||
id.endsWith("stack") || id.endsWith("stack") ||
(id.endsWith("value") && !id.includes("realized")) || (id.endsWith("value") && !id.includes("realized")) ||
((id.includes("coinbase") || ((id.includes("coinbase") ||
@@ -723,15 +724,15 @@ function createUtils() {
id.includes("subsidy") || id.includes("subsidy") ||
id.includes("rewards")) && id.includes("rewards")) &&
!( !(
id.startsWith("is-") || id.startsWith("is_") ||
id.includes("in-btc") || id.includes("in_btc") ||
id.includes("in-usd") id.includes("in_usd")
))) )))
) { ) {
if (unit) throw Error(`Unit "${unit}" already assigned "${id}"`); if (unit) throw Error(`Unit "${unit}" already assigned "${id}"`);
unit = "Sats"; unit = "Sats";
} }
if ((!unit || thoroughUnitCheck) && id.includes("in-btc")) { if ((!unit || thoroughUnitCheck) && id.includes("in_btc")) {
if (unit) throw Error(`Unit "${unit}" already assigned "${id}"`); if (unit) throw Error(`Unit "${unit}" already assigned "${id}"`);
unit = "BTC"; unit = "BTC";
} }
@@ -743,18 +744,18 @@ function createUtils() {
id === "close" || id === "close" ||
id === "open" || id === "open" ||
id === "marketcap" || id === "marketcap" ||
id.includes("in-usd") || id.includes("in_usd") ||
id.includes("cointime-value") || id.includes("cointime_value") ||
id.startsWith("price") || id.startsWith("price") ||
id.endsWith("price-paid") || id.endsWith("price_paid") ||
id.endsWith("price") || id.endsWith("price") ||
(id.endsWith("-cap") && !id.includes("relative-to")) || (id.endsWith("_cap") && !id.includes("relative_to")) ||
id.endsWith("value-created") || id.endsWith("value_created") ||
id.endsWith("value-destroyed") || id.endsWith("value_destroyed") ||
((id.includes("realized") || id.includes("true-market-mean")) && ((id.includes("realized") || id.includes("true_market_mean")) &&
!id.includes("ratio") && !id.includes("ratio") &&
!id.includes("relative-to")) || !id.includes("relative_to")) ||
((id.endsWith("sma") || id.includes("sma-x")) && ((id.endsWith("sma") || id.includes("sma_x")) &&
!id.includes("ratio")) || !id.includes("ratio")) ||
id === "ath") id === "ath")
) { ) {
@@ -772,12 +773,12 @@ function createUtils() {
id.endsWith("1sd") || id.endsWith("1sd") ||
id.endsWith("2sd") || id.endsWith("2sd") ||
id.endsWith("3sd") || id.endsWith("3sd") ||
id.endsWith("p0-1") || id.endsWith("p0_1") ||
id.endsWith("p0-5") || id.endsWith("p0_5") ||
id.endsWith("p1") || id.endsWith("p1") ||
id.endsWith("p99") || id.endsWith("p99") ||
id.endsWith("p99-5") || id.endsWith("p99_5") ||
id.endsWith("p99-9"))) || id.endsWith("p99_9"))) ||
id.includes("liveliness") || id.includes("liveliness") ||
id.includes("vaultedness") id.includes("vaultedness")
) { ) {
@@ -794,14 +795,14 @@ function createUtils() {
if ( if (
(!unit || thoroughUnitCheck) && (!unit || thoroughUnitCheck) &&
(id.endsWith("count") || (id.endsWith("count") ||
id.includes("-count-") || id.includes("_count_") ||
id.startsWith("block-count") || id.startsWith("block_count") ||
id.includes("tx-v")) id.includes("tx_v"))
) { ) {
if (unit) throw Error(`Unit "${unit}" already assigned "${id}"`); if (unit) throw Error(`Unit "${unit}" already assigned "${id}"`);
unit = "Count"; unit = "Count";
} }
if ((!unit || thoroughUnitCheck) && id.startsWith("is-")) { if ((!unit || thoroughUnitCheck) && id.startsWith("is_")) {
if (unit) throw Error(`Unit "${unit}" already assigned "${id}"`); if (unit) throw Error(`Unit "${unit}" already assigned "${id}"`);
unit = "Bool"; unit = "Bool";
} }
@@ -811,7 +812,7 @@ function createUtils() {
} }
if ( if (
(!unit || thoroughUnitCheck) && (!unit || thoroughUnitCheck) &&
(id === "interval" || id.startsWith("block-interval")) (id === "interval" || id.startsWith("block_interval"))
) { ) {
if (unit) throw Error(`Unit "${unit}" already assigned "${id}"`); if (unit) throw Error(`Unit "${unit}" already assigned "${id}"`);
unit = "Seconds"; unit = "Seconds";
@@ -843,13 +844,13 @@ function createUtils() {
if (unit) throw Error(`Unit "${unit}" already assigned "${id}"`); if (unit) throw Error(`Unit "${unit}" already assigned "${id}"`);
unit = "Bytes"; unit = "Bytes";
} }
if ((!unit || thoroughUnitCheck) && id.endsWith("-sd")) { if ((!unit || thoroughUnitCheck) && id.endsWith("_sd")) {
if (unit) throw Error(`Unit "${unit}" already assigned "${id}"`); if (unit) throw Error(`Unit "${unit}" already assigned "${id}"`);
unit = "sd"; unit = "sd";
} }
if ( if (
(!unit || thoroughUnitCheck) && (!unit || thoroughUnitCheck) &&
(id.endsWith("-size") || id.endsWith("-size-sum")) (id.endsWith("_size") || id.endsWith("_size_sum"))
) { ) {
if (unit) throw Error(`Unit "${unit}" already assigned "${id}"`); if (unit) throw Error(`Unit "${unit}" already assigned "${id}"`);
unit = "mb"; unit = "mb";
@@ -858,7 +859,7 @@ function createUtils() {
(!unit || thoroughUnitCheck) && (!unit || thoroughUnitCheck) &&
(id.endsWith("vsize") || (id.endsWith("vsize") ||
id.endsWith("vbytes") || id.endsWith("vbytes") ||
id.endsWith("-vbytes-sum")) id.endsWith("_vbytes_sum"))
) { ) {
if (unit) throw Error(`Unit "${unit}" already assigned "${id}"`); if (unit) throw Error(`Unit "${unit}" already assigned "${id}"`);
unit = "vB"; unit = "vB";
@@ -873,14 +874,14 @@ function createUtils() {
} }
if ( if (
(!unit || thoroughUnitCheck) && (!unit || thoroughUnitCheck) &&
(id === "date" || id === "date-fixed") (id === "date" || id === "date_fixed")
) { ) {
if (unit) throw Error(`Unit "${unit}" already assigned "${id}"`); if (unit) throw Error(`Unit "${unit}" already assigned "${id}"`);
unit = "Date"; unit = "Date";
} }
if ( if (
(!unit || thoroughUnitCheck) && (!unit || thoroughUnitCheck) &&
(id === "timestamp" || id === "timestamp-fixed") (id === "timestamp" || id === "timestamp_fixed")
) { ) {
if (unit) throw Error(`Unit "${unit}" already assigned "${id}"`); if (unit) throw Error(`Unit "${unit}" already assigned "${id}"`);
unit = "Timestamp"; unit = "Timestamp";
@@ -905,25 +906,25 @@ function createUtils() {
if (unit) throw Error(`Unit "${unit}" already assigned "${id}"`); if (unit) throw Error(`Unit "${unit}" already assigned "${id}"`);
unit = "Height"; unit = "Height";
} }
if ((!unit || thoroughUnitCheck) && id.endsWith("relative-to-market-cap")) { if ((!unit || thoroughUnitCheck) && id.endsWith("relative_to_market_cap")) {
if (unit) throw Error(`Unit "${unit}" already assigned "${id}"`); if (unit) throw Error(`Unit "${unit}" already assigned "${id}"`);
unit = "%mcap"; unit = "%mcap";
} }
if ( if (
(!unit || thoroughUnitCheck) && (!unit || thoroughUnitCheck) &&
id.endsWith("relative-to-realized-cap") id.endsWith("relative_to_realized_cap")
) { ) {
if (unit) throw Error(`Unit "${unit}" already assigned "${id}"`); if (unit) throw Error(`Unit "${unit}" already assigned "${id}"`);
unit = "%rcap"; unit = "%rcap";
} }
if ( if (
(!unit || thoroughUnitCheck) && (!unit || thoroughUnitCheck) &&
id.endsWith("relative-to-circulating-supply") id.endsWith("relative_to_circulating_supply")
) { ) {
if (unit) throw Error(`Unit "${unit}" already assigned "${id}"`); if (unit) throw Error(`Unit "${unit}" already assigned "${id}"`);
unit = "%all"; unit = "%all";
} }
if ((!unit || thoroughUnitCheck) && id.endsWith("relative-to-own-supply")) { if ((!unit || thoroughUnitCheck) && id.endsWith("relative_to_own_supply")) {
if (unit) throw Error(`Unit "${unit}" already assigned "${id}"`); if (unit) throw Error(`Unit "${unit}" already assigned "${id}"`);
unit = "%self"; unit = "%self";
} }
@@ -941,12 +942,12 @@ function createUtils() {
} }
if ( if (
(!unit || thoroughUnitCheck) && (!unit || thoroughUnitCheck) &&
(id.includes("days-between") || id.includes("days-since")) (id.includes("days_between") || id.includes("days_since"))
) { ) {
if (unit) throw Error(`Unit "${unit}" already assigned "${id}"`); if (unit) throw Error(`Unit "${unit}" already assigned "${id}"`);
unit = "Days"; unit = "Days";
} }
if ((!unit || thoroughUnitCheck) && id.includes("years-between")) { if ((!unit || thoroughUnitCheck) && id.includes("years_between")) {
if (unit) throw Error(`Unit "${unit}" already assigned "${id}"`); if (unit) throw Error(`Unit "${unit}" already assigned "${id}"`);
unit = "Years"; unit = "Years";
} }
@@ -1170,6 +1171,7 @@ function createUtils() {
chartableIndex: { chartableIndex: {
/** /**
* @param {number} v * @param {number} v
* @returns {SerializedChartableIndex | null}
*/ */
serialize(v) { serialize(v) {
switch (v) { switch (v) {
@@ -1177,8 +1179,8 @@ function createUtils() {
return "date"; return "date";
case /** @satisfies {DecadeIndex} */ (1): case /** @satisfies {DecadeIndex} */ (1):
return "decade"; return "decade";
// case /** @satisfies {DifficultyEpoch} */ (2): case /** @satisfies {DifficultyEpoch} */ (2):
// return "difficulty"; return "diff. epoch";
// case /** @satisfies {HalvingEpoch} */ (4): // case /** @satisfies {HalvingEpoch} */ (4):
// return "halving"; // return "halving";
case /** @satisfies {Height} */ (5): case /** @satisfies {Height} */ (5):
@@ -1196,7 +1198,7 @@ function createUtils() {
} }
}, },
/** /**
* @param {string} v * @param {SerializedChartableIndex} v
* @returns {Index} * @returns {Index}
*/ */
deserialize(v) { deserialize(v) {
@@ -1207,6 +1209,8 @@ function createUtils() {
return /** @satisfies {DateIndex} */ (0); return /** @satisfies {DateIndex} */ (0);
case "week": case "week":
return /** @satisfies {WeekIndex} */ (22); return /** @satisfies {WeekIndex} */ (22);
case "diff. epoch":
return /** @satisfies {DifficultyEpoch} */ (2);
case "month": case "month":
return /** @satisfies {MonthIndex} */ (7); return /** @satisfies {MonthIndex} */ (7);
case "quarter": case "quarter":
@@ -1354,6 +1358,7 @@ function createUtils() {
const api = (() => { const api = (() => {
const CACHE_NAME = "api"; const CACHE_NAME = "api";
const API_VECS_PREFIX = "/api/vecs";
/** /**
* @template T * @template T
@@ -1362,7 +1367,7 @@ function createUtils() {
* @param {boolean} [mustBeArray] * @param {boolean} [mustBeArray]
*/ */
async function fetchApi(callback, path, mustBeArray) { async function fetchApi(callback, path, mustBeArray) {
const url = `/api${path}`; const url = `${API_VECS_PREFIX}${path}`;
/** @type {T | null} */ /** @type {T | null} */
let cachedJson = null; let cachedJson = null;
@@ -1452,7 +1457,7 @@ function createUtils() {
* @param {number} [to] * @param {number} [to]
*/ */
function genPath(index, vecId, from, to) { function genPath(index, vecId, from, to) {
let path = `/query?index=${serde.index.serialize(index)}&values=${vecId}`; let path = `/query?index=${serde.index.serialize(index)}&ids=${vecId}`;
if (from !== undefined) { if (from !== undefined) {
path += `&from=${from}`; path += `&from=${from}`;
} }
@@ -1469,7 +1474,7 @@ function createUtils() {
* @param {number} from * @param {number} from
*/ */
genUrl(index, vecId, from) { genUrl(index, vecId, from) {
return `/api${genPath(index, vecId, from)}`; return `${API_VECS_PREFIX}${genPath(index, vecId, from)}`;
}, },
/** /**
* @template {number | OHLCTuple} [T=number] * @template {number | OHLCTuple} [T=number]

File diff suppressed because it is too large Load Diff

File diff suppressed because it is too large Load Diff