v0.3.0-alpha.0
This commit is contained in:
nym21
2026-04-01 22:41:39 +02:00
committed by GitHub
664 changed files with 10328 additions and 9041 deletions

129
Cargo.lock generated
View File

@@ -338,7 +338,7 @@ checksum = "843867be96c8daad0d758b57df9392b6d8d271134fce549de6ce169ff98a92af"
[[package]]
name = "brk"
version = "0.2.5"
version = "0.3.0-alpha.0"
dependencies = [
"brk_bencher",
"brk_bindgen",
@@ -399,7 +399,7 @@ dependencies = [
[[package]]
name = "brk_alloc"
version = "0.2.5"
version = "0.3.0-alpha.0"
dependencies = [
"libmimalloc-sys",
"mimalloc",
@@ -407,7 +407,7 @@ dependencies = [
[[package]]
name = "brk_bencher"
version = "0.2.5"
version = "0.3.0-alpha.0"
dependencies = [
"brk_error",
"brk_logger",
@@ -417,14 +417,14 @@ dependencies = [
[[package]]
name = "brk_bencher_visualizer"
version = "0.2.5"
version = "0.3.0-alpha.0"
dependencies = [
"plotters",
]
[[package]]
name = "brk_bindgen"
version = "0.2.5"
version = "0.3.0-alpha.0"
dependencies = [
"brk_cohort",
"brk_query",
@@ -437,14 +437,13 @@ dependencies = [
[[package]]
name = "brk_cli"
version = "0.2.5"
version = "0.3.0-alpha.0"
dependencies = [
"anyhow",
"brk_alloc",
"brk_computer",
"brk_error",
"brk_indexer",
"brk_iterator",
"brk_logger",
"brk_mempool",
"brk_query",
@@ -463,7 +462,7 @@ dependencies = [
[[package]]
name = "brk_client"
version = "0.2.5"
version = "0.3.0-alpha.0"
dependencies = [
"brk_cohort",
"brk_types",
@@ -474,7 +473,7 @@ dependencies = [
[[package]]
name = "brk_cohort"
version = "0.2.5"
version = "0.3.0-alpha.0"
dependencies = [
"brk_error",
"brk_traversable",
@@ -486,7 +485,7 @@ dependencies = [
[[package]]
name = "brk_computer"
version = "0.2.5"
version = "0.3.0-alpha.0"
dependencies = [
"bitcoin",
"brk_alloc",
@@ -494,12 +493,10 @@ dependencies = [
"brk_cohort",
"brk_error",
"brk_indexer",
"brk_iterator",
"brk_logger",
"brk_oracle",
"brk_reader",
"brk_rpc",
"brk_store",
"brk_traversable",
"brk_types",
"color-eyre",
@@ -517,7 +514,7 @@ dependencies = [
[[package]]
name = "brk_error"
version = "0.2.5"
version = "0.3.0-alpha.0"
dependencies = [
"bitcoin",
"bitcoincore-rpc",
@@ -534,7 +531,7 @@ dependencies = [
[[package]]
name = "brk_fetcher"
version = "0.2.5"
version = "0.3.0-alpha.0"
dependencies = [
"brk_error",
"brk_logger",
@@ -546,14 +543,13 @@ dependencies = [
[[package]]
name = "brk_indexer"
version = "0.2.5"
version = "0.3.0-alpha.0"
dependencies = [
"bitcoin",
"brk_alloc",
"brk_bencher",
"brk_cohort",
"brk_error",
"brk_iterator",
"brk_logger",
"brk_reader",
"brk_rpc",
@@ -573,7 +569,7 @@ dependencies = [
[[package]]
name = "brk_iterator"
version = "0.2.5"
version = "0.3.0-alpha.0"
dependencies = [
"brk_error",
"brk_reader",
@@ -583,7 +579,7 @@ dependencies = [
[[package]]
name = "brk_logger"
version = "0.2.5"
version = "0.3.0-alpha.0"
dependencies = [
"jiff",
"owo-colors",
@@ -594,7 +590,7 @@ dependencies = [
[[package]]
name = "brk_mempool"
version = "0.2.5"
version = "0.3.0-alpha.0"
dependencies = [
"brk_error",
"brk_logger",
@@ -609,7 +605,7 @@ dependencies = [
[[package]]
name = "brk_oracle"
version = "0.2.5"
version = "0.3.0-alpha.0"
dependencies = [
"brk_indexer",
"brk_types",
@@ -619,7 +615,7 @@ dependencies = [
[[package]]
name = "brk_query"
version = "0.2.5"
version = "0.3.0-alpha.0"
dependencies = [
"bitcoin",
"brk_computer",
@@ -641,7 +637,7 @@ dependencies = [
[[package]]
name = "brk_reader"
version = "0.2.5"
version = "0.3.0-alpha.0"
dependencies = [
"bitcoin",
"brk_error",
@@ -656,7 +652,7 @@ dependencies = [
[[package]]
name = "brk_rpc"
version = "0.2.5"
version = "0.3.0-alpha.0"
dependencies = [
"bitcoin",
"bitcoincore-rpc",
@@ -673,7 +669,7 @@ dependencies = [
[[package]]
name = "brk_server"
version = "0.2.5"
version = "0.3.0-alpha.0"
dependencies = [
"aide",
"axum",
@@ -708,7 +704,7 @@ dependencies = [
[[package]]
name = "brk_store"
version = "0.2.5"
version = "0.3.0-alpha.0"
dependencies = [
"brk_error",
"brk_types",
@@ -719,7 +715,7 @@ dependencies = [
[[package]]
name = "brk_traversable"
version = "0.2.5"
version = "0.3.0-alpha.0"
dependencies = [
"brk_traversable_derive",
"brk_types",
@@ -732,7 +728,7 @@ dependencies = [
[[package]]
name = "brk_traversable_derive"
version = "0.2.5"
version = "0.3.0-alpha.0"
dependencies = [
"proc-macro2",
"quote",
@@ -741,7 +737,7 @@ dependencies = [
[[package]]
name = "brk_types"
version = "0.2.5"
version = "0.3.0-alpha.0"
dependencies = [
"bitcoin",
"brk_error",
@@ -764,7 +760,7 @@ dependencies = [
[[package]]
name = "brk_website"
version = "0.2.5"
version = "0.3.0-alpha.0"
dependencies = [
"axum",
"brk_logger",
@@ -1672,9 +1668,9 @@ checksum = "df3b46402a9d5adb4c86a0cf463f42e19994e3ee891101b1841f30a545cb49a9"
[[package]]
name = "hyper"
version = "1.8.1"
version = "1.9.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "2ab2d4f250c3d7b1c9fcdff1cece94ea4e2dfbec68614f7b87cb205f24ca9d11"
checksum = "6299f016b246a94207e63da54dbe807655bf9e00044f73ded42c3ac5305fbcca"
dependencies = [
"atomic-waker",
"bytes",
@@ -1686,7 +1682,6 @@ dependencies = [
"httpdate",
"itoa",
"pin-project-lite",
"pin-utils",
"smallvec",
"tokio",
]
@@ -1967,9 +1962,9 @@ checksum = "00810f1d8b74be64b13dbf3db89ac67740615d6c891f0e7b6179326533011a07"
[[package]]
name = "js-sys"
version = "0.3.92"
version = "0.3.94"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "cc4c90f45aa2e6eacbe8645f77fdea542ac97a494bcd117a67df9ff4d611f995"
checksum = "2e04e2ef80ce82e13552136fabeef8a5ed1f985a96805761cbb9a2c34e7664d9"
dependencies = [
"once_cell",
"wasm-bindgen",
@@ -2007,9 +2002,9 @@ checksum = "803ec87c9cfb29b9d2633f20cba1f488db3fd53f2158b1024cbefb47ba05d413"
[[package]]
name = "libc"
version = "0.2.183"
version = "0.2.184"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "b5b646652bf6661599e1da8901b3b9522896f01e736bad5f723fe7a3a27f899d"
checksum = "48f5d2a454e16a5ea0f4ced81bd44e4cfc7bd3a507b61887c99fd3538b28e4af"
[[package]]
name = "libloading"
@@ -2324,12 +2319,6 @@ version = "0.2.17"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "a89322df9ebe1c1578d689c92318e070967d1042b512afbe49518723f4e6d5cd"
[[package]]
name = "pin-utils"
version = "0.1.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "8b870d8c151b6f2fb93e84a13146138f05d02ed11c7e7c54f8826aaaf7c9f184"
[[package]]
name = "pkg-config"
version = "0.3.32"
@@ -2545,9 +2534,9 @@ dependencies = [
[[package]]
name = "rawdb"
version = "0.9.0"
version = "0.9.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "fddb06a11fcc5f7f44d9b5bee4ab61b5a1135232b2fd239253428abd192ba504"
checksum = "83fd9f9db42fd2d1adfbd7cf447f021776b3b8fd15e09788988fc18c61e1f6bc"
dependencies = [
"libc",
"log",
@@ -2916,9 +2905,9 @@ dependencies = [
[[package]]
name = "serde_spanned"
version = "1.1.0"
version = "1.1.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "876ac351060d4f882bb1032b6369eb0aef79ad9df1ea8bc404874d8cc3d0cd98"
checksum = "6662b5879511e06e8999a8a235d848113e942c9124f211511b16466ee2995f26"
dependencies = [
"serde_core",
]
@@ -3182,9 +3171,9 @@ dependencies = [
[[package]]
name = "toml"
version = "1.1.0+spec-1.1.0"
version = "1.1.1+spec-1.1.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "f8195ca05e4eb728f4ba94f3e3291661320af739c4e43779cbdfae82ab239fcc"
checksum = "994b95d9e7bae62b34bab0e2a4510b801fa466066a6a8b2b57361fa1eba068ee"
dependencies = [
"indexmap",
"serde_core",
@@ -3197,27 +3186,27 @@ dependencies = [
[[package]]
name = "toml_datetime"
version = "1.1.0+spec-1.1.0"
version = "1.1.1+spec-1.1.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "97251a7c317e03ad83774a8752a7e81fb6067740609f75ea2b585b569a59198f"
checksum = "3165f65f62e28e0115a00b2ebdd37eb6f3b641855f9d636d3cd4103767159ad7"
dependencies = [
"serde_core",
]
[[package]]
name = "toml_parser"
version = "1.1.0+spec-1.1.0"
version = "1.1.1+spec-1.1.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "2334f11ee363607eb04df9b8fc8a13ca1715a72ba8662a26ac285c98aabb4011"
checksum = "39ca317ebc49f06bd748bfba29533eac9485569dc9bf80b849024b025e814fb9"
dependencies = [
"winnow",
]
[[package]]
name = "toml_writer"
version = "1.1.0+spec-1.1.0"
version = "1.1.1+spec-1.1.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "d282ade6016312faf3e41e57ebbba0c073e4056dab1232ab1cb624199648f8ed"
checksum = "756daf9b1013ebe47a8776667b466417e2d4c5679d441c26230efd9ef78692db"
[[package]]
name = "tower"
@@ -3439,9 +3428,9 @@ checksum = "8f54a172d0620933a27a4360d3db3e2ae0dd6cceae9730751a036bbf182c4b23"
[[package]]
name = "vecdb"
version = "0.9.0"
version = "0.9.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "a33f1cbef9bf38048ee1b51328366f0a734e06bcc0b9739d68fef9ecce43d0b8"
checksum = "5422c45d12de71456700c199f9553319cb99e76311e413316dca7e9efd5133b6"
dependencies = [
"itoa",
"libc",
@@ -3462,9 +3451,9 @@ dependencies = [
[[package]]
name = "vecdb_derive"
version = "0.9.0"
version = "0.9.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "33d31f03d1c7269d65195fb4d54c1d510b124807871bd11af7d10a08700d7590"
checksum = "5b075be4cec2d718d40dc422cef038c10d6fcce4aad594199cc0a301a4985146"
dependencies = [
"quote",
"syn",
@@ -3512,9 +3501,9 @@ dependencies = [
[[package]]
name = "wasm-bindgen"
version = "0.2.115"
version = "0.2.117"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "6523d69017b7633e396a89c5efab138161ed5aafcbc8d3e5c5a42ae38f50495a"
checksum = "0551fc1bb415591e3372d0bc4780db7e587d84e2a7e79da121051c5c4b89d0b0"
dependencies = [
"cfg-if",
"once_cell",
@@ -3525,9 +3514,9 @@ dependencies = [
[[package]]
name = "wasm-bindgen-macro"
version = "0.2.115"
version = "0.2.117"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "4e3a6c758eb2f701ed3d052ff5737f5bfe6614326ea7f3bbac7156192dc32e67"
checksum = "7fbdf9a35adf44786aecd5ff89b4563a90325f9da0923236f6104e603c7e86be"
dependencies = [
"quote",
"wasm-bindgen-macro-support",
@@ -3535,9 +3524,9 @@ dependencies = [
[[package]]
name = "wasm-bindgen-macro-support"
version = "0.2.115"
version = "0.2.117"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "921de2737904886b52bcbb237301552d05969a6f9c40d261eb0533c8b055fedf"
checksum = "dca9693ef2bab6d4e6707234500350d8dad079eb508dca05530c85dc3a529ff2"
dependencies = [
"bumpalo",
"proc-macro2",
@@ -3548,9 +3537,9 @@ dependencies = [
[[package]]
name = "wasm-bindgen-shared"
version = "0.2.115"
version = "0.2.117"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "a93e946af942b58934c604527337bad9ae33ba1d5c6900bbb41c2c07c2364a93"
checksum = "39129a682a6d2d841b6c429d0c51e5cb0ed1a03829d8b3d1e69a011e62cb3d3b"
dependencies = [
"unicode-ident",
]
@@ -3591,9 +3580,9 @@ dependencies = [
[[package]]
name = "web-sys"
version = "0.3.92"
version = "0.3.94"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "84cde8507f4d7cfcb1185b8cb5890c494ffea65edbe1ba82cfd63661c805ed94"
checksum = "cd70027e39b12f0849461e08ffc50b9cd7688d942c1c8e3c7b22273236b4dd0a"
dependencies = [
"js-sys",
"wasm-bindgen",
@@ -3788,9 +3777,9 @@ checksum = "589f6da84c646204747d1270a2a5661ea66ed1cced2631d546fdfb155959f9ec"
[[package]]
name = "winnow"
version = "1.0.0"
version = "1.0.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "a90e88e4667264a994d34e6d1ab2d26d398dcdca8b7f52bec8668957517fc7d8"
checksum = "09dac053f1cd375980747450bfc7250c264eaae0583872e845c0c7cd578872b5"
[[package]]
name = "wio"

View File

@@ -4,7 +4,7 @@ members = ["crates/*"]
package.description = "The Bitcoin Research Kit is a suite of tools designed to extract, compute and display data stored on a Bitcoin Core node"
package.license = "MIT"
package.edition = "2024"
package.version = "0.2.5"
package.version = "0.3.0-alpha.0"
package.homepage = "https://bitcoinresearchkit.org"
package.repository = "https://github.com/bitcoinresearchkit/brk"
package.readme = "README.md"
@@ -40,29 +40,29 @@ aide = { version = "0.16.0-alpha.3", features = ["axum-json", "axum-query"] }
axum = { version = "0.8.8", default-features = false, features = ["http1", "json", "query", "tokio", "tracing"] }
bitcoin = { version = "0.32.8", features = ["serde"] }
bitcoincore-rpc = "0.19.0"
brk_alloc = { version = "0.2.5", path = "crates/brk_alloc" }
brk_bencher = { version = "0.2.5", path = "crates/brk_bencher" }
brk_bindgen = { version = "0.2.5", path = "crates/brk_bindgen" }
brk_cli = { version = "0.2.5", path = "crates/brk_cli" }
brk_client = { version = "0.2.5", path = "crates/brk_client" }
brk_cohort = { version = "0.2.5", path = "crates/brk_cohort" }
brk_computer = { version = "0.2.5", path = "crates/brk_computer" }
brk_error = { version = "0.2.5", path = "crates/brk_error" }
brk_fetcher = { version = "0.2.5", path = "crates/brk_fetcher" }
brk_indexer = { version = "0.2.5", path = "crates/brk_indexer" }
brk_iterator = { version = "0.2.5", path = "crates/brk_iterator" }
brk_logger = { version = "0.2.5", path = "crates/brk_logger" }
brk_mempool = { version = "0.2.5", path = "crates/brk_mempool" }
brk_oracle = { version = "0.2.5", path = "crates/brk_oracle" }
brk_query = { version = "0.2.5", path = "crates/brk_query", features = ["tokio"] }
brk_reader = { version = "0.2.5", path = "crates/brk_reader" }
brk_rpc = { version = "0.2.5", path = "crates/brk_rpc" }
brk_server = { version = "0.2.5", path = "crates/brk_server" }
brk_store = { version = "0.2.5", path = "crates/brk_store" }
brk_traversable = { version = "0.2.5", path = "crates/brk_traversable", features = ["pco", "derive"] }
brk_traversable_derive = { version = "0.2.5", path = "crates/brk_traversable_derive" }
brk_types = { version = "0.2.5", path = "crates/brk_types" }
brk_website = { version = "0.2.5", path = "crates/brk_website" }
brk_alloc = { version = "0.3.0-alpha.0", path = "crates/brk_alloc" }
brk_bencher = { version = "0.3.0-alpha.0", path = "crates/brk_bencher" }
brk_bindgen = { version = "0.3.0-alpha.0", path = "crates/brk_bindgen" }
brk_cli = { version = "0.3.0-alpha.0", path = "crates/brk_cli" }
brk_client = { version = "0.3.0-alpha.0", path = "crates/brk_client" }
brk_cohort = { version = "0.3.0-alpha.0", path = "crates/brk_cohort" }
brk_computer = { version = "0.3.0-alpha.0", path = "crates/brk_computer" }
brk_error = { version = "0.3.0-alpha.0", path = "crates/brk_error" }
brk_fetcher = { version = "0.3.0-alpha.0", path = "crates/brk_fetcher" }
brk_indexer = { version = "0.3.0-alpha.0", path = "crates/brk_indexer" }
brk_iterator = { version = "0.3.0-alpha.0", path = "crates/brk_iterator" }
brk_logger = { version = "0.3.0-alpha.0", path = "crates/brk_logger" }
brk_mempool = { version = "0.3.0-alpha.0", path = "crates/brk_mempool" }
brk_oracle = { version = "0.3.0-alpha.0", path = "crates/brk_oracle" }
brk_query = { version = "0.3.0-alpha.0", path = "crates/brk_query", features = ["tokio"] }
brk_reader = { version = "0.3.0-alpha.0", path = "crates/brk_reader" }
brk_rpc = { version = "0.3.0-alpha.0", path = "crates/brk_rpc" }
brk_server = { version = "0.3.0-alpha.0", path = "crates/brk_server" }
brk_store = { version = "0.3.0-alpha.0", path = "crates/brk_store" }
brk_traversable = { version = "0.3.0-alpha.0", path = "crates/brk_traversable", features = ["pco", "derive"] }
brk_traversable_derive = { version = "0.3.0-alpha.0", path = "crates/brk_traversable_derive" }
brk_types = { version = "0.3.0-alpha.0", path = "crates/brk_types" }
brk_website = { version = "0.3.0-alpha.0", path = "crates/brk_website" }
byteview = "0.10.1"
color-eyre = "0.6.5"
corepc-client = { package = "brk-corepc-client", version = "0.11.0", features = ["client-sync"] }
@@ -87,7 +87,7 @@ tower-http = { version = "0.6.8", features = ["catch-panic", "compression-br", "
tower-layer = "0.3"
tracing = { version = "0.1", default-features = false, features = ["std"] }
ureq = { version = "3.3.0", features = ["json"] }
vecdb = { version = "0.9.0", features = ["derive", "serde_json", "pco", "schemars"] }
vecdb = { version = "0.9.2", features = ["derive", "serde_json", "pco", "schemars"] }
# vecdb = { path = "../anydb/crates/vecdb", features = ["derive", "serde_json", "pco", "schemars"] }
[workspace.metadata.release]
@@ -95,6 +95,7 @@ shared-version = true
tag-name = "v{{version}}"
pre-release-commit-message = "release: v{{version}}"
tag-message = "release: v{{version}}"
allow-branch = ["main", "next"]
[workspace.metadata.dist]
cargo-dist-version = "0.30.2"

View File

@@ -82,18 +82,19 @@ pub fn generate_api_methods(output: &mut String, endpoints: &[Endpoint]) {
} else {
writeln!(output, " const params = new URLSearchParams();").unwrap();
for param in &endpoint.query_params {
let ident = sanitize_ident(&param.name);
if param.required {
writeln!(
output,
" params.set('{}', String({}));",
param.name, param.name
param.name, ident
)
.unwrap();
} else {
writeln!(
output,
" if ({} !== undefined) params.set('{}', String({}));",
param.name, param.name, param.name
ident, param.name, ident
)
.unwrap();
}
@@ -127,14 +128,19 @@ fn endpoint_to_method_name(endpoint: &Endpoint) -> String {
fn build_method_params(endpoint: &Endpoint) -> String {
let mut params = Vec::new();
for param in &endpoint.path_params {
params.push(param.name.clone());
params.push(sanitize_ident(&param.name));
}
for param in &endpoint.query_params {
params.push(param.name.clone());
params.push(sanitize_ident(&param.name));
}
params.join(", ")
}
/// Strip characters invalid in JS identifiers (e.g. `[]` from `txId[]`).
fn sanitize_ident(name: &str) -> String {
name.replace(['[', ']'], "")
}
fn build_path_template(path: &str, path_params: &[Parameter]) -> String {
let mut result = path.to_string();
for param in path_params {

View File

@@ -22,6 +22,20 @@ pub fn generate_base_client(output: &mut String) {
const _isBrowser = typeof window !== 'undefined' && 'caches' in window;
const _runIdle = (/** @type {{VoidFunction}} */ fn) => (globalThis.requestIdleCallback ?? setTimeout)(fn);
const _defaultCacheName = '__BRK_CLIENT__';
/** @param {{*}} v */
const _addCamelGetters = (v) => {{
if (Array.isArray(v)) {{ v.forEach(_addCamelGetters); return v; }}
if (v && typeof v === 'object' && v.constructor === Object) {{
for (const k in v) {{
if (k.includes('_')) {{
const c = k.replace(/_([a-z])/g, (_, l) => l.toUpperCase());
if (!(c in v)) Object.defineProperty(v, c, {{ get() {{ return this[k]; }} }});
}}
_addCamelGetters(v[k]);
}}
}}
return v;
}};
/**
* @param {{string|boolean|undefined}} cache
@@ -418,7 +432,7 @@ class BrkClientBase {{
const cachePromise = cache?.match(url).then(async (res) => {{
cachedRes = res ?? null;
if (!res) return null;
const json = await res.json();
const json = _addCamelGetters(await res.json());
if (!resolved && onUpdate) {{
resolved = true;
onUpdate(json);
@@ -428,7 +442,7 @@ class BrkClientBase {{
const networkPromise = this.get(path).then(async (res) => {{
const cloned = res.clone();
const json = await res.json();
const json = _addCamelGetters(await res.json());
// Skip update if ETag matches and cache already delivered
if (cachedRes?.headers.get('ETag') === res.headers.get('ETag')) {{
if (!resolved && onUpdate) {{

View File

@@ -101,7 +101,7 @@ pub fn generate_api_methods(output: &mut String, endpoints: &[Endpoint]) {
.response_type
.as_deref()
.map(js_type_to_python)
.unwrap_or_else(|| "Any".to_string()),
.unwrap_or_else(|| "str".to_string()),
);
let return_type = if endpoint.supports_csv {
@@ -159,11 +159,19 @@ pub fn generate_api_methods(output: &mut String, endpoints: &[Endpoint]) {
// Build path
let path = build_path_template(&endpoint.path, &endpoint.path_params);
let fetch_method = if endpoint.returns_json() {
"get_json"
} else {
"get_text"
};
if endpoint.query_params.is_empty() {
if endpoint.path_params.is_empty() {
writeln!(output, " return self.get_json('{}')", path).unwrap();
writeln!(output, " return self.{}('{}')", fetch_method, path)
.unwrap();
} else {
writeln!(output, " return self.get_json(f'{}')", path).unwrap();
writeln!(output, " return self.{}(f'{}')", fetch_method, path)
.unwrap();
}
} else {
writeln!(output, " params = []").unwrap();
@@ -197,9 +205,9 @@ pub fn generate_api_methods(output: &mut String, endpoints: &[Endpoint]) {
if endpoint.supports_csv {
writeln!(output, " if format == 'csv':").unwrap();
writeln!(output, " return self.get_text(path)").unwrap();
writeln!(output, " return self.get_json(path)").unwrap();
writeln!(output, " return self.{}(path)", fetch_method).unwrap();
} else {
writeln!(output, " return self.get_json(path)").unwrap();
writeln!(output, " return self.{}(path)", fetch_method).unwrap();
}
}

View File

@@ -93,7 +93,7 @@ pub fn generate_api_methods(output: &mut String, endpoints: &[Endpoint]) {
.response_type
.as_deref()
.map(js_type_to_rust)
.unwrap_or_else(|| "serde_json::Value".to_string());
.unwrap_or_else(|| "String".to_string());
let return_type = if endpoint.supports_csv {
format!("FormatResponse<{}>", base_return_type)
@@ -132,29 +132,43 @@ pub fn generate_api_methods(output: &mut String, endpoints: &[Endpoint]) {
.unwrap();
let (path, index_arg) = build_path_template(endpoint);
let fetch_method = if endpoint.returns_json() {
"get_json"
} else {
"get_text"
};
if endpoint.query_params.is_empty() {
writeln!(
output,
" self.base.get_json(&format!(\"{}\"{}))",
path, index_arg
" self.base.{}(&format!(\"{}\"{}))",
fetch_method, path, index_arg
)
.unwrap();
} else {
writeln!(output, " let mut query = Vec::new();").unwrap();
for param in &endpoint.query_params {
if param.required {
let ident = sanitize_ident(&param.name);
let is_array = param.param_type.ends_with("[]");
if is_array {
writeln!(
output,
" for v in {} {{ query.push(format!(\"{}={{}}\", v)); }}",
ident, param.name
)
.unwrap();
} else if param.required {
writeln!(
output,
" query.push(format!(\"{}={{}}\", {}));",
param.name, param.name
param.name, ident
)
.unwrap();
} else {
writeln!(
output,
" if let Some(v) = {} {{ query.push(format!(\"{}={{}}\", v)); }}",
param.name, param.name
ident, param.name
)
.unwrap();
}
@@ -177,12 +191,14 @@ pub fn generate_api_methods(output: &mut String, endpoints: &[Endpoint]) {
writeln!(output, " }} else {{").unwrap();
writeln!(
output,
" self.base.get_json(&path).map(FormatResponse::Json)"
" self.base.{}(&path).map(FormatResponse::Json)",
fetch_method
)
.unwrap();
writeln!(output, " }}").unwrap();
} else {
writeln!(output, " self.base.get_json(&path)").unwrap();
writeln!(output, " self.base.{}(&path)", fetch_method)
.unwrap();
}
}
@@ -198,26 +214,35 @@ fn build_method_params(endpoint: &Endpoint) -> String {
let mut params = Vec::new();
for param in &endpoint.path_params {
let rust_type = param_type_to_rust(&param.param_type);
params.push(format!(", {}: {}", param.name, rust_type));
params.push(format!(", {}: {}", sanitize_ident(&param.name), rust_type));
}
for param in &endpoint.query_params {
let rust_type = param_type_to_rust(&param.param_type);
let name = sanitize_ident(&param.name);
if param.required {
params.push(format!(", {}: {}", param.name, rust_type));
params.push(format!(", {}: {}", name, rust_type));
} else {
params.push(format!(", {}: Option<{}>", param.name, rust_type));
params.push(format!(", {}: Option<{}>", name, rust_type));
}
}
params.join("")
}
/// Strip characters invalid in Rust identifiers (e.g. `[]` from `txId[]`).
fn sanitize_ident(name: &str) -> String {
name.replace(['[', ']'], "")
}
/// Convert parameter type to Rust type for function signatures.
fn param_type_to_rust(param_type: &str) -> String {
if let Some(inner) = param_type.strip_suffix("[]") {
return format!("&[{}]", param_type_to_rust(inner));
}
match param_type {
"string" | "*" => "&str".to_string(),
"integer" | "number" => "i64".to_string(),
"boolean" => "bool".to_string(),
other => other.to_string(), // Domain types like Index, SeriesName, Format
other => other.to_string(),
}
}

View File

@@ -43,6 +43,11 @@ impl Endpoint {
self.method == "GET" && !self.deprecated
}
/// Returns true if this endpoint returns JSON (has a response_type extracted from application/json).
pub fn returns_json(&self) -> bool {
self.response_type.is_some()
}
/// Returns the operation ID or generates one from the path.
/// The returned string uses the raw case from the spec (typically camelCase).
pub fn operation_name(&self) -> String {

View File

@@ -74,6 +74,9 @@ pub fn escape_python_keyword(name: &str) -> String {
"try", "while", "with", "yield",
];
// Strip characters invalid in identifiers (e.g. `[]` from `txId[]`)
let name = name.replace(['[', ']'], "");
// Prefix with underscore if starts with digit
let name = if name.starts_with(|c: char| c.is_ascii_digit()) {
format!("_{}", name)

View File

@@ -13,7 +13,6 @@ brk_alloc = { workspace = true }
brk_computer = { workspace = true }
brk_error = { workspace = true, features = ["tokio", "vecdb"] }
brk_indexer = { workspace = true }
brk_iterator = { workspace = true }
brk_logger = { workspace = true }
brk_mempool = { workspace = true }
brk_query = { workspace = true }
@@ -26,7 +25,7 @@ owo-colors = { workspace = true }
tracing = { workspace = true }
serde = { workspace = true }
tokio = { workspace = true }
toml = "1.1.0"
toml = "1.1.1"
vecdb = { workspace = true }
[[bin]]

View File

@@ -1,11 +1,8 @@
# BRK CLI
Command-line interface for running a Bitcoin Research Kit instance.
Run your own Bitcoin Research Kit instance. One binary, one command. Full sync in ~4-7h depending on hardware. ~44% disk overhead vs 250% for mempool/electrs.
## Demo
- [bitview.space](https://bitview.space) - web interface
- [bitview.space/api](https://bitview.space/api) - API docs
[bitview.space](https://bitview.space) is the official free hosted instance.
## Requirements

View File

@@ -10,7 +10,6 @@ use brk_alloc::Mimalloc;
use brk_computer::Computer;
use brk_error::Result;
use brk_indexer::Indexer;
use brk_iterator::Blocks;
use brk_mempool::Mempool;
use brk_query::AsyncQuery;
use brk_reader::Reader;
@@ -37,8 +36,6 @@ pub fn main() -> anyhow::Result<()> {
let reader = Reader::new(config.blocksdir(), &client);
let blocks = Blocks::new(&client, &reader);
let mut indexer = Indexer::forced_import(&config.brkdir())?;
#[cfg(not(debug_assertions))]
@@ -52,7 +49,7 @@ pub fn main() -> anyhow::Result<()> {
info!("Indexing {blocks_behind} blocks before starting server...");
info!("---");
sleep(Duration::from_secs(10));
indexer.index(&blocks, &client, &exit)?;
indexer.index(&reader, &client, &exit)?;
drop(indexer);
Mimalloc::collect();
indexer = Indexer::forced_import(&config.brkdir())?;
@@ -102,14 +99,14 @@ pub fn main() -> anyhow::Result<()> {
let total_start = Instant::now();
let starting_indexes = if cfg!(debug_assertions) {
indexer.checked_index(&blocks, &client, &exit)?
indexer.checked_index(&reader, &client, &exit)?
} else {
indexer.index(&blocks, &client, &exit)?
indexer.index(&reader, &client, &exit)?
};
Mimalloc::collect();
computer.compute(&indexer, starting_indexes, &reader, &exit)?;
computer.compute(&indexer, starting_indexes, &exit)?;
info!("Total time: {:?}", total_start.elapsed());
info!("Waiting for new blocks...");

File diff suppressed because it is too large Load Diff

View File

@@ -14,3 +14,6 @@ brk_traversable = { workspace = true }
vecdb = { workspace = true }
rayon = { workspace = true }
serde = { workspace = true }
[package.metadata.cargo-machete]
ignored = ["vecdb"]

View File

@@ -14,11 +14,8 @@ brk_error = { workspace = true, features = ["vecdb"] }
brk_cohort = { workspace = true }
brk_indexer = { workspace = true }
brk_oracle = { workspace = true }
brk_iterator = { workspace = true }
brk_logger = { workspace = true }
brk_reader = { workspace = true }
brk_rpc = { workspace = true, features = ["corepc"] }
brk_store = { workspace = true }
brk_traversable = { workspace = true }
brk_types = { workspace = true }
derive_more = { workspace = true }
@@ -33,6 +30,7 @@ smallvec = { workspace = true }
vecdb = { workspace = true }
[dev-dependencies]
brk_reader = { workspace = true }
brk_alloc = { workspace = true }
brk_bencher = { workspace = true }
color-eyre = { workspace = true }

View File

@@ -8,7 +8,6 @@ use std::{
use brk_alloc::Mimalloc;
use brk_computer::Computer;
use brk_indexer::Indexer;
use brk_iterator::Blocks;
use brk_reader::Reader;
use brk_rpc::{Auth, Client};
use vecdb::Exit;
@@ -31,8 +30,6 @@ pub fn main() -> color_eyre::Result<()> {
let reader = Reader::new(bitcoin_dir.join("blocks"), &client);
let blocks = Blocks::new(&client, &reader);
let mut indexer = Indexer::forced_import(&outputs_dir)?;
let exit = Exit::new();
@@ -42,7 +39,7 @@ pub fn main() -> color_eyre::Result<()> {
let chain_height = client.get_last_height()?;
let indexed_height = indexer.vecs.starting_height();
if u32::from(chain_height).saturating_sub(u32::from(indexed_height)) > 1000 {
indexer.checked_index(&blocks, &client, &exit)?;
indexer.checked_index(&reader, &client, &exit)?;
drop(indexer);
Mimalloc::collect();
indexer = Indexer::forced_import(&outputs_dir)?;
@@ -52,11 +49,11 @@ pub fn main() -> color_eyre::Result<()> {
loop {
let i = Instant::now();
let starting_indexes = indexer.checked_index(&blocks, &client, &exit)?;
let starting_indexes = indexer.checked_index(&reader, &client, &exit)?;
Mimalloc::collect();
computer.compute(&indexer, starting_indexes, &reader, &exit)?;
computer.compute(&indexer, starting_indexes, &exit)?;
dbg!(i.elapsed());
sleep(Duration::from_secs(10));
}

View File

@@ -5,7 +5,6 @@ use brk_bencher::Bencher;
use brk_computer::Computer;
use brk_error::Result;
use brk_indexer::Indexer;
use brk_iterator::Blocks;
use brk_reader::Reader;
use brk_rpc::{Auth, Client};
use tracing::{debug, info};
@@ -28,8 +27,6 @@ pub fn main() -> Result<()> {
let reader = Reader::new(bitcoin_dir.join("blocks"), &client);
let blocks = Blocks::new(&client, &reader);
let mut indexer = Indexer::forced_import(&outputs_dir)?;
let mut computer = Computer::forced_import(&outputs_benches_dir, &indexer)?;
@@ -47,13 +44,13 @@ pub fn main() -> Result<()> {
});
let i = Instant::now();
let starting_indexes = indexer.index(&blocks, &client, &exit)?;
let starting_indexes = indexer.index(&reader, &client, &exit)?;
info!("Done in {:?}", i.elapsed());
Mimalloc::collect();
let i = Instant::now();
computer.compute(&indexer, starting_indexes, &reader, &exit)?;
computer.compute(&indexer, starting_indexes, &exit)?;
info!("Done in {:?}", i.elapsed());
// We want to benchmark the drop too

View File

@@ -9,7 +9,6 @@ use brk_alloc::Mimalloc;
use brk_bencher::Bencher;
use brk_computer::Computer;
use brk_indexer::Indexer;
use brk_iterator::Blocks;
use brk_reader::Reader;
use brk_rpc::{Auth, Client};
use tracing::{debug, info};
@@ -45,15 +44,13 @@ pub fn main() -> color_eyre::Result<()> {
let reader = Reader::new(bitcoin_dir.join("blocks"), &client);
let blocks = Blocks::new(&client, &reader);
let mut indexer = Indexer::forced_import(&outputs_dir)?;
// Pre-run indexer if too far behind, then drop and reimport to reduce memory
let chain_height = client.get_last_height()?;
let indexed_height = indexer.vecs.starting_height();
if chain_height.saturating_sub(*indexed_height) > 1000 {
indexer.index(&blocks, &client, &exit)?;
indexer.index(&reader, &client, &exit)?;
drop(indexer);
Mimalloc::collect();
indexer = Indexer::forced_import(&outputs_dir)?;
@@ -63,13 +60,13 @@ pub fn main() -> color_eyre::Result<()> {
loop {
let i = Instant::now();
let starting_indexes = indexer.index(&blocks, &client, &exit)?;
let starting_indexes = indexer.index(&reader, &client, &exit)?;
info!("Done in {:?}", i.elapsed());
Mimalloc::collect();
let i = Instant::now();
computer.compute(&indexer, starting_indexes, &reader, &exit)?;
computer.compute(&indexer, starting_indexes, &exit)?;
info!("Done in {:?}", i.elapsed());
sleep(Duration::from_secs(60));

View File

@@ -7,7 +7,7 @@ use brk_types::{
use rayon::prelude::*;
use rustc_hash::FxHashSet;
use tracing::{debug, info};
use vecdb::{AnyStoredVec, AnyVec, Exit, ReadableVec, VecIndex, WritableVec};
use vecdb::{AnyStoredVec, AnyVec, Exit, ReadableVec, VecIndex, WritableVec, unlikely};
use crate::{
distribution::{
@@ -243,7 +243,11 @@ pub(crate) fn process_blocks(
for height in starting_height.to_usize()..=last_height.to_usize() {
let height = Height::from(height);
info!("Processing chain at {}...", height);
if unlikely(height.is_multiple_of(100)) {
info!("Processing chain at {}...", height);
} else {
debug!("Processing chain at {}...", height);
}
// Get block metadata from pre-collected vecs
let offset = height.to_usize() - start_usize;

View File

@@ -4,7 +4,6 @@ use std::{fs, path::Path, thread, time::Instant};
use brk_error::Result;
use brk_indexer::Indexer;
use brk_reader::Reader;
use brk_traversable::Traversable;
use brk_types::Version;
use tracing::info;
@@ -23,7 +22,6 @@ mod market;
mod mining;
mod outputs;
mod pools;
mod positions;
pub mod prices;
mod scripts;
mod supply;
@@ -35,7 +33,6 @@ pub struct Computer<M: StorageMode = Rw> {
pub mining: Box<mining::Vecs<M>>,
pub transactions: Box<transactions::Vecs<M>>,
pub scripts: Box<scripts::Vecs<M>>,
pub positions: Box<positions::Vecs<M>>,
pub cointime: Box<cointime::Vecs<M>>,
pub constants: Box<constants::Vecs>,
pub indexes: Box<indexes::Vecs<M>>,
@@ -63,24 +60,12 @@ impl Computer {
const STACK_SIZE: usize = 8 * 1024 * 1024;
let big_thread = || thread::Builder::new().stack_size(STACK_SIZE);
let (indexes, positions) = timed("Imported indexes/positions", || {
thread::scope(|s| -> Result<_> {
let positions_handle = big_thread().spawn_scoped(s, || -> Result<_> {
Ok(Box::new(positions::Vecs::forced_import(
&computed_path,
VERSION,
)?))
})?;
let indexes = Box::new(indexes::Vecs::forced_import(
&computed_path,
VERSION,
indexer,
)?);
let positions = positions_handle.join().unwrap()?;
Ok((indexes, positions))
})
let indexes = timed("Imported indexes", || -> Result<_> {
Ok(Box::new(indexes::Vecs::forced_import(
&computed_path,
VERSION,
indexer,
)?))
})?;
let (constants, prices) = timed("Imported prices/constants", || -> Result<_> {
@@ -257,7 +242,6 @@ impl Computer {
market,
distribution,
supply,
positions,
pools,
cointime,
indexes,
@@ -278,7 +262,6 @@ impl Computer {
mining::DB_NAME,
transactions::DB_NAME,
scripts::DB_NAME,
positions::DB_NAME,
cointime::DB_NAME,
indicators::DB_NAME,
indexes::DB_NAME,
@@ -319,7 +302,6 @@ impl Computer {
&mut self,
indexer: &Indexer,
starting_indexes: brk_indexer::Indexes,
reader: &Reader,
exit: &Exit,
) -> Result<()> {
internal::cache_clear_all();
@@ -387,13 +369,6 @@ impl Computer {
)
})?;
let positions = scope.spawn(|| {
timed("Computed positions", || {
self.positions
.compute(indexer, &starting_indexes, reader, exit)
})
});
timed("Computed transactions", || {
self.transactions.compute(
indexer,
@@ -419,7 +394,6 @@ impl Computer {
)
})?;
positions.join().unwrap()?;
market.join().unwrap()?;
Ok(())
})?;
@@ -561,7 +535,6 @@ impl_iter_named!(
mining,
transactions,
scripts,
positions,
cointime,
constants,
indicators,

View File

@@ -25,7 +25,7 @@ impl Vecs {
indexer,
indexes,
&blocks.lookback,
&transactions.fees,
transactions,
prices,
starting_indexes,
exit,

View File

@@ -17,7 +17,7 @@ impl Vecs {
indexer: &Indexer,
indexes: &indexes::Vecs,
lookback: &blocks::LookbackVecs,
transactions_fees: &transactions::FeesVecs,
transactions: &transactions::Vecs,
prices: &prices::Vecs,
starting_indexes: &Indexes,
exit: &Exit,
@@ -67,7 +67,7 @@ impl Vecs {
starting_indexes.height,
&indexer.vecs.transactions.first_tx_index,
&indexes.height.tx_index_count,
&transactions_fees.fee.tx_index,
&transactions.fees.fee.tx_index,
exit,
)?;
Ok(())
@@ -95,6 +95,13 @@ impl Vecs {
self.subsidy
.compute_rest(starting_indexes.height, prices, exit)?;
self.output_volume.compute_subtract(
starting_indexes.height,
&transactions.volume.transfer_volume.block.sats,
&self.fees.block.sats,
exit,
)?;
self.unclaimed.block.sats.compute_transform(
starting_indexes.height,
&self.subsidy.block.sats,

View File

@@ -1,6 +1,6 @@
use brk_error::Result;
use brk_types::Version;
use vecdb::Database;
use vecdb::{Database, EagerVec, ImportableVec};
use super::Vecs;
use crate::{
@@ -44,6 +44,7 @@ impl Vecs {
cached_starts,
)?,
fees: AmountPerBlockFull::forced_import(db, "fees", version, indexes, cached_starts)?,
output_volume: EagerVec::forced_import(db, "output_volume", version)?,
unclaimed: AmountPerBlockCumulative::forced_import(
db,
"unclaimed_rewards",

View File

@@ -1,6 +1,6 @@
use brk_traversable::Traversable;
use brk_types::{BasisPoints16, BasisPoints32};
use vecdb::{Rw, StorageMode};
use brk_types::{BasisPoints16, BasisPoints32, Height, Sats};
use vecdb::{EagerVec, PcoVec, Rw, StorageMode};
use crate::internal::{
AmountPerBlockCumulative, AmountPerBlockCumulativeRolling, AmountPerBlockFull,
@@ -12,6 +12,7 @@ pub struct Vecs<M: StorageMode = Rw> {
pub coinbase: AmountPerBlockCumulativeRolling<M>,
pub subsidy: AmountPerBlockCumulativeRolling<M>,
pub fees: AmountPerBlockFull<M>,
pub output_volume: M::Stored<EagerVec<PcoVec<Height, Sats>>>,
pub unclaimed: AmountPerBlockCumulative<M>,
#[traversable(wrap = "fees", rename = "dominance")]
pub fee_dominance: PercentPerBlock<BasisPoints16, M>,

View File

@@ -2,7 +2,6 @@ use std::{collections::BTreeMap, path::Path};
use brk_error::Result;
use brk_indexer::Indexer;
use brk_store::AnyStore;
use brk_traversable::Traversable;
use brk_types::{Addr, AddrBytes, Height, Indexes, OutputType, PoolSlug, Pools, TxOutIndex, pools};
use rayon::prelude::*;
@@ -114,8 +113,18 @@ impl Vecs {
starting_indexes: &Indexes,
exit: &Exit,
) -> Result<()> {
let dep_version = indexer.vecs.blocks.coinbase_tag.version();
let pool_vec_version = self.pool.header().vec_version();
let pool_computed = self.pool.header().computed_version();
let expected = pool_vec_version + dep_version;
if expected != pool_computed {
tracing::warn!(
"Pool version mismatch: vec_version={pool_vec_version:?} + dep={dep_version:?} = {expected:?}, stored computed={pool_computed:?}, len={}",
self.pool.len()
);
}
self.pool
.validate_computed_version_or_reset(indexer.stores.height_to_coinbase_tag.version())?;
.validate_computed_version_or_reset(dep_version)?;
let first_txout_index = indexer.vecs.transactions.first_txout_index.reader();
let output_type = indexer.vecs.outputs.output_type.reader();
@@ -142,12 +151,12 @@ impl Vecs {
self.pool.truncate_if_needed_at(min)?;
indexer
.stores
.height_to_coinbase_tag
.iter()
.skip(min)
.try_for_each(|(_, coinbase_tag)| -> Result<()> {
let len = indexer.vecs.blocks.coinbase_tag.len();
indexer.vecs.blocks.coinbase_tag.try_for_each_range_at(
min,
len,
|coinbase_tag| -> Result<()> {
let tx_index = first_tx_index_cursor.next().unwrap();
let out_start = first_txout_index.get(tx_index.to_usize());
@@ -174,12 +183,13 @@ impl Vecs {
.map(|bytes| Addr::try_from(&bytes).unwrap())
.and_then(|addr| self.pools.find_from_addr(&addr))
})
.or_else(|| self.pools.find_from_coinbase_tag(&coinbase_tag))
.or_else(|| self.pools.find_from_coinbase_tag(&coinbase_tag.as_str()))
.unwrap_or(unknown);
self.pool.push(pool.slug);
Ok(())
})?;
},
)?;
let _lock = exit.lock();
self.pool.write()?;

View File

@@ -1,147 +0,0 @@
use std::{fs, path::Path};
use brk_error::Result;
use brk_indexer::Indexer;
use brk_reader::{Reader, XOR_LEN, XORBytes};
use brk_traversable::Traversable;
use brk_types::{BlkPosition, Height, Indexes, TxIndex, Version};
use tracing::info;
use vecdb::{
AnyStoredVec, AnyVec, Database, Exit, ImportableVec, PcoVec, ReadableVec, Rw, StorageMode,
WritableVec,
};
use crate::internal::db_utils::{finalize_db, open_db};
pub const DB_NAME: &str = "positions";
#[derive(Traversable)]
#[traversable(hidden)]
pub struct Vecs<M: StorageMode = Rw> {
db: Database,
pub block: M::Stored<PcoVec<Height, BlkPosition>>,
pub tx: M::Stored<PcoVec<TxIndex, BlkPosition>>,
}
impl Vecs {
pub(crate) fn forced_import(parent_path: &Path, parent_version: Version) -> Result<Self> {
let db = open_db(parent_path, DB_NAME, 1_000_000)?;
let version = parent_version;
let this = Self {
block: PcoVec::forced_import(&db, "position", version + Version::TWO)?,
tx: PcoVec::forced_import(&db, "position", version + Version::TWO)?,
db,
};
finalize_db(&this.db, &this)?;
Ok(this)
}
pub(crate) fn compute(
&mut self,
indexer: &Indexer,
starting_indexes: &Indexes,
reader: &Reader,
exit: &Exit,
) -> Result<()> {
self.db.sync_bg_tasks()?;
self.compute_(indexer, starting_indexes, reader, exit)?;
let exit = exit.clone();
self.db.run_bg(move |db| {
let _lock = exit.lock();
db.compact_deferred_default()
});
Ok(())
}
fn check_xor_bytes(&mut self, reader: &Reader) -> Result<()> {
let xor_path = self.db.path().join("xor.dat");
let current = reader.xor_bytes();
let cached = fs::read(&xor_path)
.ok()
.and_then(|b| <[u8; XOR_LEN]>::try_from(b).ok())
.map(XORBytes::from);
match cached {
Some(c) if c == current => return Ok(()),
Some(_) => {
info!("XOR bytes changed, resetting positions...");
self.block.reset()?;
self.tx.reset()?;
}
None => {}
}
fs::write(&xor_path, *current)?;
Ok(())
}
fn compute_(
&mut self,
indexer: &Indexer,
starting_indexes: &Indexes,
parser: &Reader,
exit: &Exit,
) -> Result<()> {
self.check_xor_bytes(parser)?;
// Validate computed versions against dependencies
let dep_version = indexer.vecs.transactions.first_tx_index.version()
+ indexer.vecs.transactions.height.version();
self.block.validate_computed_version_or_reset(dep_version)?;
self.tx.validate_computed_version_or_reset(dep_version)?;
let min_tx_index = TxIndex::from(self.tx.len()).min(starting_indexes.tx_index);
let Some(min_height) = indexer
.vecs
.transactions
.height
.collect_one(min_tx_index)
.map(|h: Height| h.min(starting_indexes.height))
else {
return Ok(());
};
let first_tx_at_min_height = indexer
.vecs
.transactions
.first_tx_index
.collect_one(min_height)
.unwrap();
self.block.truncate_if_needed(min_height)?;
self.tx.truncate_if_needed(first_tx_at_min_height)?;
parser
.read(
Some(min_height),
Some((indexer.vecs.transactions.first_tx_index.len() - 1).into()),
)
.iter()
.try_for_each(|block| -> Result<()> {
self.block.push(block.metadata().position());
block.tx_metadata().iter().for_each(|metadata| {
self.tx.push(metadata.position());
});
if *block.height() % 1_000 == 0 {
let _lock = exit.lock();
self.block.write()?;
self.tx.write()?;
}
Ok(())
})?;
let _lock = exit.lock();
self.block.write()?;
self.tx.write()?;
Ok(())
}
}

View File

@@ -1,6 +1,6 @@
use brk_error::Result;
use brk_indexer::Indexer;
use brk_types::{FeeRate, Indexes, Sats};
use brk_types::{FeeRate, Indexes, OutPoint, Sats, TxInIndex, VSize};
use vecdb::{AnyStoredVec, AnyVec, Exit, ReadableVec, VecIndex, WritableVec, unlikely};
use super::super::size;
@@ -33,26 +33,47 @@ impl Vecs {
exit,
)?;
self.compute_fee_and_fee_rate(size_vecs, starting_indexes, exit)?;
self.compute_fees(indexer, indexes, size_vecs, starting_indexes, exit)?;
let (r3, r4) = rayon::join(
let (r1, (r2, r3)) = rayon::join(
|| {
self.fee
.derive_from_with_skip(indexer, indexes, starting_indexes, exit, 1)
},
|| {
self.fee_rate
.derive_from_with_skip(indexer, indexes, starting_indexes, exit, 1)
rayon::join(
|| {
self.fee_rate.derive_from_with_skip(
indexer,
indexes,
starting_indexes,
exit,
1,
)
},
|| {
self.effective_fee_rate.derive_from_with_skip(
indexer,
indexes,
starting_indexes,
exit,
1,
)
},
)
},
);
r1?;
r2?;
r3?;
r4?;
Ok(())
}
fn compute_fee_and_fee_rate(
fn compute_fees(
&mut self,
indexer: &Indexer,
indexes: &indexes::Vecs,
size_vecs: &size::Vecs,
starting_indexes: &Indexes,
exit: &Exit,
@@ -67,6 +88,9 @@ impl Vecs {
self.fee_rate
.tx_index
.validate_computed_version_or_reset(dep_version)?;
self.effective_fee_rate
.tx_index
.validate_computed_version_or_reset(dep_version)?;
let target = self
.input_value
@@ -78,6 +102,7 @@ impl Vecs {
.tx_index
.len()
.min(self.fee_rate.tx_index.len())
.min(self.effective_fee_rate.tx_index.len())
.min(starting_indexes.tx_index.to_usize());
if min >= target {
@@ -90,39 +115,171 @@ impl Vecs {
self.fee_rate
.tx_index
.truncate_if_needed(starting_indexes.tx_index)?;
self.effective_fee_rate
.tx_index
.truncate_if_needed(starting_indexes.tx_index)?;
loop {
let skip = self.fee.tx_index.len();
let end = self.fee.tx_index.batch_end(target);
if skip >= end {
let start_tx = self.fee.tx_index.len();
let max_height = indexer.vecs.transactions.first_tx_index.len();
let start_height = if start_tx == 0 {
0
} else {
indexer
.vecs
.transactions
.height
.collect_one_at(start_tx)
.unwrap()
.to_usize()
};
for h in start_height..max_height {
let first_tx: usize = indexer
.vecs
.transactions
.first_tx_index
.collect_one_at(h)
.unwrap()
.to_usize();
let n = *indexes.height.tx_index_count.collect_one_at(h).unwrap() as usize;
if first_tx + n > target {
break;
}
let input_batch = self.input_value.collect_range_at(skip, end);
let output_batch = self.output_value.collect_range_at(skip, end);
let vsize_batch = size_vecs.vsize.tx_index.collect_range_at(skip, end);
// Batch read all per-tx data for this block
let input_values = self.input_value.collect_range_at(first_tx, first_tx + n);
let output_values = self.output_value.collect_range_at(first_tx, first_tx + n);
let vsizes: Vec<VSize> = size_vecs
.vsize
.tx_index
.collect_range_at(first_tx, first_tx + n);
let txin_starts: Vec<TxInIndex> = indexer
.vecs
.transactions
.first_txin_index
.collect_range_at(first_tx, first_tx + n);
let input_begin = txin_starts[0].to_usize();
let input_end = if h + 1 < max_height {
indexer
.vecs
.inputs
.first_txin_index
.collect_one_at(h + 1)
.unwrap()
.to_usize()
} else {
indexer.vecs.inputs.outpoint.len()
};
let outpoints: Vec<OutPoint> = indexer
.vecs
.inputs
.outpoint
.collect_range_at(input_begin, input_end);
for j in 0..input_batch.len() {
let fee = if unlikely(input_batch[j].is_max()) {
// Compute fee + fee_rate per tx
let mut fees = Vec::with_capacity(n);
for j in 0..n {
let fee = if unlikely(input_values[j].is_max()) {
Sats::ZERO
} else {
input_batch[j] - output_batch[j]
input_values[j] - output_values[j]
};
self.fee.tx_index.push(fee);
self.fee_rate
.tx_index
.push(FeeRate::from((fee, vsize_batch[j])));
self.fee_rate.tx_index.push(FeeRate::from((fee, vsizes[j])));
fees.push(fee);
}
let _lock = exit.lock();
let (r1, r2) = rayon::join(
|| self.fee.tx_index.write(),
|| self.fee_rate.tx_index.write(),
// Effective fee rate via same-block CPFP clustering
let effective = cluster_fee_rates(
&txin_starts,
&outpoints,
input_begin,
first_tx,
&fees,
&vsizes,
);
r1?;
r2?;
for rate in effective {
self.effective_fee_rate.tx_index.push(rate);
}
if h % 1_000 == 0 {
let _lock = exit.lock();
self.fee.tx_index.write()?;
self.fee_rate.tx_index.write()?;
self.effective_fee_rate.tx_index.write()?;
}
}
let _lock = exit.lock();
self.fee.tx_index.write()?;
self.fee_rate.tx_index.write()?;
self.effective_fee_rate.tx_index.write()?;
Ok(())
}
}
/// Clusters same-block parent-child txs and computes effective fee rate per cluster.
fn cluster_fee_rates(
txin_starts: &[TxInIndex],
outpoints: &[OutPoint],
outpoint_base: usize,
first_tx: usize,
fees: &[Sats],
vsizes: &[VSize],
) -> Vec<FeeRate> {
let n = fees.len();
let mut parent: Vec<usize> = (0..n).collect();
for j in 1..n {
let start = txin_starts[j].to_usize() - outpoint_base;
let end = if j + 1 < txin_starts.len() {
txin_starts[j + 1].to_usize() - outpoint_base
} else {
outpoints.len()
};
for op in &outpoints[start..end] {
if op.is_coinbase() {
continue;
}
let parent_tx = op.tx_index().to_usize();
if parent_tx >= first_tx && parent_tx < first_tx + n {
union(&mut parent, j, parent_tx - first_tx);
}
}
}
let mut cluster_fee = vec![Sats::ZERO; n];
let mut cluster_vsize = vec![VSize::from(0u64); n];
for j in 0..n {
let root = find(&mut parent, j);
cluster_fee[root] += fees[j];
cluster_vsize[root] += vsizes[j];
}
(0..n)
.map(|j| {
let root = find(&mut parent, j);
FeeRate::from((cluster_fee[root], cluster_vsize[root]))
})
.collect()
}
fn find(parent: &mut [usize], mut i: usize) -> usize {
while parent[i] != i {
parent[i] = parent[parent[i]];
i = parent[i];
}
i
}
fn union(parent: &mut [usize], a: usize, b: usize) {
let ra = find(parent, a);
let rb = find(parent, b);
if ra != rb {
parent[ra] = rb;
}
}

View File

@@ -20,6 +20,12 @@ impl Vecs {
output_value: EagerVec::forced_import(db, "output_value", version)?,
fee: PerTxDistribution::forced_import(db, "fee", v, indexes)?,
fee_rate: PerTxDistribution::forced_import(db, "fee_rate", v, indexes)?,
effective_fee_rate: PerTxDistribution::forced_import(
db,
"effective_fee_rate",
v,
indexes,
)?,
})
}
}

View File

@@ -10,4 +10,5 @@ pub struct Vecs<M: StorageMode = Rw> {
pub output_value: M::Stored<EagerVec<PcoVec<TxIndex, Sats>>>,
pub fee: PerTxDistribution<Sats, M>,
pub fee_rate: PerTxDistribution<FeeRate, M>,
pub effective_fee_rate: PerTxDistribution<FeeRate, M>,
}

View File

@@ -12,7 +12,6 @@ exclude = ["examples/"]
bitcoin = { workspace = true }
brk_error = { workspace = true, features = ["fjall", "vecdb"] }
brk_cohort = { workspace = true }
brk_iterator = { workspace = true }
brk_logger = { workspace = true }
brk_reader = { workspace = true }
brk_rpc = { workspace = true, features = ["corepc"] }

View File

@@ -7,7 +7,6 @@ use std::{
use brk_alloc::Mimalloc;
use brk_indexer::Indexer;
use brk_iterator::Blocks;
use brk_reader::Reader;
use brk_rpc::{Auth, Client};
use tracing::{debug, info};
@@ -33,9 +32,6 @@ fn main() -> color_eyre::Result<()> {
let reader = Reader::new(bitcoin_dir.join("blocks"), &client);
debug!("Reader created.");
let blocks = Blocks::new(&client, &reader);
debug!("Blocks created.");
let mut indexer = Indexer::forced_import(&outputs_dir)?;
debug!("Indexer imported.");
@@ -44,7 +40,7 @@ fn main() -> color_eyre::Result<()> {
loop {
let i = Instant::now();
indexer.checked_index(&blocks, &client, &exit)?;
indexer.checked_index(&reader, &client, &exit)?;
info!("Done in {:?}", i.elapsed());
Mimalloc::collect();

View File

@@ -9,7 +9,6 @@ use brk_alloc::Mimalloc;
use brk_bencher::Bencher;
use brk_error::Result;
use brk_indexer::Indexer;
use brk_iterator::Blocks;
use brk_reader::Reader;
use brk_rpc::{Auth, Client};
use tracing::{debug, info};
@@ -33,8 +32,6 @@ fn main() -> Result<()> {
let reader = Reader::new(bitcoin_dir.join("blocks"), &client);
let blocks = Blocks::new(&client, &reader);
let mut indexer = Indexer::forced_import(&outputs_dir)?;
let mut bencher =
@@ -50,7 +47,7 @@ fn main() -> Result<()> {
});
let i = Instant::now();
indexer.index(&blocks, &client, &exit)?;
indexer.index(&reader, &client, &exit)?;
info!("Done in {:?}", i.elapsed());
// We want to benchmark the drop too

View File

@@ -9,7 +9,6 @@ use brk_alloc::Mimalloc;
use brk_bencher::Bencher;
use brk_error::Result;
use brk_indexer::Indexer;
use brk_iterator::Blocks;
use brk_reader::Reader;
use brk_rpc::{Auth, Client};
use tracing::{debug, info};
@@ -33,8 +32,6 @@ fn main() -> Result<()> {
let reader = Reader::new(bitcoin_dir.join("blocks"), &client);
let blocks = Blocks::new(&client, &reader);
let mut indexer = Indexer::forced_import(&outputs_dir)?;
let mut bencher =
@@ -51,7 +48,7 @@ fn main() -> Result<()> {
loop {
let i = Instant::now();
indexer.index(&blocks, &client, &exit)?;
indexer.index(&reader, &client, &exit)?;
info!("Done in {:?}", i.elapsed());
Mimalloc::collect();

View File

@@ -4,7 +4,7 @@ use brk_types::{TxIndex, Txid, TxidPrefix, Version};
// One version for all data sources
// Increment on **change _OR_ addition**
pub const VERSION: Version = Version::new(25);
pub const VERSION: Version = Version::new(26);
pub const SNAPSHOT_BLOCK_RANGE: usize = 1_000;
/// Known duplicate Bitcoin transactions (BIP30)

View File

@@ -8,12 +8,14 @@ use std::{
};
use brk_error::Result;
use brk_iterator::Blocks;
use brk_reader::Reader;
use brk_rpc::Client;
use brk_types::Height;
use fjall::PersistMode;
use tracing::{debug, info};
use vecdb::{Exit, RawDBError, ReadOnlyClone, ReadableVec, Ro, Rw, StorageMode};
use vecdb::{
Exit, RawDBError, ReadOnlyClone, ReadableVec, Ro, Rw, StorageMode, WritableVec, unlikely,
};
mod constants;
mod indexes;
mod processor;
@@ -93,22 +95,22 @@ impl Indexer {
}
}
pub fn index(&mut self, blocks: &Blocks, client: &Client, exit: &Exit) -> Result<Indexes> {
self.index_(blocks, client, exit, false)
pub fn index(&mut self, reader: &Reader, client: &Client, exit: &Exit) -> Result<Indexes> {
self.index_(reader, client, exit, false)
}
pub fn checked_index(
&mut self,
blocks: &Blocks,
reader: &Reader,
client: &Client,
exit: &Exit,
) -> Result<Indexes> {
self.index_(blocks, client, exit, true)
self.index_(reader, client, exit, true)
}
fn index_(
&mut self,
blocks: &Blocks,
reader: &Reader,
client: &Client,
exit: &Exit,
check_collisions: bool,
@@ -172,13 +174,13 @@ impl Indexer {
let stores_res = s.spawn(|| -> Result<()> {
let i = Instant::now();
stores.commit(height)?;
info!("Stores exported in {:?}", i.elapsed());
debug!("Stores exported in {:?}", i.elapsed());
Ok(())
});
let vecs_res = s.spawn(|| -> Result<()> {
let i = Instant::now();
vecs.flush(height)?;
info!("Vecs exported in {:?}", i.elapsed());
debug!("Vecs exported in {:?}", i.elapsed());
Ok(())
});
stores_res.join().unwrap()?;
@@ -195,13 +197,22 @@ impl Indexer {
let vecs = &mut self.vecs;
let stores = &mut self.stores;
for block in blocks.after(prev_hash)? {
for block in reader.after(prev_hash)?.iter() {
let height = block.height();
info!("Indexing block {height}...");
if unlikely(height.is_multiple_of(100)) {
info!("Indexing block {height}...");
} else {
debug!("Indexing block {height}...");
}
indexes.height = height;
vecs.blocks.position.push(block.metadata().position());
block.tx_metadata().iter().for_each(|m| {
vecs.transactions.position.push(m.position());
});
let mut processor = BlockProcessor {
block: &block,
height,
@@ -271,13 +282,13 @@ impl Indexer {
for task in tasks {
task().map_err(vecdb::RawDBError::other)?;
}
info!("Stores committed in {:?}", i.elapsed());
debug!("Stores committed in {:?}", i.elapsed());
let i = Instant::now();
fjall_db
.persist(PersistMode::SyncData)
.map_err(RawDBError::other)?;
info!("Stores persisted in {:?}", i.elapsed());
debug!("Stores persisted in {:?}", i.elapsed());
}
db.compact()?;

View File

@@ -28,14 +28,14 @@ impl BlockProcessor<'_> {
.blockhash_prefix_to_height
.insert(blockhash_prefix, height);
self.stores
.height_to_coinbase_tag
.insert(height, self.block.coinbase_tag().into());
self.vecs
.blocks
.blockhash
.checked_push(height, blockhash.clone())?;
self.vecs
.blocks
.coinbase_tag
.checked_push(height, self.block.coinbase_tag())?;
self.vecs
.blocks
.difficulty
@@ -53,21 +53,28 @@ impl BlockProcessor<'_> {
pub fn push_block_size_and_weight(&mut self, txs: &[ComputedTx]) -> Result<()> {
let overhead = bitcoin::block::Header::SIZE + bitcoin::VarInt::from(txs.len()).size();
let mut total_size = overhead;
let mut weight_wu = overhead * 4;
for ct in txs {
let base = ct.base_size as usize;
let total = ct.total_size as usize;
total_size += total;
weight_wu += base * 3 + total;
let mut weight = overhead * 4;
let mut sw_txs = 0u32;
let mut sw_size = 0usize;
let mut sw_weight = 0usize;
for (i, tx) in txs.iter().enumerate() {
total_size += tx.total_size as usize;
weight += tx.weight();
if i > 0 && tx.is_segwit() {
sw_txs += 1;
sw_size += tx.total_size as usize;
sw_weight += tx.weight();
}
}
self.vecs
.blocks
.total
.checked_push(self.height, total_size.into())?;
self.vecs
.blocks
.weight
.checked_push(self.height, weight_wu.into())?;
let h = self.height;
let blocks = &mut self.vecs.blocks;
blocks.total.checked_push(h, total_size.into())?;
blocks.weight.checked_push(h, weight.into())?;
blocks.segwit_txs.checked_push(h, sw_txs.into())?;
blocks.segwit_size.checked_push(h, sw_size.into())?;
blocks.segwit_weight.checked_push(h, sw_weight.into())?;
Ok(())
}
}

View File

@@ -48,6 +48,18 @@ pub struct ComputedTx<'a> {
pub total_size: u32,
}
impl ComputedTx<'_> {
#[inline]
pub fn is_segwit(&self) -> bool {
self.base_size != self.total_size
}
#[inline]
pub fn weight(&self) -> usize {
self.base_size as usize * 3 + self.total_size as usize
}
}
/// Reusable buffers cleared and refilled each block to avoid allocation churn.
#[derive(Default)]
pub struct BlockBuffers {

View File

@@ -7,11 +7,11 @@ use brk_error::Result;
use brk_store::{AnyStore, Kind, Mode, Store};
use brk_types::{
AddrHash, AddrIndexOutPoint, AddrIndexTxIndex, BlockHashPrefix, Height, OutPoint, OutputType,
StoredString, TxIndex, TxOutIndex, TxidPrefix, TypeIndex, Unit, Version, Vout,
TxIndex, TxOutIndex, TxidPrefix, TypeIndex, Unit, Version, Vout,
};
use fjall::{Database, PersistMode};
use rayon::prelude::*;
use tracing::info;
use tracing::{debug, info};
use vecdb::{AnyVec, ReadableVec, VecIndex};
use crate::{Indexes, constants::DUPLICATE_TXID_PREFIXES};
@@ -26,7 +26,6 @@ pub struct Stores {
pub addr_type_to_addr_index_and_tx_index: ByAddrType<Store<AddrIndexTxIndex, Unit>>,
pub addr_type_to_addr_index_and_unspent_outpoint: ByAddrType<Store<AddrIndexOutPoint, Unit>>,
pub blockhash_prefix_to_height: Store<BlockHashPrefix, Height>,
pub height_to_coinbase_tag: Store<Height, StoredString>,
pub txid_prefix_to_tx_index: Store<TxidPrefix, TxIndex>,
}
@@ -88,14 +87,6 @@ impl Stores {
Ok(Self {
db: database.clone(),
height_to_coinbase_tag: Store::import(
database_ref,
path,
"height_to_coinbase_tag",
version,
Mode::PushOnly,
Kind::Sequential,
)?,
addr_type_to_addr_hash_to_addr_index: ByAddrType::new_with_index(
create_addr_hash_to_addr_index_store,
)?,
@@ -135,7 +126,6 @@ impl Stores {
fn iter_any(&self) -> impl Iterator<Item = &dyn AnyStore> {
[
&self.blockhash_prefix_to_height as &dyn AnyStore,
&self.height_to_coinbase_tag,
&self.txid_prefix_to_tx_index,
]
.into_iter()
@@ -159,7 +149,6 @@ impl Stores {
fn par_iter_any_mut(&mut self) -> impl ParallelIterator<Item = &mut dyn AnyStore> {
[
&mut self.blockhash_prefix_to_height as &mut dyn AnyStore,
&mut self.height_to_coinbase_tag,
&mut self.txid_prefix_to_tx_index,
]
.into_par_iter()
@@ -184,11 +173,11 @@ impl Stores {
let i = Instant::now();
self.par_iter_any_mut()
.try_for_each(|store| store.commit(height))?;
info!("Stores committed in {:?}", i.elapsed());
debug!("Stores committed in {:?}", i.elapsed());
let i = Instant::now();
self.db.persist(PersistMode::SyncData)?;
info!("Stores persisted in {:?}", i.elapsed());
debug!("Stores persisted in {:?}", i.elapsed());
Ok(())
}
@@ -210,7 +199,6 @@ impl Stores {
}
take!(self.blockhash_prefix_to_height);
take!(self.height_to_coinbase_tag);
take!(self.txid_prefix_to_tx_index);
for store in self.addr_type_to_addr_hash_to_addr_index.values_mut() {
@@ -257,7 +245,6 @@ impl Stores {
fn is_empty(&self) -> Result<bool> {
Ok(self.blockhash_prefix_to_height.is_empty()?
&& self.txid_prefix_to_tx_index.is_empty()?
&& self.height_to_coinbase_tag.is_empty()?
&& self
.addr_type_to_addr_hash_to_addr_index
.values()
@@ -286,12 +273,6 @@ impl Stores {
},
);
(starting_indexes.height.to_usize()..vecs.blocks.blockhash.len())
.map(Height::from)
.for_each(|h| {
self.height_to_coinbase_tag.remove(h);
});
for addr_type in OutputType::ADDR_TYPES {
for hash in vecs.iter_addr_hashes_from(addr_type, starting_indexes.height)? {
self.addr_type_to_addr_hash_to_addr_index

View File

@@ -1,6 +1,9 @@
use brk_error::Result;
use brk_traversable::Traversable;
use brk_types::{BlockHash, Height, StoredF64, StoredU64, Timestamp, Version, Weight};
use brk_types::{
BlkPosition, BlockHash, CoinbaseTag, Height, StoredF64, StoredU32, StoredU64, Timestamp,
Version, Weight,
};
use rayon::prelude::*;
use vecdb::{
AnyStoredVec, BytesVec, Database, ImportableVec, PcoVec, Rw, Stamp, StorageMode, WritableVec,
@@ -11,6 +14,7 @@ use crate::parallel_import;
#[derive(Traversable)]
pub struct BlocksVecs<M: StorageMode = Rw> {
pub blockhash: M::Stored<BytesVec<Height, BlockHash>>,
pub coinbase_tag: M::Stored<BytesVec<Height, CoinbaseTag>>,
#[traversable(wrap = "difficulty", rename = "value")]
pub difficulty: M::Stored<PcoVec<Height, StoredF64>>,
/// Doesn't guarantee continuity due to possible reorgs and more generally the nature of mining
@@ -20,45 +24,85 @@ pub struct BlocksVecs<M: StorageMode = Rw> {
pub total: M::Stored<PcoVec<Height, StoredU64>>,
#[traversable(wrap = "weight", rename = "base")]
pub weight: M::Stored<PcoVec<Height, Weight>>,
#[traversable(hidden)]
pub position: M::Stored<PcoVec<Height, BlkPosition>>,
pub segwit_txs: M::Stored<PcoVec<Height, StoredU32>>,
pub segwit_size: M::Stored<PcoVec<Height, StoredU64>>,
pub segwit_weight: M::Stored<PcoVec<Height, Weight>>,
}
impl BlocksVecs {
pub fn forced_import(db: &Database, version: Version) -> Result<Self> {
let (blockhash, difficulty, timestamp, total, weight) = parallel_import! {
blockhash = BytesVec::forced_import(db, "blockhash", version),
difficulty = PcoVec::forced_import(db, "difficulty", version),
timestamp = PcoVec::forced_import(db, "timestamp", version),
total_size = PcoVec::forced_import(db, "total_size", version),
weight = PcoVec::forced_import(db, "block_weight", version),
};
Ok(Self {
let (
blockhash,
coinbase_tag,
difficulty,
timestamp,
total,
weight,
position,
segwit_txs,
segwit_size,
segwit_weight,
) = parallel_import! {
blockhash = BytesVec::forced_import(db, "blockhash", version),
coinbase_tag = BytesVec::forced_import(db, "coinbase_tag", version),
difficulty = PcoVec::forced_import(db, "difficulty", version),
timestamp = PcoVec::forced_import(db, "timestamp", version),
total_size = PcoVec::forced_import(db, "total_size", version),
weight = PcoVec::forced_import(db, "block_weight", version),
position = PcoVec::forced_import(db, "block_position", version),
segwit_txs = PcoVec::forced_import(db, "segwit_txs", version),
segwit_size = PcoVec::forced_import(db, "segwit_size", version),
segwit_weight = PcoVec::forced_import(db, "segwit_weight", version),
};
Ok(Self {
blockhash,
coinbase_tag,
difficulty,
timestamp,
total,
weight,
position,
segwit_txs,
segwit_size,
segwit_weight,
})
}
pub fn truncate(&mut self, height: Height, stamp: Stamp) -> Result<()> {
self.blockhash
.truncate_if_needed_with_stamp(height, stamp)?;
self.coinbase_tag
.truncate_if_needed_with_stamp(height, stamp)?;
self.difficulty
.truncate_if_needed_with_stamp(height, stamp)?;
self.timestamp
.truncate_if_needed_with_stamp(height, stamp)?;
self.total.truncate_if_needed_with_stamp(height, stamp)?;
self.weight.truncate_if_needed_with_stamp(height, stamp)?;
self.position.truncate_if_needed_with_stamp(height, stamp)?;
self.segwit_txs
.truncate_if_needed_with_stamp(height, stamp)?;
self.segwit_size
.truncate_if_needed_with_stamp(height, stamp)?;
self.segwit_weight
.truncate_if_needed_with_stamp(height, stamp)?;
Ok(())
}
pub fn par_iter_mut_any(&mut self) -> impl ParallelIterator<Item = &mut dyn AnyStoredVec> {
[
&mut self.blockhash as &mut dyn AnyStoredVec,
&mut self.coinbase_tag,
&mut self.difficulty,
&mut self.timestamp,
&mut self.total,
&mut self.weight,
&mut self.position,
&mut self.segwit_txs,
&mut self.segwit_size,
&mut self.segwit_weight,
]
.into_par_iter()
}

View File

@@ -1,8 +1,8 @@
use brk_error::Result;
use brk_traversable::Traversable;
use brk_types::{
Height, RawLockTime, StoredBool, StoredU32, TxInIndex, TxIndex, TxOutIndex, TxVersion, Txid,
Version,
BlkPosition, Height, RawLockTime, StoredBool, StoredU32, TxInIndex, TxIndex, TxOutIndex,
TxVersion, Txid, Version,
};
use rayon::prelude::*;
use vecdb::{
@@ -23,6 +23,8 @@ pub struct TransactionsVecs<M: StorageMode = Rw> {
pub is_explicitly_rbf: M::Stored<PcoVec<TxIndex, StoredBool>>,
pub first_txin_index: M::Stored<PcoVec<TxIndex, TxInIndex>>,
pub first_txout_index: M::Stored<BytesVec<TxIndex, TxOutIndex>>,
#[traversable(hidden)]
pub position: M::Stored<PcoVec<TxIndex, BlkPosition>>,
}
pub struct TxMetadataVecs<'a> {
@@ -70,6 +72,7 @@ impl TransactionsVecs {
is_explicitly_rbf,
first_txin_index,
first_txout_index,
position,
) = parallel_import! {
first_tx_index = PcoVec::forced_import(db, "first_tx_index", version),
height = PcoVec::forced_import(db, "height", version),
@@ -81,6 +84,7 @@ impl TransactionsVecs {
is_explicitly_rbf = PcoVec::forced_import(db, "is_explicitly_rbf", version),
first_txin_index = PcoVec::forced_import(db, "first_txin_index", version),
first_txout_index = BytesVec::forced_import(db, "first_txout_index", version),
position = PcoVec::forced_import(db, "tx_position", version),
};
Ok(Self {
first_tx_index,
@@ -93,6 +97,7 @@ impl TransactionsVecs {
is_explicitly_rbf,
first_txin_index,
first_txout_index,
position,
})
}
@@ -115,6 +120,8 @@ impl TransactionsVecs {
.truncate_if_needed_with_stamp(tx_index, stamp)?;
self.first_txout_index
.truncate_if_needed_with_stamp(tx_index, stamp)?;
self.position
.truncate_if_needed_with_stamp(tx_index, stamp)?;
Ok(())
}
@@ -130,6 +137,7 @@ impl TransactionsVecs {
&mut self.is_explicitly_rbf,
&mut self.first_txin_index,
&mut self.first_txout_index,
&mut self.position,
]
.into_par_iter()
}

View File

@@ -1,4 +1,4 @@
use brk_types::{FeeRate, MempoolEntryInfo, Sats, Txid, TxidPrefix, VSize};
use brk_types::{FeeRate, MempoolEntryInfo, Sats, Timestamp, Txid, TxidPrefix, VSize};
use smallvec::SmallVec;
/// A mempool transaction entry.
@@ -16,6 +16,8 @@ pub struct Entry {
pub ancestor_vsize: VSize,
/// Parent txid prefixes (most txs have 0-2 parents)
pub depends: SmallVec<[TxidPrefix; 2]>,
/// When this tx was first seen in the mempool
pub first_seen: Timestamp,
}
impl Entry {
@@ -27,6 +29,7 @@ impl Entry {
ancestor_fee: info.ancestor_fee,
ancestor_vsize: VSize::from(info.ancestor_size),
depends: info.depends.iter().map(TxidPrefix::from).collect(),
first_seen: Timestamp::now(),
}
}

View File

@@ -106,6 +106,10 @@ impl MempoolInner {
self.txs.read()
}
/// Read-locked view of the mempool entry pool (per-tx fee/ancestry metadata).
pub fn get_entries(&self) -> RwLockReadGuard<'_, EntryPool> {
    self.entries.read()
}
/// Read-locked view of the mempool address tracker.
pub fn get_addrs(&self) -> RwLockReadGuard<'_, AddrTracker> {
    self.addrs.read()
}

View File

@@ -1,20 +1,39 @@
use brk_types::{TxWithHex, Txid};
use brk_types::{MempoolRecentTx, TxWithHex, Txid};
use derive_more::Deref;
use rustc_hash::FxHashMap;
const RECENT_CAP: usize = 10;
/// Store of full transaction data for API access.
#[derive(Default, Deref)]
pub struct TxStore(FxHashMap<Txid, TxWithHex>);
pub struct TxStore {
#[deref]
txs: FxHashMap<Txid, TxWithHex>,
recent: Vec<MempoolRecentTx>,
}
impl TxStore {
/// Check if a transaction exists.
pub fn contains(&self, txid: &Txid) -> bool {
self.0.contains_key(txid)
self.txs.contains_key(txid)
}
/// Add transactions in bulk.
pub fn extend(&mut self, txs: FxHashMap<Txid, TxWithHex>) {
self.0.extend(txs);
let mut new: Vec<_> = txs
.iter()
.take(RECENT_CAP)
.map(|(txid, tx_hex)| MempoolRecentTx::from((txid, tx_hex.tx())))
.collect();
let keep = RECENT_CAP.saturating_sub(new.len());
new.extend(self.recent.drain(..keep.min(self.recent.len())));
self.recent = new;
self.txs.extend(txs);
}
/// Last 10 transactions to enter the mempool.
pub fn recent(&self) -> &[MempoolRecentTx] {
&self.recent
}
/// Keep items matching predicate, call `on_remove` for each removed item.
@@ -23,7 +42,7 @@ impl TxStore {
K: FnMut(&Txid) -> bool,
R: FnMut(&Txid, &TxWithHex),
{
self.0.retain(|txid, tx| {
self.txs.retain(|txid, tx| {
if keep(txid) {
true
} else {

View File

@@ -1,10 +1,17 @@
use bitcoin::consensus::Decodable;
use bitcoin::hex::DisplayHex;
use brk_error::{Error, Result};
use brk_types::{BlockHash, BlockHashPrefix, BlockInfo, Height, TxIndex};
use brk_types::{
BlockExtras, BlockHash, BlockHashPrefix, BlockHeader, BlockInfo, BlockInfoV1, BlockPool,
FeeRate, Height, Sats, Timestamp, TxIndex, VSize, pools,
};
use vecdb::{AnyVec, ReadableVec, VecIndex};
use crate::Query;
const DEFAULT_BLOCK_COUNT: u32 = 10;
const DEFAULT_V1_BLOCK_COUNT: u32 = 15;
const HEADER_SIZE: usize = 80;
impl Query {
pub fn block(&self, hash: &BlockHash) -> Result<BlockInfo> {
@@ -13,58 +20,70 @@ impl Query {
}
pub fn block_by_height(&self, height: Height) -> Result<BlockInfo> {
let indexer = self.indexer();
let max_height = self.max_height();
if height > max_height {
return Err(Error::OutOfRange("Block height out of range".into()));
}
self.blocks_range(height.to_usize(), height.to_usize() + 1)?
.pop()
.ok_or(Error::NotFound("Block not found".into()))
}
let blockhash = indexer.vecs.blocks.blockhash.read_once(height)?;
let difficulty = indexer.vecs.blocks.difficulty.collect_one(height).unwrap();
let timestamp = indexer.vecs.blocks.timestamp.collect_one(height).unwrap();
let size = indexer.vecs.blocks.total.collect_one(height).unwrap();
let weight = indexer.vecs.blocks.weight.collect_one(height).unwrap();
let tx_count = self.tx_count_at_height(height, max_height)?;
/// Full `BlockInfoV1` (base info plus mempool.space-style extras) for one height.
///
/// # Errors
/// `OutOfRange` when `height` exceeds the indexed tip; `NotFound` when the
/// single-height bulk read yields nothing.
pub fn block_by_height_v1(&self, height: Height) -> Result<BlockInfoV1> {
    let max_height = self.max_height();
    if height > max_height {
        return Err(Error::OutOfRange("Block height out of range".into()));
    }
    // Reuse the bulk path for a one-block range and take its only element.
    self.blocks_v1_range(height.to_usize(), height.to_usize() + 1)?
        .pop()
        .ok_or(Error::NotFound("Block not found".into()))
}
Ok(BlockInfo {
id: blockhash,
height,
tx_count,
size: *size,
weight,
timestamp,
difficulty: *difficulty,
})
/// Consensus-serialized 80-byte block header, hex-encoded, looked up by hash.
pub fn block_header_hex(&self, hash: &BlockHash) -> Result<String> {
    let height = self.height_by_hash(hash)?;
    let header = self.read_block_header(height)?;
    Ok(bitcoin::consensus::encode::serialize_hex(&header))
}
/// Block hash at `height`.
///
/// # Errors
/// `OutOfRange` when `height` exceeds the indexed tip.
pub fn block_hash_by_height(&self, height: Height) -> Result<BlockHash> {
    let max_height = self.max_height();
    if height > max_height {
        return Err(Error::OutOfRange("Block height out of range".into()));
    }
    Ok(self.indexer().vecs.blocks.blockhash.read_once(height)?)
}
pub fn blocks(&self, start_height: Option<Height>) -> Result<Vec<BlockInfo>> {
let max_height = self.indexed_height();
let (begin, end) = self.resolve_block_range(start_height, DEFAULT_BLOCK_COUNT);
self.blocks_range(begin, end)
}
let start = start_height.unwrap_or(max_height);
let start = start.min(max_height);
/// Most recent `DEFAULT_V1_BLOCK_COUNT` blocks (v1 shape) ending at
/// `start_height`, or at the tip when `None`.
pub fn blocks_v1(&self, start_height: Option<Height>) -> Result<Vec<BlockInfoV1>> {
    let (begin, end) = self.resolve_block_range(start_height, DEFAULT_V1_BLOCK_COUNT);
    self.blocks_v1_range(begin, end)
}
let start_u32: u32 = start.into();
let count = DEFAULT_BLOCK_COUNT.min(start_u32 + 1) as usize;
// === Range queries (bulk reads) ===
if count == 0 {
fn blocks_range(&self, begin: usize, end: usize) -> Result<Vec<BlockInfo>> {
if begin >= end {
return Ok(Vec::new());
}
let indexer = self.indexer();
let computer = self.computer();
let reader = self.reader();
// Batch-read all PcoVec data for the contiguous range (avoids
// per-block page decompression — 4 reads instead of 4*count).
let end = start_u32 as usize + 1;
let begin = end - count;
// Bulk read all indexed data
let blockhashes = indexer.vecs.blocks.blockhash.collect_range_at(begin, end);
let difficulties = indexer.vecs.blocks.difficulty.collect_range_at(begin, end);
let timestamps = indexer.vecs.blocks.timestamp.collect_range_at(begin, end);
let sizes = indexer.vecs.blocks.total.collect_range_at(begin, end);
let weights = indexer.vecs.blocks.weight.collect_range_at(begin, end);
let positions = indexer.vecs.blocks.position.collect_range_at(begin, end);
// Batch-read first_tx_index for tx_count computation (need one extra for next boundary)
// Bulk read tx indexes for tx_count
let max_height = self.indexed_height();
let tx_index_end = if end <= max_height.to_usize() {
end + 1
} else {
@@ -77,24 +96,39 @@ impl Query {
.collect_range_at(begin, tx_index_end);
let total_txs = computer.indexes.tx_index.identity.len();
// Bulk read median time window
let median_start = begin.saturating_sub(10);
let median_timestamps: Vec<Timestamp> = indexer
.vecs
.blocks
.timestamp
.collect_range_at(median_start, end);
let count = end - begin;
let mut blocks = Vec::with_capacity(count);
for i in (0..count).rev() {
let height = Height::from(begin + i);
let blockhash = indexer.vecs.blocks.blockhash.read_once(height)?;
let raw_header = reader.read_raw_bytes(positions[i], HEADER_SIZE)?;
let header = Self::decode_header(&raw_header)?;
let tx_count = if i + 1 < first_tx_indexes.len() {
first_tx_indexes[i + 1].to_usize() - first_tx_indexes[i].to_usize()
(first_tx_indexes[i + 1].to_usize() - first_tx_indexes[i].to_usize()) as u32
} else {
total_txs - first_tx_indexes[i].to_usize()
(total_txs - first_tx_indexes[i].to_usize()) as u32
};
let median_time =
Self::compute_median_time(&median_timestamps, begin + i, median_start);
blocks.push(BlockInfo {
id: blockhash,
height,
tx_count: tx_count as u32,
id: blockhashes[i].clone(),
height: Height::from(begin + i),
header,
timestamp: timestamps[i],
tx_count,
size: *sizes[i],
weight: weights[i],
timestamp: timestamps[i],
median_time,
difficulty: *difficulties[i],
});
}
@@ -102,13 +136,254 @@ impl Query {
Ok(blocks)
}
/// Bulk-builds `BlockInfoV1` (base info + extras) for the half-open height
/// range `[begin, end)`, returned newest-first.
///
/// Every per-height column is batch-read once for the whole range instead of
/// per block, then blocks are assembled in reverse height order.
pub(crate) fn blocks_v1_range(&self, begin: usize, end: usize) -> Result<Vec<BlockInfoV1>> {
    if begin >= end {
        return Ok(Vec::new());
    }
    let count = end - begin;
    let indexer = self.indexer();
    let computer = self.computer();
    let reader = self.reader();
    let all_pools = pools();

    // Bulk read all indexed data
    let blockhashes = indexer.vecs.blocks.blockhash.collect_range_at(begin, end);
    let difficulties = indexer.vecs.blocks.difficulty.collect_range_at(begin, end);
    let timestamps = indexer.vecs.blocks.timestamp.collect_range_at(begin, end);
    let sizes = indexer.vecs.blocks.total.collect_range_at(begin, end);
    let weights = indexer.vecs.blocks.weight.collect_range_at(begin, end);
    let positions = indexer.vecs.blocks.position.collect_range_at(begin, end);
    let pool_slugs = computer.pools.pool.collect_range_at(begin, end);

    // Bulk read tx indexes (one extra entry, when available, gives the next
    // block's first tx index so tx_count is a simple difference).
    let max_height = self.indexed_height();
    let tx_index_end = if end <= max_height.to_usize() {
        end + 1
    } else {
        end
    };
    let first_tx_indexes: Vec<TxIndex> = indexer
        .vecs
        .transactions
        .first_tx_index
        .collect_range_at(begin, tx_index_end);
    let total_txs = computer.indexes.tx_index.identity.len();

    // Bulk read segwit stats
    let segwit_txs = indexer.vecs.blocks.segwit_txs.collect_range_at(begin, end);
    let segwit_sizes = indexer.vecs.blocks.segwit_size.collect_range_at(begin, end);
    let segwit_weights = indexer
        .vecs
        .blocks
        .segwit_weight
        .collect_range_at(begin, end);

    // Bulk read extras data
    let fee_sats = computer
        .mining
        .rewards
        .fees
        .block
        .sats
        .collect_range_at(begin, end);
    let subsidy_sats = computer
        .mining
        .rewards
        .subsidy
        .block
        .sats
        .collect_range_at(begin, end);
    let input_counts = computer.inputs.count.sum.collect_range_at(begin, end);
    let output_counts = computer
        .outputs
        .count
        .total
        .sum
        .collect_range_at(begin, end);
    let utxo_set_sizes = computer
        .outputs
        .count
        .unspent
        .height
        .collect_range_at(begin, end);
    let input_volumes = computer
        .transactions
        .volume
        .transfer_volume
        .block
        .sats
        .collect_range_at(begin, end);
    let output_volumes = computer
        .mining
        .rewards
        .output_volume
        .collect_range_at(begin, end);

    // Bulk read effective fee rate distribution (accounts for CPFP)
    let frd = &computer
        .transactions
        .fees
        .effective_fee_rate
        .distribution
        .block;
    let fr_min = frd.min.height.collect_range_at(begin, end);
    let fr_pct10 = frd.pct10.height.collect_range_at(begin, end);
    let fr_pct25 = frd.pct25.height.collect_range_at(begin, end);
    let fr_median = frd.median.height.collect_range_at(begin, end);
    let fr_pct75 = frd.pct75.height.collect_range_at(begin, end);
    let fr_pct90 = frd.pct90.height.collect_range_at(begin, end);
    let fr_max = frd.max.height.collect_range_at(begin, end);

    // Bulk read fee amount distribution (sats)
    let fad = &computer.transactions.fees.fee.distribution.block;
    let fa_min = fad.min.height.collect_range_at(begin, end);
    let fa_pct10 = fad.pct10.height.collect_range_at(begin, end);
    let fa_pct25 = fad.pct25.height.collect_range_at(begin, end);
    let fa_median = fad.median.height.collect_range_at(begin, end);
    let fa_pct75 = fad.pct75.height.collect_range_at(begin, end);
    let fa_pct90 = fad.pct90.height.collect_range_at(begin, end);
    let fa_max = fad.max.height.collect_range_at(begin, end);

    // Bulk read tx positions range covering all coinbase txs (first tx of each block)
    let tx_pos_begin = first_tx_indexes[0].to_usize();
    let tx_pos_end = first_tx_indexes[count - 1].to_usize() + 1;
    let all_tx_positions = indexer
        .vecs
        .transactions
        .position
        .collect_range_at(tx_pos_begin, tx_pos_end);

    // Bulk read median time window (10 blocks before `begin` so every block
    // in the range has its full median window available).
    let median_start = begin.saturating_sub(10);
    let median_timestamps = indexer
        .vecs
        .blocks
        .timestamp
        .collect_range_at(median_start, end);

    let mut blocks = Vec::with_capacity(count);
    // Newest-first assembly.
    for i in (0..count).rev() {
        let raw_header = reader.read_raw_bytes(positions[i], HEADER_SIZE)?;
        let header = Self::decode_header(&raw_header)?;
        let tx_count = if i + 1 < first_tx_indexes.len() {
            (first_tx_indexes[i + 1].to_usize() - first_tx_indexes[i].to_usize()) as u32
        } else {
            // Tip block: no next boundary, use the global tx count.
            (total_txs - first_tx_indexes[i].to_usize()) as u32
        };
        let weight = weights[i];
        let size = *sizes[i];
        let total_fees = fee_sats[i];
        let subsidy = subsidy_sats[i];
        // Subtracts 1 — presumably to exclude the coinbase input; TODO confirm.
        let total_inputs = (*input_counts[i]).saturating_sub(1);
        let total_outputs = *output_counts[i];
        let vsize = weight.to_vbytes_ceil();
        let total_fees_u64 = u64::from(total_fees);
        let non_coinbase = tx_count.saturating_sub(1) as u64;
        let pool_slug = pool_slugs[i];
        let pool = all_pools.get(pool_slug);
        let (
            coinbase_raw,
            coinbase_address,
            coinbase_addresses,
            coinbase_signature,
            coinbase_signature_ascii,
        ) = Self::parse_coinbase_tx(
            reader,
            all_tx_positions[first_tx_indexes[i].to_usize() - tx_pos_begin],
        );
        let median_time =
            Self::compute_median_time(&median_timestamps, begin + i, median_start);
        let info = BlockInfo {
            id: blockhashes[i].clone(),
            height: Height::from(begin + i),
            header,
            timestamp: timestamps[i],
            tx_count,
            size,
            weight,
            median_time,
            difficulty: *difficulties[i],
        };
        let total_input_amt = input_volumes[i];
        let total_output_amt = output_volumes[i];
        let extras = BlockExtras {
            total_fees,
            median_fee: fr_median[i],
            fee_range: [
                fr_min[i],
                fr_pct10[i],
                fr_pct25[i],
                fr_median[i],
                fr_pct75[i],
                fr_pct90[i],
                fr_max[i],
            ],
            reward: subsidy + total_fees,
            pool: BlockPool {
                id: pool.unique_id(),
                name: pool.name.to_string(),
                slug: pool_slug,
            },
            // Average fee over non-coinbase txs; 0 for coinbase-only blocks.
            avg_fee: Sats::from(if non_coinbase > 0 {
                total_fees_u64 / non_coinbase
            } else {
                0
            }),
            avg_fee_rate: FeeRate::from((total_fees, VSize::from(vsize))),
            coinbase_raw,
            coinbase_address,
            coinbase_addresses,
            coinbase_signature,
            coinbase_signature_ascii,
            avg_tx_size: if tx_count > 0 {
                size as f64 / tx_count as f64
            } else {
                0.0
            },
            total_inputs,
            total_outputs,
            total_output_amt,
            median_fee_amt: fa_median[i],
            fee_percentiles: [
                fa_min[i],
                fa_pct10[i],
                fa_pct25[i],
                fa_median[i],
                fa_pct75[i],
                fa_pct90[i],
                fa_max[i],
            ],
            segwit_total_txs: *segwit_txs[i],
            segwit_total_size: *segwit_sizes[i],
            segwit_total_weight: segwit_weights[i],
            // Extras carry the raw header hex in addition to the decoded one.
            header: raw_header.to_lower_hex_string(),
            utxo_set_change: total_outputs as i64 - total_inputs as i64,
            utxo_set_size: *utxo_set_sizes[i],
            total_input_amt,
            virtual_size: vsize as f64,
        };
        blocks.push(BlockInfoV1 { info, extras });
    }
    Ok(blocks)
}
// === Helper methods ===
pub fn height_by_hash(&self, hash: &BlockHash) -> Result<Height> {
let indexer = self.indexer();
let prefix = BlockHashPrefix::from(hash);
indexer
.stores
.blockhash_prefix_to_height
@@ -117,31 +392,103 @@ impl Query {
.ok_or(Error::NotFound("Block not found".into()))
}
pub fn read_block_header(&self, height: Height) -> Result<bitcoin::block::Header> {
let position = self
.indexer()
.vecs
.blocks
.position
.collect_one(height)
.unwrap();
let raw = self.reader().read_raw_bytes(position, HEADER_SIZE)?;
bitcoin::block::Header::consensus_decode(&mut raw.as_slice())
.map_err(|_| Error::Internal("Failed to decode block header"))
}
/// Highest indexed block height: length of the blockhash column minus one
/// (saturating, so an empty index maps to height 0).
fn max_height(&self) -> Height {
    Height::from(self.indexer().vecs.blocks.blockhash.len().saturating_sub(1))
}
fn tx_count_at_height(&self, height: Height, max_height: Height) -> Result<u32> {
let indexer = self.indexer();
let computer = self.computer();
/// Computes the half-open height range `[begin, end)` that ends at
/// `start_height` (clamped to the tip, tip when `None`) and holds at most
/// `count` blocks.
fn resolve_block_range(&self, start_height: Option<Height>, count: u32) -> (usize, usize) {
    let max_height = self.height();
    let start = start_height.unwrap_or(max_height).min(max_height);
    let start_u32: u32 = start.into();
    // Near genesis there may be fewer than `count` blocks available.
    let count = count.min(start_u32 + 1) as usize;
    let end = start_u32 as usize + 1;
    let begin = end - count;
    (begin, end)
}
let first_tx_index = indexer
.vecs
.transactions
.first_tx_index
.collect_one(height)
.unwrap();
let next_first_tx_index = if height < max_height {
indexer
.vecs
.transactions
.first_tx_index
.collect_one(height.incremented())
.unwrap()
} else {
TxIndex::from(computer.indexes.tx_index.identity.len())
/// Decodes 80 consensus-serialized bytes into the API-facing `BlockHeader`.
fn decode_header(bytes: &[u8]) -> Result<BlockHeader> {
    let raw = bitcoin::block::Header::consensus_decode(&mut &bytes[..])
        .map_err(|_| Error::Internal("Failed to decode block header"))?;
    Ok(BlockHeader::from(raw))
}
/// Median timestamp of the window `[height - 10, height]` (up to 11 blocks),
/// read from a pre-fetched slice whose index 0 corresponds to absolute height
/// `window_start`.
///
/// Index arithmetic is saturated and clamped so malformed inputs cannot
/// underflow or index past the slice; an empty window yields `Timestamp`
/// zero instead of panicking (callers always provide a covering slice, so
/// this is purely defensive).
fn compute_median_time(
    all_timestamps: &[Timestamp],
    height: usize,
    window_start: usize,
) -> Timestamp {
    // Translate absolute heights into indices relative to `window_start`.
    let rel_start = height.saturating_sub(10).saturating_sub(window_start);
    let rel_end = (height + 1)
        .saturating_sub(window_start)
        .min(all_timestamps.len());
    if rel_start >= rel_end {
        // Defensive: never index into an empty window.
        return Timestamp::from(0usize);
    }
    let mut sorted: Vec<usize> = all_timestamps[rel_start..rel_end]
        .iter()
        .map(|t| usize::from(*t))
        .collect();
    sorted.sort_unstable();
    Timestamp::from(sorted[sorted.len() / 2])
}
/// Reads and decodes a block's coinbase transaction and extracts display data:
/// `(scriptSig hex, first payout address, all payout addresses,
///   first output script ASM, scriptSig as ASCII)`.
///
/// Best-effort: any read or decode failure returns empty values instead of an
/// error. NOTE(review): only the first 1000 bytes at `position` are read —
/// coinbase txs larger than that will fail to decode and fall back to
/// empties; confirm the cap is intentional.
fn parse_coinbase_tx(
    reader: &brk_reader::Reader,
    position: brk_types::BlkPosition,
) -> (String, Option<String>, Vec<String>, String, String) {
    let raw_bytes = match reader.read_raw_bytes(position, 1000) {
        Ok(bytes) => bytes,
        Err(_) => return (String::new(), None, vec![], String::new(), String::new()),
    };
    // (A stray leftover statement between these two reads was removed — it
    // referenced out-of-scope names and could not compile.)
    let tx = match bitcoin::Transaction::consensus_decode(&mut raw_bytes.as_slice()) {
        Ok(tx) => tx,
        Err(_) => return (String::new(), None, vec![], String::new(), String::new()),
    };
    // Coinbase input's scriptSig, hex-encoded.
    let coinbase_raw = tx
        .input
        .first()
        .map(|input| input.script_sig.as_bytes().to_lower_hex_string())
        .unwrap_or_default();
    // Same bytes rendered byte-per-char for human inspection of miner tags.
    let coinbase_signature_ascii = tx
        .input
        .first()
        .map(|input| {
            input
                .script_sig
                .as_bytes()
                .iter()
                .map(|&b| b as char)
                .collect::<String>()
        })
        .unwrap_or_default();
    // Payout addresses: outputs whose script maps to a standard address.
    let coinbase_addresses: Vec<String> = tx
        .output
        .iter()
        .filter_map(|output| {
            bitcoin::Address::from_script(&output.script_pubkey, bitcoin::Network::Bitcoin)
                .ok()
                .map(|a| a.to_string())
        })
        .collect();
    let coinbase_address = coinbase_addresses.first().cloned();
    let coinbase_signature = tx
        .output
        .first()
        .map(|output| output.script_pubkey.to_asm_string())
        .unwrap_or_default();
    (
        coinbase_raw,
        coinbase_address,
        coinbase_addresses,
        coinbase_signature,
        coinbase_signature_ascii,
    )
}
}

View File

@@ -12,7 +12,6 @@ impl Query {
fn block_raw_by_height(&self, height: Height) -> Result<Vec<u8>> {
let indexer = self.indexer();
let computer = self.computer();
let reader = self.reader();
let max_height = Height::from(indexer.vecs.blocks.blockhash.len().saturating_sub(1));
@@ -20,7 +19,7 @@ impl Query {
return Err(Error::OutOfRange("Block height out of range".into()));
}
let position = computer.positions.block.collect_one(height).unwrap();
let position = indexer.vecs.blocks.position.collect_one(height).unwrap();
let size = indexer.vecs.blocks.total.collect_one(height).unwrap();
reader.read_raw_bytes(position, *size as usize)

View File

@@ -23,7 +23,7 @@ impl Query {
// === Helper methods ===
fn block_txids_by_height(&self, height: Height) -> Result<Vec<Txid>> {
pub(crate) fn block_txids_by_height(&self, height: Height) -> Result<Vec<Txid>> {
let indexer = self.indexer();
let max_height = self.indexed_height();

View File

@@ -1,5 +1,8 @@
use brk_error::{Error, Result};
use brk_types::{MempoolBlock, MempoolInfo, RecommendedFees, Txid};
use brk_types::{
CpfpEntry, CpfpInfo, MempoolBlock, MempoolInfo, MempoolRecentTx, RecommendedFees, Txid,
TxidParam, TxidPrefix, Weight,
};
use crate::Query;
@@ -40,4 +43,67 @@ impl Query {
Ok(blocks)
}
/// Most recent transactions to enter the mempool (cap decided by the tx store).
///
/// # Errors
/// `MempoolNotAvailable` when no mempool tracker is attached.
pub fn mempool_recent(&self) -> Result<Vec<MempoolRecentTx>> {
    let mempool = self.mempool().ok_or(Error::MempoolNotAvailable)?;
    Ok(mempool.get_txs().recent().to_vec())
}
/// CPFP (child-pays-for-parent) info for a mempool transaction: all in-mempool
/// ancestors, direct in-mempool descendants, and the effective fee rate.
///
/// # Errors
/// `MempoolNotAvailable` without a mempool; `NotFound` when the txid is not
/// currently in the mempool.
pub fn cpfp(&self, TxidParam { txid }: TxidParam) -> Result<CpfpInfo> {
    let mempool = self.mempool().ok_or(Error::MempoolNotAvailable)?;
    let entries = mempool.get_entries();
    let prefix = TxidPrefix::from(&txid);
    let entry = entries
        .get(&prefix)
        .ok_or(Error::NotFound("Transaction not in mempool".into()))?;

    // Ancestors: walk up the depends chain.
    // NOTE(review): diamond dependencies can push the same ancestor more than
    // once (no visited-set) — confirm callers tolerate duplicates.
    let mut ancestors = Vec::new();
    let mut stack: Vec<TxidPrefix> = entry.depends.to_vec();
    while let Some(p) = stack.pop() {
        if let Some(anc) = entries.get(&p) {
            ancestors.push(CpfpEntry {
                txid: anc.txid.clone(),
                weight: Weight::from(anc.vsize),
                fee: anc.fee,
            });
            stack.extend(anc.depends.iter().cloned());
        }
    }

    // Descendants: find entries that depend on this tx's prefix.
    // Only direct children are reported — no transitive walk.
    let mut descendants = Vec::new();
    for e in entries.entries().iter().flatten() {
        if e.depends.contains(&prefix) {
            descendants.push(CpfpEntry {
                txid: e.txid.clone(),
                weight: Weight::from(e.vsize),
                fee: e.fee,
            });
        }
    }
    let effective_fee_per_vsize = entry.effective_fee_rate();
    Ok(CpfpInfo {
        ancestors,
        descendants,
        effective_fee_per_vsize,
    })
}
/// First-seen mempool timestamps (as unix seconds) for each txid in `txids`.
///
/// Transactions not currently in the mempool yield `0`.
///
/// # Errors
/// `MempoolNotAvailable` when no mempool tracker is attached.
pub fn transaction_times(&self, txids: &[Txid]) -> Result<Vec<u64>> {
    let mempool = self.mempool().ok_or(Error::MempoolNotAvailable)?;
    let entries = mempool.get_entries();
    let mut times = Vec::with_capacity(txids.len());
    for txid in txids {
        let first_seen = entries
            .get(&TxidPrefix::from(txid))
            .map_or(0, |entry| usize::from(entry.first_seen) as u64);
        times.push(first_seen);
    }
    Ok(times)
}
}

View File

@@ -1,7 +1,7 @@
use brk_error::{Error, Result};
use brk_types::{
Height, PoolBlockCounts, PoolBlockShares, PoolDetail, PoolDetailInfo, PoolInfo, PoolSlug,
PoolStats, PoolsSummary, TimePeriod, pools,
BlockInfoV1, Height, PoolBlockCounts, PoolBlockShares, PoolDetail, PoolDetailInfo,
PoolHashrateEntry, PoolInfo, PoolSlug, PoolStats, PoolsSummary, TimePeriod, pools,
};
use vecdb::{AnyVec, ReadableVec, VecIndex};
@@ -177,4 +177,132 @@ impl Query {
reported_hashrate: None,
})
}
/// Last (up to) 10 blocks mined by `slug`, scanning backwards from
/// `start_height` (or the chain tip when `None`).
///
/// Heights whose `blocks_v1_range` read fails are silently skipped, matching
/// the best-effort behavior of the other pool endpoints.
pub fn pool_blocks(
    &self,
    slug: PoolSlug,
    start_height: Option<Height>,
) -> Result<Vec<BlockInfoV1>> {
    let computer = self.computer();
    let max_height = self.height().to_usize();
    let start = start_height.map(|h| h.to_usize()).unwrap_or(max_height);

    // BytesVec reader gives O(1) mmap reads — efficient for backward scan
    let reader = computer.pools.pool.reader();
    let len = reader.len();
    if len == 0 {
        // Nothing attributed yet: previously `end` clamped to 0 and index 0
        // of the empty vec was still read.
        return Ok(Vec::new());
    }
    let end = start.min(len - 1);

    let mut heights = Vec::with_capacity(10);
    for h in (0..=end).rev() {
        if reader.get(h) == slug {
            heights.push(h);
            if heights.len() >= 10 {
                break;
            }
        }
    }

    let mut blocks = Vec::with_capacity(heights.len());
    for h in heights {
        if let Ok(mut v) = self.blocks_v1_range(h, h + 1) {
            blocks.append(&mut v);
        }
    }
    Ok(blocks)
}
/// Hashrate history for a single pool, sampled from genesis to the tip.
///
/// # Errors
/// `NotFound` when the slug has no computed dominance series.
pub fn pool_hashrate(&self, slug: PoolSlug) -> Result<Vec<PoolHashrateEntry>> {
    let pools_list = pools();
    let pool = pools_list.get(slug);
    let mut out = Vec::new();
    for (timestamp, avg_hashrate, share) in self.compute_pool_hashrate_entries(slug, 0)? {
        out.push(PoolHashrateEntry {
            timestamp,
            avg_hashrate,
            share,
            pool_name: pool.name.to_string(),
        });
    }
    Ok(out)
}
/// Hashrate entries for every pool over `time_period` (whole history when
/// `None`). Samples with zero share are omitted.
pub fn pools_hashrate(
    &self,
    time_period: Option<TimePeriod>,
) -> Result<Vec<PoolHashrateEntry>> {
    let current_height = self.height().to_usize();
    // Window start in block heights; `None` scans from genesis.
    let start = match time_period {
        Some(tp) => current_height.saturating_sub(tp.block_count()),
        None => 0,
    };
    let pools_list = pools();
    let mut entries = Vec::new();
    for pool in pools_list.iter() {
        // Per-pool failures (e.g. no computed series for a pool) are skipped
        // rather than failing the whole summary.
        if let Ok(pool_entries) = self.compute_pool_hashrate_entries(pool.slug, start) {
            for (ts, hr, share) in pool_entries {
                if share > 0.0 {
                    entries.push(PoolHashrateEntry {
                        timestamp: ts,
                        avg_hashrate: hr,
                        share,
                        pool_name: pool.name.to_string(),
                    });
                }
            }
        }
    }
    Ok(entries)
}
/// Compute (timestamp, hashrate, share) tuples for a pool from `start_height`.
///
/// Downsamples to roughly 200 evenly spaced heights. The pool's hashrate is
/// the network daily hashrate scaled by its dominance share (basis points /
/// 10_000).
fn compute_pool_hashrate_entries(
    &self,
    slug: PoolSlug,
    start_height: usize,
) -> Result<Vec<(brk_types::Timestamp, u128, f64)>> {
    let computer = self.computer();
    let indexer = self.indexer();
    let end = self.height().to_usize() + 1;
    let start = start_height;
    // The per-height dominance series lives under either major or minor pools.
    let dominance_bps = computer
        .pools
        .major
        .get(&slug)
        .map(|v| &v.base.dominance.bps.height)
        .or_else(|| {
            computer
                .pools
                .minor
                .get(&slug)
                .map(|v| &v.dominance.bps.height)
        })
        .ok_or_else(|| Error::NotFound("Pool not found".into()))?;
    let total = end - start;
    // ~200 sample points regardless of range size.
    let step = (total / 200).max(1);
    // Batch read everything for the range
    let timestamps = indexer.vecs.blocks.timestamp.collect_range_at(start, end);
    let bps_values = dominance_bps.collect_range_at(start, end);
    let day1_values = computer.indexes.height.day1.collect_range_at(start, end);
    let hashrate_vec = &computer.mining.hashrate.rate.base.day1;
    // Pre-read all needed hashrates by collecting unique day1 values
    let max_day = day1_values.iter().map(|d| d.to_usize()).max().unwrap_or(0);
    let min_day = day1_values.iter().map(|d| d.to_usize()).min().unwrap_or(0);
    let hashrates = hashrate_vec.collect_range_dyn(min_day, max_day + 1);
    Ok((0..total)
        .step_by(step)
        .filter_map(|i| {
            let bps = *bps_values[i];
            // Basis points -> fraction of network hashrate.
            let share = bps as f64 / 10000.0;
            let day_idx = day1_values[i].to_usize() - min_day;
            // Skip samples whose day has no computed hashrate yet.
            let network_hr = f64::from(*hashrates.get(day_idx)?.as_ref()?);
            Some((timestamps[i], (network_hr * share) as u128, share))
        })
        .collect())
}
}

View File

@@ -1,5 +1,6 @@
use brk_error::Result;
use brk_types::Dollars;
use brk_types::{Dollars, ExchangeRates, HistoricalPrice, HistoricalPriceEntry, Timestamp};
use vecdb::{ReadableVec, VecIndex};
use crate::Query;
@@ -18,4 +19,40 @@ impl Query {
Ok(oracle.price_dollars())
}
/// Historical BTC/USD prices keyed by block timestamp.
///
/// With `timestamp`, returns the single entry at (or just after) that time
/// via binary search. NOTE(review): this assumes block timestamps are
/// monotonically non-decreasing, which Bitcoin only loosely guarantees —
/// confirm the approximation is acceptable. Without `timestamp`, returns
/// ~200 evenly spaced samples over the whole chain.
pub fn historical_price(&self, timestamp: Option<Timestamp>) -> Result<HistoricalPrice> {
    let indexer = self.indexer();
    let computer = self.computer();
    let max_height = self.height().to_usize();
    let end = max_height + 1;
    // Full-column reads; both vecs are height-indexed and aligned.
    let timestamps = indexer.vecs.blocks.timestamp.collect();
    let all_prices = computer.prices.spot.cents.height.collect();
    let prices = if let Some(target_ts) = timestamp {
        let target = usize::from(target_ts);
        // On a miss, `i` is the insertion point (first block at/after target),
        // clamped to the tip.
        let h = timestamps
            .binary_search_by_key(&target, |t| usize::from(*t))
            .unwrap_or_else(|i| i.min(max_height));
        vec![HistoricalPriceEntry {
            time: usize::from(timestamps[h]) as u64,
            usd: Dollars::from(all_prices[h]),
        }]
    } else {
        let step = (max_height / 200).max(1);
        (0..end)
            .step_by(step)
            .map(|h| HistoricalPriceEntry {
                time: usize::from(timestamps[h]) as u64,
                usd: Dollars::from(all_prices[h]),
            })
            .collect()
    };
    Ok(HistoricalPrice {
        prices,
        exchange_rates: ExchangeRates {},
    })
}
}

View File

@@ -3,8 +3,8 @@ use std::io::Cursor;
use bitcoin::{consensus::Decodable, hex::DisplayHex};
use brk_error::{Error, Result};
use brk_types::{
OutputType, Sats, Transaction, TxIn, TxInIndex, TxIndex, TxOut, TxOutspend, TxStatus, Txid,
TxidParam, TxidPrefix, Vin, Vout, Weight,
Height, MerkleProof, OutputType, Sats, Transaction, TxIn, TxInIndex, TxIndex, TxOut,
TxOutspend, TxStatus, Txid, TxidParam, TxidPrefix, Vin, Vout, Weight,
};
use vecdb::{ReadableVec, VecIndex};
@@ -72,6 +72,20 @@ impl Query {
})
}
pub fn transaction_raw(&self, TxidParam { txid }: TxidParam) -> Result<Vec<u8>> {
let prefix = TxidPrefix::from(&txid);
let indexer = self.indexer();
let Ok(Some(tx_index)) = indexer
.stores
.txid_prefix_to_tx_index
.get(&prefix)
.map(|opt| opt.map(|cow| cow.into_owned()))
else {
return Err(Error::UnknownTxid);
};
self.transaction_raw_by_index(tx_index)
}
pub fn transaction_hex(&self, TxidParam { txid }: TxidParam) -> Result<String> {
// First check mempool for unconfirmed transactions
if let Some(mempool) = self.mempool()
@@ -192,7 +206,6 @@ impl Query {
pub fn transaction_by_index(&self, tx_index: TxIndex) -> Result<Transaction> {
let indexer = self.indexer();
let reader = self.reader();
let computer = self.computer();
// Get tx metadata using collect_one for PcoVec, read_once for BytesVec
let txid = indexer.vecs.transactions.txid.read_once(tx_index)?;
@@ -226,7 +239,12 @@ impl Query {
.first_txin_index
.collect_one(tx_index)
.unwrap();
let position = computer.positions.tx.collect_one(tx_index).unwrap();
let position = indexer
.vecs
.transactions
.position
.collect_one(tx_index)
.unwrap();
// Get block info for status
let block_hash = indexer.vecs.blocks.blockhash.read_once(height)?;
@@ -337,22 +355,15 @@ impl Query {
Ok(transaction)
}
fn transaction_hex_by_index(&self, tx_index: TxIndex) -> Result<String> {
fn transaction_raw_by_index(&self, tx_index: TxIndex) -> Result<Vec<u8>> {
let indexer = self.indexer();
let reader = self.reader();
let computer = self.computer();
let total_size = indexer.vecs.transactions.total_size.collect_one(tx_index).unwrap();
let position = indexer.vecs.transactions.position.collect_one(tx_index).unwrap();
self.reader().read_raw_bytes(position, *total_size as usize)
}
let total_size = indexer
.vecs
.transactions
.total_size
.collect_one(tx_index)
.unwrap();
let position = computer.positions.tx.collect_one(tx_index).unwrap();
let buffer = reader.read_raw_bytes(position, *total_size as usize)?;
Ok(buffer.to_lower_hex_string())
fn transaction_hex_by_index(&self, tx_index: TxIndex) -> Result<String> {
Ok(self.transaction_raw_by_index(tx_index)?.to_lower_hex_string())
}
fn outspend_details(&self, txin_index: TxInIndex) -> Result<TxOutspend> {
@@ -407,4 +418,93 @@ impl Query {
}),
})
}
/// Resolves a txid to its `TxIndex` and confirmation `Height`.
///
/// # Errors
/// `UnknownTxid` when the prefix is not indexed; store read errors propagate.
fn resolve_tx(&self, txid: &Txid) -> Result<(TxIndex, Height)> {
    let indexer = self.indexer();
    let prefix = TxidPrefix::from(txid);
    let tx_index: TxIndex = indexer
        .stores
        .txid_prefix_to_tx_index
        .get(&prefix)?
        .map(|cow| cow.into_owned())
        .ok_or(Error::UnknownTxid)?;
    // The height column is tx-indexed; an index that came from the store is
    // in range, so the lookup cannot miss.
    let height: Height = indexer
        .vecs
        .transactions
        .height
        .collect_one(tx_index)
        .unwrap();
    Ok((tx_index, height))
}
/// Submits a raw transaction (hex) to the node and returns its txid.
pub fn broadcast_transaction(&self, hex: &str) -> Result<Txid> {
    self.client().send_raw_transaction(hex)
}
/// BIP-37 `merkleblock` proof (hex-serialized) that `txid` is included in its
/// confirming block.
pub fn merkleblock_proof(&self, txid_param: TxidParam) -> Result<String> {
    let (_, height) = self.resolve_tx(&txid_param.txid)?;
    let header = self.read_block_header(height)?;
    let txids = self.block_txids_by_height(height)?;
    let target: bitcoin::Txid = (&txid_param.txid).into();
    let btxids: Vec<bitcoin::Txid> = txids.iter().map(bitcoin::Txid::from).collect();
    // Include only the requested txid in the partial merkle tree.
    let mb = bitcoin::MerkleBlock::from_header_txids_with_predicate(&header, &btxids, |t| {
        *t == target
    });
    Ok(bitcoin::consensus::encode::serialize_hex(&mb))
}
/// Electrum-style merkle proof: block height, sibling-hash path, and the
/// transaction's position within its block.
pub fn merkle_proof(&self, txid_param: TxidParam) -> Result<MerkleProof> {
    let (tx_index, height) = self.resolve_tx(&txid_param.txid)?;
    let first_tx = self
        .indexer()
        .vecs
        .transactions
        .first_tx_index
        .collect_one(height)
        .ok_or(Error::NotFound("Block not found".into()))?;
    // Position of the tx within the block it confirmed in.
    let pos = tx_index.to_usize() - first_tx.to_usize();
    let txids = self.block_txids_by_height(height)?;
    Ok(MerkleProof {
        block_height: height,
        merkle: merkle_path(&txids, pos),
        pos,
    })
}
}
/// Builds the merkle path (sibling hashes, hex-encoded in display byte order)
/// for the transaction at index `pos` within `txids`, level by level up to
/// the root.
fn merkle_path(txids: &[Txid], pos: usize) -> Vec<String> {
    use bitcoin::hashes::{Hash, sha256d};
    // Txid bytes are in internal order (same layout as bitcoin::Txid)
    let mut hashes: Vec<[u8; 32]> = txids
        .iter()
        .map(|t| bitcoin::Txid::from(t).to_byte_array())
        .collect();
    let mut proof = Vec::new();
    let mut idx = pos;
    while hashes.len() > 1 {
        // Bitcoin merkle rule: a node without a right sibling pairs with itself.
        let sibling = if idx ^ 1 < hashes.len() { idx ^ 1 } else { idx };
        // Display order: reverse bytes for hex output
        let mut display = hashes[sibling];
        display.reverse();
        proof.push(bitcoin::hex::DisplayHex::to_lower_hex_string(&display));
        // Collapse to the next level: each pair is double-SHA256'd; a trailing
        // odd element pairs with itself (`pair.last()` == `pair[0]` for a
        // 1-element chunk).
        hashes = hashes
            .chunks(2)
            .map(|pair| {
                let right = pair.last().unwrap();
                let mut combined = [0u8; 64];
                combined[..32].copy_from_slice(&pair[0]);
                combined[32..].copy_from_slice(right);
                sha256d::Hash::hash(&combined).to_byte_array()
            })
            .collect();
        idx /= 2;
    }
    proof
}

View File

@@ -21,7 +21,7 @@ fn main() -> Result<()> {
if let Some(block) = reader.read(Some(height), Some(height)).iter().next() {
println!(
"height={} hash={} txs={} coinbase=\"{}\" ({:?})",
"height={} hash={} txs={} coinbase=\"{:?}\" ({:?})",
block.height(),
block.hash(),
block.txdata.len(),

View File

@@ -14,7 +14,7 @@ use bitcoin::{block::Header, consensus::Decodable};
use blk_index_to_blk_path::*;
use brk_error::{Error, Result};
use brk_rpc::Client;
use brk_types::{BlkMetadata, BlkPosition, BlockHash, Height, ReadBlock};
use brk_types::{BlkPosition, BlockHash, Height, ReadBlock};
pub use crossbeam::channel::Receiver;
use crossbeam::channel::bounded;
use derive_more::Deref;
@@ -24,28 +24,17 @@ use tracing::{error, warn};
mod blk_index_to_blk_path;
mod decode;
mod scan;
mod xor_bytes;
mod xor_index;
use decode::*;
use scan::*;
pub use xor_bytes::*;
pub use xor_index::*;
const MAGIC_BYTES: [u8; 4] = [249, 190, 180, 217];
const BOUND_CAP: usize = 50;
fn find_magic(bytes: &[u8], xor_i: &mut XORIndex, xor_bytes: XORBytes) -> Option<usize> {
let mut window = [0u8; 4];
for (i, &b) in bytes.iter().enumerate() {
window.rotate_left(1);
window[3] = xor_i.byte(b, xor_bytes);
if window == MAGIC_BYTES {
return Some(i + 1);
}
}
None
}
///
/// Bitcoin BLK file reader
///
@@ -117,10 +106,46 @@ impl ReaderInner {
Ok(buffer)
}
/// Returns a receiver streaming `ReadBlock`s from `hash + 1` to the chain tip.
/// If `hash` is `None`, starts from genesis.
pub fn after(&self, hash: Option<BlockHash>) -> Result<Receiver<ReadBlock>> {
    // Resolve the first height to emit: the block right after `hash`,
    // or genesis when no hash was given.
    let start = if let Some(hash) = hash.as_ref() {
        let info = self.client.get_block_header_info(hash)?;
        Height::from(info.height + 1)
    } else {
        Height::ZERO
    };
    let end = self.client.get_last_height()?;
    if end < start {
        // Nothing to stream: a zero-capacity channel whose sender is
        // dropped immediately acts as a closed, empty receiver.
        return Ok(bounded(0).1);
    }
    if *end - *start < 10 {
        // Small ranges near the tip: use the reverse tail scan (cheap for
        // recent blocks), then flip the result back into chain order.
        // NOTE(review): the threshold 10 looks like a tuning constant —
        // confirm it matches the read_rev chunking trade-off.
        let mut blocks: Vec<_> = self.read_rev(Some(start), Some(end)).iter().collect();
        blocks.reverse();
        let (send, recv) = bounded(blocks.len());
        for block in blocks {
            // Receiver was just created and is still alive; send cannot
            // block since capacity == blocks.len().
            let _ = send.send(block);
        }
        return Ok(recv);
    }
    // Larger ranges stream through the forward full-file reader.
    Ok(self.read(Some(start), Some(end)))
}
/// Returns a crossbeam channel receiver that streams `ReadBlock`s in chain order.
///
/// Both `start` and `end` are inclusive. `None` means unbounded.
pub fn read(&self, start: Option<Height>, end: Option<Height>) -> Receiver<ReadBlock> {
if let (Some(s), Some(e)) = (start, end)
&& s > e
{
let (_, recv) = bounded(0);
return recv;
}
let client = self.client.clone();
let (send_bytes, recv_bytes) = bounded(BOUND_CAP / 2);
@@ -151,53 +176,25 @@ impl ReaderInner {
thread::spawn(move || {
let _ = blk_index_to_blk_path.range(first_blk_index..).try_for_each(
move |(blk_index, blk_path)| {
let mut xor_i = XORIndex::default();
let blk_index = *blk_index;
let Ok(mut blk_bytes_) = fs::read(blk_path) else {
let Ok(mut bytes) = fs::read(blk_path) else {
error!("Failed to read blk file: {}", blk_path.display());
return ControlFlow::Break(());
};
let blk_bytes = blk_bytes_.as_mut_slice();
let mut i = 0;
loop {
let Some(offset) = find_magic(&blk_bytes[i..], &mut xor_i, xor_bytes)
else {
break;
};
i += offset;
if i + 4 > blk_bytes.len() {
warn!("Truncated blk file {blk_index}: not enough bytes for block length at offset {i}");
break;
}
let len = u32::from_le_bytes(
xor_i
.bytes(&mut blk_bytes[i..(i + 4)], xor_bytes)
.try_into()
.unwrap(),
) as usize;
i += 4;
if i + len > blk_bytes.len() {
warn!("Truncated blk file {blk_index}: block at offset {} claims {len} bytes but only {} remain", i - 4, blk_bytes.len() - i);
break;
}
let position = BlkPosition::new(blk_index, i as u32);
let metadata = BlkMetadata::new(position, len as u32);
let block_bytes = (blk_bytes[i..(i + len)]).to_vec();
if send_bytes.send((metadata, block_bytes, xor_i)).is_err() {
return ControlFlow::Break(());
}
i += len;
xor_i.add_assign(len);
let result = scan_bytes(
&mut bytes,
*blk_index,
0,
xor_bytes,
|metadata, block_bytes, xor_i| {
if send_bytes.send((metadata, block_bytes, xor_i)).is_err() {
return ControlFlow::Break(());
}
ControlFlow::Continue(())
},
);
if result.interrupted {
return ControlFlow::Break(());
}
ControlFlow::Continue(())
},
);
@@ -288,6 +285,83 @@ impl ReaderInner {
recv_ordered
}
/// Streams `ReadBlock`s in reverse order (newest first) by scanning
/// `.blk` files from the tail. Efficient for reading recent blocks.
/// Both `start` and `end` are inclusive. `None` means unbounded.
pub fn read_rev(&self, start: Option<Height>, end: Option<Height>) -> Receiver<ReadBlock> {
    // Tail window size: each file is read back-to-front in 5 MiB chunks.
    const CHUNK: usize = 5 * 1024 * 1024;
    // An inverted range can never match: return a closed, empty channel.
    if let (Some(s), Some(e)) = (start, end)
        && s > e
    {
        return bounded(0).1;
    }
    let client = self.client.clone();
    let xor_bytes = self.xor_bytes;
    // Re-scan the blocks directory so newly appeared blk files are picked
    // up, and refresh the shared index-to-path map as a side effect.
    let paths = BlkIndexToBlkPath::scan(&self.blocks_dir);
    *self.blk_index_to_blk_path.write() = paths.clone();
    let (send, recv) = bounded(BOUND_CAP);
    thread::spawn(move || {
        // Bytes preceding the first magic marker of the previously scanned
        // (later) chunk: they are the tail of a block record that straddles
        // the chunk boundary, so they get appended to the next (earlier)
        // chunk before scanning it.
        let mut head = Vec::new();
        // Walk blk files newest-first.
        for (&blk_index, path) in paths.iter().rev() {
            let file_len = fs::metadata(path).map(|m| m.len() as usize).unwrap_or(0);
            if file_len == 0 {
                continue;
            }
            let Ok(mut file) = File::open(path) else {
                return;
            };
            // Scan the file back-to-front in CHUNK-sized windows.
            let mut read_end = file_len;
            while read_end > 0 {
                let read_start = read_end.saturating_sub(CHUNK);
                let chunk_len = read_end - read_start;
                read_end = read_start;
                let _ = file.seek(SeekFrom::Start(read_start as u64));
                // Buffer layout: [chunk bytes][carried-over `head` bytes].
                let mut buf = vec![0u8; chunk_len + head.len()];
                if file.read_exact(&mut buf[..chunk_len]).is_err() {
                    return;
                }
                buf[chunk_len..].copy_from_slice(&head);
                head.clear();
                // scan_bytes walks the chunk forward; collect the decoded
                // blocks so they can be emitted reversed (newest first).
                let mut blocks = Vec::new();
                let result = scan_bytes(
                    &mut buf,
                    blk_index,
                    read_start,
                    xor_bytes,
                    |metadata, bytes, xor_i| {
                        // decode_block is handed the start/end bounds —
                        // presumably out-of-range blocks come back as
                        // Ok(None) and are skipped; confirm in decode.rs.
                        if let Ok(Some(block)) = decode_block(
                            bytes, metadata, &client, xor_i, xor_bytes, start, end, 0, 0,
                        ) {
                            blocks.push(block);
                        }
                        ControlFlow::Continue(())
                    },
                );
                for block in blocks.into_iter().rev() {
                    // Once the lowest requested height has been emitted there
                    // is nothing older worth sending; stop the producer.
                    let done = start.is_some_and(|s| block.height() <= s);
                    if send.send(block).is_err() || done {
                        return;
                    }
                }
                if read_start > 0 {
                    // Preserve the bytes before the first magic marker for the
                    // next (earlier) chunk; if no magic was found the whole
                    // buffer is carried over.
                    head = buf[..result.first_magic.unwrap_or(buf.len())].to_vec();
                }
            }
        }
    });
    recv
}
fn find_start_blk_index(
&self,
target_start: Option<Height>,
@@ -298,18 +372,6 @@ impl ReaderInner {
return Ok(0);
};
// If start is a very recent block we only look back X blk file before the last
if let Ok(height) = self.client.get_last_height()
&& (*height).saturating_sub(*target_start) <= 3
{
return Ok(blk_index_to_blk_path
.keys()
.rev()
.nth(2)
.copied()
.unwrap_or_default());
}
let blk_indices: Vec<u16> = blk_index_to_blk_path.keys().copied().collect();
if blk_indices.is_empty() {

View File

@@ -0,0 +1,73 @@
use std::ops::ControlFlow;
use brk_types::{BlkMetadata, BlkPosition};
use crate::{XORBytes, XORIndex};
const MAGIC_BYTES: [u8; 4] = [249, 190, 180, 217];
/// Scans `bytes` for the 4-byte network magic, XOR-decoding on the fly.
///
/// The XOR cursor `xor_i` is stepped for every byte examined (via
/// [`XORIndex::byte`]), so it stays in sync with the caller's position in
/// the obfuscated stream. Returns the index just *past* the magic sequence,
/// or `None` when the buffer contains no magic.
pub fn find_magic(bytes: &[u8], xor_i: &mut XORIndex, xor_bytes: XORBytes) -> Option<usize> {
    // Sliding window over the last four decoded bytes.
    let mut recent = [0u8; 4];
    for (pos, &raw) in bytes.iter().enumerate() {
        // Shift the window left by one slot and append the freshly
        // decoded byte at the end.
        recent.copy_within(1.., 0);
        recent[3] = xor_i.byte(raw, xor_bytes);
        if recent == MAGIC_BYTES {
            return Some(pos + 1);
        }
    }
    None
}
/// Outcome of a [`scan_bytes`] pass over a buffer.
pub struct ScanResult {
    /// Byte offset (within the scanned buffer) of the start of the first
    /// magic sequence found, or `None` if no magic was seen.
    pub first_magic: Option<usize>,
    /// `true` when the `on_block` callback returned `ControlFlow::Break`,
    /// i.e. the scan was stopped early at the caller's request.
    pub interrupted: bool,
}
/// Scans `buf` for blocks. `file_offset` is the absolute position of `buf[0]` in the file.
/// Calls `on_block` for each complete block found.
///
/// The XOR cursor is seeded with `file_offset` so that decoding stays aligned
/// with the absolute file position (the obfuscation key cycles over the whole
/// file, not per buffer). The 4-byte length field of each record is decoded
/// *in place* inside `buf`; the block payload handed to `on_block` is a fresh
/// `Vec` copy. Truncated trailing records (not enough bytes for the length
/// field or for the claimed payload) end the scan silently.
pub fn scan_bytes(
    buf: &mut [u8],
    blk_index: u16,
    file_offset: usize,
    xor_bytes: XORBytes,
    mut on_block: impl FnMut(BlkMetadata, Vec<u8>, XORIndex) -> ControlFlow<()>,
) -> ScanResult {
    let mut xor_i = XORIndex::default();
    // Align the XOR cursor with the absolute position of buf[0] in the file.
    xor_i.add_assign(file_offset);
    let mut first_magic = None;
    let mut i = 0;
    while let Some(off) = find_magic(&buf[i..], &mut xor_i, xor_bytes) {
        let before = i;
        // `off` points just past the magic; record where the magic itself
        // started (only for the first occurrence).
        i += off;
        first_magic.get_or_insert(before + off.saturating_sub(4));
        if i + 4 > buf.len() {
            // Truncated: no room for the length field.
            break;
        }
        // Decode the little-endian record length in place.
        let len = u32::from_le_bytes(
            xor_i
                .bytes(&mut buf[i..i + 4], xor_bytes)
                .try_into()
                .unwrap(),
        ) as usize;
        i += 4;
        if i + len > buf.len() {
            // Truncated: payload extends past the buffer (likely a block
            // straddling a chunk boundary for tail scans).
            break;
        }
        // Metadata records the absolute file position of the payload.
        let position = BlkPosition::new(blk_index, (file_offset + i) as u32);
        let metadata = BlkMetadata::new(position, len as u32);
        // `xor_i` is passed by value (a copied snapshot of the cursor at the
        // payload start) so the callback can decode the payload itself.
        if on_block(metadata, buf[i..i + len].to_vec(), xor_i).is_break() {
            return ScanResult {
                first_magic,
                interrupted: true,
            };
        }
        // Skip over the (still-encoded) payload and advance the cursor past it.
        i += len;
        xor_i.add_assign(len);
    }
    ScanResult {
        first_magic,
        interrupted: false,
    }
}

View File

@@ -240,4 +240,8 @@ impl ClientInner {
) -> Result<String> {
Ok(self.call_with_retry(|c| c.get_raw_transaction_hex(txid, block_hash))?)
}
/// Broadcasts a hex-encoded raw transaction to the node.
///
/// Delegates to `call_once`, i.e. a single RPC attempt — unlike the
/// retrying helpers used by the read-only calls nearby.
pub fn send_raw_transaction(&self, hex: &str) -> Result<bitcoin::Txid> {
    let txid = self.call_once(|c| c.send_raw_transaction(hex))?;
    Ok(txid)
}
}

View File

@@ -294,6 +294,14 @@ impl ClientInner {
})?;
Ok(r)
}
/// Broadcasts a hex-encoded raw transaction via the raw
/// `sendrawtransaction` RPC call, retrying transient failures.
pub fn send_raw_transaction(&self, hex: &str) -> Result<bitcoin::Txid> {
    // Own the hex so the retry closure can be invoked more than once.
    let payload = hex.to_string();
    let txid = self.call_with_retry(|c| {
        c.call("sendrawtransaction", &[serde_json::Value::String(payload.clone())])
    })?;
    Ok(txid)
}
}
// Local deserialization structs for raw RPC responses

View File

@@ -232,6 +232,10 @@ impl Client {
.get_raw_transaction_hex(txid.into(), block_hash.map(|h| h.into()))
}
/// Broadcasts a raw transaction and returns its id as the crate's `Txid`.
pub fn send_raw_transaction(&self, hex: &str) -> Result<Txid> {
    let txid = self.0.send_raw_transaction(hex)?;
    Ok(Txid::from(txid))
}
/// Checks if a block is in the main chain (has positive confirmations)
pub fn is_in_main_chain(&self, hash: &BlockHash) -> Result<bool> {
let block_info = self.get_block_info(hash)?;

View File

@@ -37,7 +37,7 @@ impl AddrRoutes for ApiRouter<AppState> {
.addrs_tag()
.summary("Address information")
.description("Retrieve address information including balance and transaction counts. Supports all standard Bitcoin address types (P2PKH, P2SH, P2WPKH, P2WSH, P2TR).\n\n*[Mempool.space docs](https://mempool.space/docs/api/rest#get-address)*")
.ok_response::<AddrStats>()
.json_response::<AddrStats>()
.not_modified()
.bad_request()
.not_found()
@@ -59,7 +59,7 @@ impl AddrRoutes for ApiRouter<AppState> {
.addrs_tag()
.summary("Address transactions")
.description("Get transaction history for an address, sorted with newest first. Returns up to 50 mempool transactions plus the first 25 confirmed transactions. Use ?after_txid=<txid> for pagination.\n\n*[Mempool.space docs](https://mempool.space/docs/api/rest#get-address-transactions)*")
.ok_response::<Vec<Transaction>>()
.json_response::<Vec<Transaction>>()
.not_modified()
.bad_request()
.not_found()
@@ -81,7 +81,7 @@ impl AddrRoutes for ApiRouter<AppState> {
.addrs_tag()
.summary("Address confirmed transactions")
.description("Get confirmed transactions for an address, 25 per page. Use ?after_txid=<txid> for pagination.\n\n*[Mempool.space docs](https://mempool.space/docs/api/rest#get-address-transactions-chain)*")
.ok_response::<Vec<Transaction>>()
.json_response::<Vec<Transaction>>()
.not_modified()
.bad_request()
.not_found()
@@ -103,7 +103,7 @@ impl AddrRoutes for ApiRouter<AppState> {
.addrs_tag()
.summary("Address mempool transactions")
.description("Get unconfirmed transaction IDs for an address from the mempool (up to 50).\n\n*[Mempool.space docs](https://mempool.space/docs/api/rest#get-address-transactions-mempool)*")
.ok_response::<Vec<Txid>>()
.json_response::<Vec<Txid>>()
.bad_request()
.not_found()
.server_error()
@@ -123,7 +123,7 @@ impl AddrRoutes for ApiRouter<AppState> {
.addrs_tag()
.summary("Address UTXOs")
.description("Get unspent transaction outputs (UTXOs) for an address. Returns txid, vout, value, and confirmation status for each UTXO.\n\n*[Mempool.space docs](https://mempool.space/docs/api/rest#get-address-utxo)*")
.ok_response::<Vec<Utxo>>()
.json_response::<Vec<Utxo>>()
.not_modified()
.bad_request()
.not_found()
@@ -144,7 +144,7 @@ impl AddrRoutes for ApiRouter<AppState> {
.addrs_tag()
.summary("Validate address")
.description("Validate a Bitcoin address and get information about its type and scriptPubKey.\n\n*[Mempool.space docs](https://mempool.space/docs/api/rest#get-address-validate)*")
.ok_response::<AddrValidation>()
.json_response::<AddrValidation>()
.not_modified()
),
)

View File

@@ -5,8 +5,8 @@ use axum::{
};
use brk_query::BLOCK_TXS_PAGE_SIZE;
use brk_types::{
BlockHashParam, BlockHashStartIndex, BlockHashTxIndex, BlockInfo, BlockStatus, BlockTimestamp,
HeightParam, TimestampParam, Transaction, Txid,
BlockHashParam, BlockHashStartIndex, BlockHashTxIndex, BlockInfo, BlockInfoV1, BlockStatus,
BlockTimestamp, HeightParam, TimestampParam, Transaction, TxIndex, Txid,
};
use crate::{CacheStrategy, extended::TransformResponseExtended};
@@ -20,72 +20,6 @@ pub trait BlockRoutes {
impl BlockRoutes for ApiRouter<AppState> {
fn add_block_routes(self) -> Self {
self.api_route(
"/api/blocks",
get_with(
async |uri: Uri, headers: HeaderMap, State(state): State<AppState>| {
state
.cached_json(&headers, CacheStrategy::Height, &uri, move |q| q.blocks(None))
.await
},
|op| {
op.id("get_blocks")
.blocks_tag()
.summary("Recent blocks")
.description("Retrieve the last 10 blocks. Returns block metadata for each block.\n\n*[Mempool.space docs](https://mempool.space/docs/api/rest#get-blocks)*")
.ok_response::<Vec<BlockInfo>>()
.not_modified()
.server_error()
},
),
)
.api_route(
"/api/blocks/{height}",
get_with(
async |uri: Uri,
headers: HeaderMap,
Path(path): Path<HeightParam>,
State(state): State<AppState>| {
state.cached_json(&headers, CacheStrategy::Height, &uri, move |q| q.blocks(Some(path.height))).await
},
|op| {
op.id("get_blocks_from_height")
.blocks_tag()
.summary("Blocks from height")
.description(
"Retrieve up to 10 blocks going backwards from the given height. For example, height=100 returns blocks 100, 99, 98, ..., 91. Height=0 returns only block 0.\n\n*[Mempool.space docs](https://mempool.space/docs/api/rest#get-blocks)*",
)
.ok_response::<Vec<BlockInfo>>()
.not_modified()
.bad_request()
.server_error()
},
),
)
.api_route(
"/api/block-height/{height}",
get_with(
async |uri: Uri,
headers: HeaderMap,
Path(path): Path<HeightParam>,
State(state): State<AppState>| {
state.cached_json(&headers, CacheStrategy::Height, &uri, move |q| q.block_by_height(path.height)).await
},
|op| {
op.id("get_block_by_height")
.blocks_tag()
.summary("Block by height")
.description(
"Retrieve block information by block height. Returns block metadata including hash, timestamp, difficulty, size, weight, and transaction count.\n\n*[Mempool.space docs](https://mempool.space/docs/api/rest#get-block-height)*",
)
.ok_response::<BlockInfo>()
.not_modified()
.bad_request()
.not_found()
.server_error()
},
),
)
.api_route(
"/api/block/{hash}",
get_with(
async |uri: Uri,
@@ -101,7 +35,7 @@ impl BlockRoutes for ApiRouter<AppState> {
.description(
"Retrieve block information by block hash. Returns block metadata including height, timestamp, difficulty, size, weight, and transaction count.\n\n*[Mempool.space docs](https://mempool.space/docs/api/rest#get-block)*",
)
.ok_response::<BlockInfo>()
.json_response::<BlockInfo>()
.not_modified()
.bad_request()
.not_found()
@@ -110,22 +44,61 @@ impl BlockRoutes for ApiRouter<AppState> {
),
)
.api_route(
"/api/block/{hash}/status",
"/api/v1/block/{hash}",
get_with(
async |uri: Uri, headers: HeaderMap, Path(path): Path<BlockHashParam>, State(state): State<AppState>| {
state.cached_json(&headers, CacheStrategy::Height, &uri, move |q| {
let height = q.height_by_hash(&path.hash)?;
q.block_by_height_v1(height)
}).await
},
|op| {
op.id("get_block_v1")
.blocks_tag()
.summary("Block (v1)")
.description("Returns block details with extras by hash.\n\n*[Mempool.space docs](https://mempool.space/docs/api/rest#get-block-v1)*")
.json_response::<BlockInfoV1>()
.not_modified()
.not_found()
.server_error()
},
),
)
.api_route(
"/api/block/{hash}/header",
get_with(
async |uri: Uri, headers: HeaderMap, Path(path): Path<BlockHashParam>, State(state): State<AppState>| {
state.cached_text(&headers, CacheStrategy::Height, &uri, move |q| q.block_header_hex(&path.hash)).await
},
|op| {
op.id("get_block_header")
.blocks_tag()
.summary("Block header")
.description("Returns the hex-encoded block header.\n\n*[Mempool.space docs](https://mempool.space/docs/api/rest#get-block-header)*")
.text_response()
.not_modified()
.not_found()
.server_error()
},
),
)
.api_route(
"/api/block-height/{height}",
get_with(
async |uri: Uri,
headers: HeaderMap,
Path(path): Path<BlockHashParam>,
Path(path): Path<HeightParam>,
State(state): State<AppState>| {
state.cached_json(&headers, CacheStrategy::Height, &uri, move |q| q.block_status(&path.hash)).await
state.cached_text(&headers, CacheStrategy::Height, &uri, move |q| q.block_hash_by_height(path.height).map(|h| h.to_string())).await
},
|op| {
op.id("get_block_status")
op.id("get_block_by_height")
.blocks_tag()
.summary("Block status")
.summary("Block hash by height")
.description(
"Retrieve the status of a block. Returns whether the block is in the best chain and, if so, its height and the hash of the next block.\n\n*[Mempool.space docs](https://mempool.space/docs/api/rest#get-block-status)*",
"Retrieve the block hash at a given height. Returns the hash as plain text.\n\n*[Mempool.space docs](https://mempool.space/docs/api/rest#get-block-height)*",
)
.ok_response::<BlockStatus>()
.text_response()
.not_modified()
.bad_request()
.not_found()
@@ -134,71 +107,20 @@ impl BlockRoutes for ApiRouter<AppState> {
),
)
.api_route(
"/api/block/{hash}/txids",
"/api/v1/mining/blocks/timestamp/{timestamp}",
get_with(
async |uri: Uri,
headers: HeaderMap,
Path(path): Path<BlockHashParam>,
Path(path): Path<TimestampParam>,
State(state): State<AppState>| {
state.cached_json(&headers, CacheStrategy::Static, &uri, move |q| q.block_txids(&path.hash)).await
state.cached_json(&headers, CacheStrategy::Height, &uri, move |q| q.block_by_timestamp(path.timestamp)).await
},
|op| {
op.id("get_block_txids")
op.id("get_block_by_timestamp")
.blocks_tag()
.summary("Block transaction IDs")
.description(
"Retrieve all transaction IDs in a block. Returns an array of txids in block order.\n\n*[Mempool.space docs](https://mempool.space/docs/api/rest#get-block-transaction-ids)*",
)
.ok_response::<Vec<Txid>>()
.not_modified()
.bad_request()
.not_found()
.server_error()
},
),
)
.api_route(
"/api/block/{hash}/txs/{start_index}",
get_with(
async |uri: Uri,
headers: HeaderMap,
Path(path): Path<BlockHashStartIndex>,
State(state): State<AppState>| {
state.cached_json(&headers, CacheStrategy::Static, &uri, move |q| q.block_txs(&path.hash, path.start_index)).await
},
|op| {
op.id("get_block_txs")
.blocks_tag()
.summary("Block transactions (paginated)")
.description(&format!(
"Retrieve transactions in a block by block hash, starting from the specified index. Returns up to {} transactions at a time.\n\n*[Mempool.space docs](https://mempool.space/docs/api/rest#get-block-transactions)*",
BLOCK_TXS_PAGE_SIZE
))
.ok_response::<Vec<Transaction>>()
.not_modified()
.bad_request()
.not_found()
.server_error()
},
),
)
.api_route(
"/api/block/{hash}/txid/{index}",
get_with(
async |uri: Uri,
headers: HeaderMap,
Path(path): Path<BlockHashTxIndex>,
State(state): State<AppState>| {
state.cached_text(&headers, CacheStrategy::Static, &uri, move |q| q.block_txid_at_index(&path.hash, path.index).map(|t| t.to_string())).await
},
|op| {
op.id("get_block_txid")
.blocks_tag()
.summary("Transaction ID at index")
.description(
"Retrieve a single transaction ID at a specific index within a block. Returns plain text txid.\n\n*[Mempool.space docs](https://mempool.space/docs/api/rest#get-block-transaction-id)*",
)
.ok_response::<Txid>()
.summary("Block by timestamp")
.description("Find the block closest to a given UNIX timestamp.\n\n*[Mempool.space docs](https://mempool.space/docs/api/rest#get-block-timestamp)*")
.json_response::<BlockTimestamp>()
.not_modified()
.bad_request()
.not_found()
@@ -222,7 +144,7 @@ impl BlockRoutes for ApiRouter<AppState> {
.description(
"Returns the raw block data in binary format.\n\n*[Mempool.space docs](https://mempool.space/docs/api/rest#get-block-raw)*",
)
.ok_response::<Vec<u8>>()
.json_response::<Vec<u8>>()
.not_modified()
.bad_request()
.not_found()
@@ -231,20 +153,22 @@ impl BlockRoutes for ApiRouter<AppState> {
),
)
.api_route(
"/api/v1/mining/blocks/timestamp/{timestamp}",
"/api/block/{hash}/status",
get_with(
async |uri: Uri,
headers: HeaderMap,
Path(path): Path<TimestampParam>,
Path(path): Path<BlockHashParam>,
State(state): State<AppState>| {
state.cached_json(&headers, CacheStrategy::Height, &uri, move |q| q.block_by_timestamp(path.timestamp)).await
state.cached_json(&headers, CacheStrategy::Height, &uri, move |q| q.block_status(&path.hash)).await
},
|op| {
op.id("get_block_by_timestamp")
op.id("get_block_status")
.blocks_tag()
.summary("Block by timestamp")
.description("Find the block closest to a given UNIX timestamp.\n\n*[Mempool.space docs](https://mempool.space/docs/api/rest#get-block-timestamp)*")
.ok_response::<BlockTimestamp>()
.summary("Block status")
.description(
"Retrieve the status of a block. Returns whether the block is in the best chain and, if so, its height and the hash of the next block.\n\n*[Mempool.space docs](https://mempool.space/docs/api/rest#get-block-status)*",
)
.json_response::<BlockStatus>()
.not_modified()
.bad_request()
.not_found()
@@ -252,5 +176,219 @@ impl BlockRoutes for ApiRouter<AppState> {
},
),
)
.api_route(
"/api/blocks/tip/height",
get_with(
async |uri: Uri, headers: HeaderMap, State(state): State<AppState>| {
state.cached_text(&headers, CacheStrategy::Height, &uri, |q| Ok(q.height().to_string())).await
},
|op| {
op.id("get_block_tip_height")
.blocks_tag()
.summary("Block tip height")
.description("Returns the height of the last block.\n\n*[Mempool.space docs](https://mempool.space/docs/api/rest#get-block-tip-height)*")
.text_response()
.not_modified()
.server_error()
},
),
)
.api_route(
"/api/blocks/tip/hash",
get_with(
async |uri: Uri, headers: HeaderMap, State(state): State<AppState>| {
state.cached_text(&headers, CacheStrategy::Height, &uri, |q| q.block_hash_by_height(q.height()).map(|h| h.to_string())).await
},
|op| {
op.id("get_block_tip_hash")
.blocks_tag()
.summary("Block tip hash")
.description("Returns the hash of the last block.\n\n*[Mempool.space docs](https://mempool.space/docs/api/rest#get-block-tip-hash)*")
.text_response()
.not_modified()
.server_error()
},
),
)
.api_route(
"/api/block/{hash}/txid/{index}",
get_with(
async |uri: Uri,
headers: HeaderMap,
Path(path): Path<BlockHashTxIndex>,
State(state): State<AppState>| {
state.cached_text(&headers, CacheStrategy::Static, &uri, move |q| q.block_txid_at_index(&path.hash, path.index).map(|t| t.to_string())).await
},
|op| {
op.id("get_block_txid")
.blocks_tag()
.summary("Transaction ID at index")
.description(
"Retrieve a single transaction ID at a specific index within a block. Returns plain text txid.\n\n*[Mempool.space docs](https://mempool.space/docs/api/rest#get-block-transaction-id)*",
)
.text_response()
.not_modified()
.bad_request()
.not_found()
.server_error()
},
),
)
.api_route(
"/api/block/{hash}/txids",
get_with(
async |uri: Uri,
headers: HeaderMap,
Path(path): Path<BlockHashParam>,
State(state): State<AppState>| {
state.cached_json(&headers, CacheStrategy::Static, &uri, move |q| q.block_txids(&path.hash)).await
},
|op| {
op.id("get_block_txids")
.blocks_tag()
.summary("Block transaction IDs")
.description(
"Retrieve all transaction IDs in a block. Returns an array of txids in block order.\n\n*[Mempool.space docs](https://mempool.space/docs/api/rest#get-block-transaction-ids)*",
)
.json_response::<Vec<Txid>>()
.not_modified()
.bad_request()
.not_found()
.server_error()
},
),
)
.api_route(
"/api/block/{hash}/txs",
get_with(
async |uri: Uri,
headers: HeaderMap,
Path(path): Path<BlockHashParam>,
State(state): State<AppState>| {
state.cached_json(&headers, CacheStrategy::Static, &uri, move |q| q.block_txs(&path.hash, TxIndex::default())).await
},
|op| {
op.id("get_block_txs")
.blocks_tag()
.summary("Block transactions")
.description(&format!(
"Retrieve transactions in a block by block hash. Returns up to {} transactions starting from index 0.\n\n*[Mempool.space docs](https://mempool.space/docs/api/rest#get-block-transactions)*",
BLOCK_TXS_PAGE_SIZE
))
.json_response::<Vec<Transaction>>()
.not_modified()
.bad_request()
.not_found()
.server_error()
},
),
)
.api_route(
"/api/block/{hash}/txs/{start_index}",
get_with(
async |uri: Uri,
headers: HeaderMap,
Path(path): Path<BlockHashStartIndex>,
State(state): State<AppState>| {
state.cached_json(&headers, CacheStrategy::Static, &uri, move |q| q.block_txs(&path.hash, path.start_index)).await
},
|op| {
op.id("get_block_txs_from_index")
.blocks_tag()
.summary("Block transactions (paginated)")
.description(&format!(
"Retrieve transactions in a block by block hash, starting from the specified index. Returns up to {} transactions at a time.\n\n*[Mempool.space docs](https://mempool.space/docs/api/rest#get-block-transactions)*",
BLOCK_TXS_PAGE_SIZE
))
.json_response::<Vec<Transaction>>()
.not_modified()
.bad_request()
.not_found()
.server_error()
},
),
)
.api_route(
"/api/blocks",
get_with(
async |uri: Uri, headers: HeaderMap, State(state): State<AppState>| {
state
.cached_json(&headers, CacheStrategy::Height, &uri, move |q| q.blocks(None))
.await
},
|op| {
op.id("get_blocks")
.blocks_tag()
.summary("Recent blocks")
.description("Retrieve the last 10 blocks. Returns block metadata for each block.\n\n*[Mempool.space docs](https://mempool.space/docs/api/rest#get-blocks)*")
.json_response::<Vec<BlockInfo>>()
.not_modified()
.server_error()
},
),
)
.api_route(
"/api/blocks/{height}",
get_with(
async |uri: Uri,
headers: HeaderMap,
Path(path): Path<HeightParam>,
State(state): State<AppState>| {
state.cached_json(&headers, CacheStrategy::Height, &uri, move |q| q.blocks(Some(path.height))).await
},
|op| {
op.id("get_blocks_from_height")
.blocks_tag()
.summary("Blocks from height")
.description(
"Retrieve up to 10 blocks going backwards from the given height. For example, height=100 returns blocks 100, 99, 98, ..., 91. Height=0 returns only block 0.\n\n*[Mempool.space docs](https://mempool.space/docs/api/rest#get-blocks)*",
)
.json_response::<Vec<BlockInfo>>()
.not_modified()
.bad_request()
.server_error()
},
),
)
.api_route(
"/api/v1/blocks",
get_with(
async |uri: Uri, headers: HeaderMap, State(state): State<AppState>| {
state
.cached_json(&headers, CacheStrategy::Height, &uri, move |q| q.blocks_v1(None))
.await
},
|op| {
op.id("get_blocks_v1")
.blocks_tag()
.summary("Recent blocks with extras")
.description("Retrieve the last 10 blocks with extended data including pool identification and fee statistics.\n\n*[Mempool.space docs](https://mempool.space/docs/api/rest#get-blocks-v1)*")
.json_response::<Vec<BlockInfoV1>>()
.not_modified()
.server_error()
},
),
)
.api_route(
"/api/v1/blocks/{height}",
get_with(
async |uri: Uri,
headers: HeaderMap,
Path(path): Path<HeightParam>,
State(state): State<AppState>| {
state.cached_json(&headers, CacheStrategy::Height, &uri, move |q| q.blocks_v1(Some(path.height))).await
},
|op| {
op.id("get_blocks_v1_from_height")
.blocks_tag()
.summary("Blocks from height with extras")
.description("Retrieve up to 10 blocks with extended data going backwards from the given height.\n\n*[Mempool.space docs](https://mempool.space/docs/api/rest#get-blocks-v1)*")
.json_response::<Vec<BlockInfoV1>>()
.not_modified()
.bad_request()
.server_error()
},
),
)
}
}

View File

@@ -0,0 +1,79 @@
use aide::axum::{ApiRouter, routing::get_with};
use axum::{
extract::State,
http::{HeaderMap, Uri},
};
use brk_types::{MempoolBlock, RecommendedFees};
use crate::extended::TransformResponseExtended;
use super::AppState;
pub trait FeesRoutes {
fn add_fees_routes(self) -> Self;
}
impl FeesRoutes for ApiRouter<AppState> {
fn add_fees_routes(self) -> Self {
self.api_route(
"/api/v1/fees/mempool-blocks",
get_with(
async |uri: Uri, headers: HeaderMap, State(state): State<AppState>| {
state
.cached_json(&headers, state.mempool_cache(), &uri, |q| {
q.mempool_blocks()
})
.await
},
|op| {
op.id("get_mempool_blocks")
.fees_tag()
.summary("Projected mempool blocks")
.description("Get projected blocks from the mempool for fee estimation.\n\n*[Mempool.space docs](https://mempool.space/docs/api/rest#get-mempool-blocks-fees)*")
.json_response::<Vec<MempoolBlock>>()
.server_error()
},
),
)
.api_route(
"/api/v1/fees/recommended",
get_with(
async |uri: Uri, headers: HeaderMap, State(state): State<AppState>| {
state
.cached_json(&headers, state.mempool_cache(), &uri, |q| {
q.recommended_fees()
})
.await
},
|op| {
op.id("get_recommended_fees")
.fees_tag()
.summary("Recommended fees")
.description("Get recommended fee rates for different confirmation targets.\n\n*[Mempool.space docs](https://mempool.space/docs/api/rest#get-recommended-fees)*")
.json_response::<RecommendedFees>()
.server_error()
},
),
)
.api_route(
"/api/v1/fees/precise",
get_with(
async |uri: Uri, headers: HeaderMap, State(state): State<AppState>| {
state
.cached_json(&headers, state.mempool_cache(), &uri, |q| {
q.recommended_fees()
})
.await
},
|op| {
op.id("get_precise_fees")
.fees_tag()
.summary("Precise recommended fees")
.description("Get recommended fee rates with up to 3 decimal places.\n\n*[Mempool.space docs](https://mempool.space/docs/api/rest#get-recommended-fees-precise)*")
.json_response::<RecommendedFees>()
.server_error()
},
),
)
}
}

View File

@@ -0,0 +1,89 @@
use aide::axum::{ApiRouter, routing::get_with};
use axum::{
extract::{Query, State},
http::{HeaderMap, Uri},
};
use brk_types::{
DifficultyAdjustment, HistoricalPrice, OptionalTimestampParam, Prices, Timestamp,
};
use crate::{CacheStrategy, extended::TransformResponseExtended};
use super::AppState;
pub trait GeneralRoutes {
fn add_general_routes(self) -> Self;
}
impl GeneralRoutes for ApiRouter<AppState> {
fn add_general_routes(self) -> Self {
self.api_route(
"/api/v1/difficulty-adjustment",
get_with(
async |uri: Uri, headers: HeaderMap, State(state): State<AppState>| {
state
.cached_json(&headers, CacheStrategy::Height, &uri, |q| {
q.difficulty_adjustment()
})
.await
},
|op| {
op.id("get_difficulty_adjustment")
.general_tag()
.summary("Difficulty adjustment")
.description("Get current difficulty adjustment progress and estimates.\n\n*[Mempool.space docs](https://mempool.space/docs/api/rest#get-difficulty-adjustment)*")
.json_response::<DifficultyAdjustment>()
.not_modified()
.server_error()
},
),
)
.api_route(
"/api/v1/prices",
get_with(
async |uri: Uri, headers: HeaderMap, State(state): State<AppState>| {
state
.cached_json(&headers, state.mempool_cache(), &uri, |q| {
Ok(Prices {
time: Timestamp::now(),
usd: q.live_price()?,
})
})
.await
},
|op| {
op.id("get_prices")
.general_tag()
.summary("Current BTC price")
.description("Returns bitcoin latest price (on-chain derived, USD only).\n\n*[Mempool.space docs](https://mempool.space/docs/api/rest#get-price)*")
.json_response::<Prices>()
.server_error()
},
),
)
.api_route(
"/api/v1/historical-price",
get_with(
async |uri: Uri,
headers: HeaderMap,
Query(params): Query<OptionalTimestampParam>,
State(state): State<AppState>| {
state
.cached_json(&headers, CacheStrategy::Height, &uri, move |q| {
q.historical_price(params.timestamp)
})
.await
},
|op| {
op.id("get_historical_price")
.general_tag()
.summary("Historical price")
.description("Get historical BTC/USD price. Optionally specify a UNIX timestamp to get the price at that time.\n\n*[Mempool.space docs](https://mempool.space/docs/api/rest#get-historical-price)*")
.json_response::<HistoricalPrice>()
.not_modified()
.server_error()
},
),
)
}
}

View File

@@ -2,10 +2,8 @@ use aide::axum::{ApiRouter, routing::get_with};
use axum::{
extract::State,
http::{HeaderMap, Uri},
response::Redirect,
routing::get,
};
use brk_types::{Dollars, MempoolBlock, MempoolInfo, RecommendedFees, Txid};
use brk_types::{Dollars, MempoolInfo, MempoolRecentTx, Txid};
use crate::extended::TransformResponseExtended;
@@ -17,91 +15,81 @@ pub trait MempoolRoutes {
impl MempoolRoutes for ApiRouter<AppState> {
fn add_mempool_routes(self) -> Self {
self
.route("/api/mempool", get(Redirect::temporary("/api#tag/mempool")))
.api_route(
"/api/mempool/info",
get_with(
async |uri: Uri, headers: HeaderMap, State(state): State<AppState>| {
state.cached_json(&headers, state.mempool_cache(), &uri, |q| q.mempool_info()).await
},
|op| {
op.id("get_mempool")
.mempool_tag()
.summary("Mempool statistics")
.description("Get current mempool statistics including transaction count, total vsize, and total fees.\n\n*[Mempool.space docs](https://mempool.space/docs/api/rest#get-mempool)*")
.ok_response::<MempoolInfo>()
.server_error()
},
),
)
.api_route(
"/api/mempool/txids",
get_with(
async |uri: Uri, headers: HeaderMap, State(state): State<AppState>| {
state.cached_json(&headers, state.mempool_cache(), &uri, |q| q.mempool_txids()).await
},
|op| {
op.id("get_mempool_txids")
.mempool_tag()
.summary("Mempool transaction IDs")
.description("Get all transaction IDs currently in the mempool.\n\n*[Mempool.space docs](https://mempool.space/docs/api/rest#get-mempool-transaction-ids)*")
.ok_response::<Vec<Txid>>()
.server_error()
},
),
)
.api_route(
"/api/mempool/price",
get_with(
async |uri: Uri, headers: HeaderMap, State(state): State<AppState>| {
state.cached_json(&headers, state.mempool_cache(), &uri, |q| q.live_price()).await
},
|op| {
op.id("get_live_price")
.mempool_tag()
.summary("Live BTC/USD price")
.description(
"Returns the current BTC/USD price in dollars, derived from \
on-chain round-dollar output patterns in the last 12 blocks \
plus mempool.",
)
.ok_response::<Dollars>()
.server_error()
},
),
)
.api_route(
"/api/v1/fees/recommended",
get_with(
async |uri: Uri, headers: HeaderMap, State(state): State<AppState>| {
state.cached_json(&headers, state.mempool_cache(), &uri, |q| q.recommended_fees()).await
},
|op| {
op.id("get_recommended_fees")
.mempool_tag()
.summary("Recommended fees")
.description("Get recommended fee rates for different confirmation targets based on current mempool state.\n\n*[Mempool.space docs](https://mempool.space/docs/api/rest#get-recommended-fees)*")
.ok_response::<RecommendedFees>()
.server_error()
},
),
)
.api_route(
"/api/v1/fees/mempool-blocks",
get_with(
async |uri: Uri, headers: HeaderMap, State(state): State<AppState>| {
state.cached_json(&headers, state.mempool_cache(), &uri, |q| q.mempool_blocks()).await
},
|op| {
op.id("get_mempool_blocks")
.mempool_tag()
.summary("Projected mempool blocks")
.description("Get projected blocks from the mempool for fee estimation. Each block contains statistics about transactions that would be included if a block were mined now.\n\n*[Mempool.space docs](https://mempool.space/docs/api/rest#get-mempool-blocks-fees)*")
.ok_response::<Vec<MempoolBlock>>()
.server_error()
},
),
)
self.api_route(
"/api/mempool",
get_with(
async |uri: Uri, headers: HeaderMap, State(state): State<AppState>| {
state
.cached_json(&headers, state.mempool_cache(), &uri, |q| q.mempool_info())
.await
},
|op| {
op.id("get_mempool")
.mempool_tag()
.summary("Mempool statistics")
.description("Get current mempool statistics including transaction count, total vsize, total fees, and fee histogram.\n\n*[Mempool.space docs](https://mempool.space/docs/api/rest#get-mempool)*")
.json_response::<MempoolInfo>()
.server_error()
},
),
)
.api_route(
"/api/mempool/txids",
get_with(
async |uri: Uri, headers: HeaderMap, State(state): State<AppState>| {
state
.cached_json(&headers, state.mempool_cache(), &uri, |q| q.mempool_txids())
.await
},
|op| {
op.id("get_mempool_txids")
.mempool_tag()
.summary("Mempool transaction IDs")
.description("Get all transaction IDs currently in the mempool.\n\n*[Mempool.space docs](https://mempool.space/docs/api/rest#get-mempool-transaction-ids)*")
.json_response::<Vec<Txid>>()
.server_error()
},
),
)
.api_route(
"/api/mempool/recent",
get_with(
async |uri: Uri, headers: HeaderMap, State(state): State<AppState>| {
state
.cached_json(&headers, state.mempool_cache(), &uri, |q| q.mempool_recent())
.await
},
|op| {
op.id("get_mempool_recent")
.mempool_tag()
.summary("Recent mempool transactions")
.description("Get the last 10 transactions to enter the mempool.\n\n*[Mempool.space docs](https://mempool.space/docs/api/rest#get-mempool-recent)*")
.json_response::<Vec<MempoolRecentTx>>()
.server_error()
},
),
)
.api_route(
"/api/mempool/price",
get_with(
async |uri: Uri, headers: HeaderMap, State(state): State<AppState>| {
state
.cached_json(&headers, state.mempool_cache(), &uri, |q| q.live_price())
.await
},
|op| {
op.id("get_live_price")
.mempool_tag()
.summary("Live BTC/USD price")
.description(
"Returns the current BTC/USD price in dollars, derived from \
on-chain round-dollar output patterns in the last 12 blocks \
plus mempool.",
)
.json_response::<Dollars>()
.server_error()
},
),
)
}
}

View File

@@ -57,7 +57,7 @@ impl ApiMetricsLegacyRoutes for ApiRouter<AppState> {
"**DEPRECATED** - Use `/api/series` instead.\n\n\
Sunset date: 2027-01-01."
)
.ok_response::<TreeNode>()
.json_response::<TreeNode>()
.not_modified(),
),
)
@@ -80,7 +80,7 @@ impl ApiMetricsLegacyRoutes for ApiRouter<AppState> {
"**DEPRECATED** - Use `/api/series/count` instead.\n\n\
Sunset date: 2027-01-01."
)
.ok_response::<DetailedSeriesCount>()
.json_response::<DetailedSeriesCount>()
.not_modified(),
),
)
@@ -103,7 +103,7 @@ impl ApiMetricsLegacyRoutes for ApiRouter<AppState> {
"**DEPRECATED** - Use `/api/series/indexes` instead.\n\n\
Sunset date: 2027-01-01."
)
.ok_response::<Vec<IndexInfo>>()
.json_response::<Vec<IndexInfo>>()
.not_modified(),
),
)
@@ -127,7 +127,7 @@ impl ApiMetricsLegacyRoutes for ApiRouter<AppState> {
"**DEPRECATED** - Use `/api/series/list` instead.\n\n\
Sunset date: 2027-01-01."
)
.ok_response::<PaginatedSeries>()
.json_response::<PaginatedSeries>()
.not_modified(),
),
)
@@ -151,7 +151,7 @@ impl ApiMetricsLegacyRoutes for ApiRouter<AppState> {
"**DEPRECATED** - Use `/api/series/search` instead.\n\n\
Sunset date: 2027-01-01."
)
.ok_response::<Vec<&str>>()
.json_response::<Vec<&str>>()
.not_modified()
.server_error(),
),
@@ -173,7 +173,7 @@ impl ApiMetricsLegacyRoutes for ApiRouter<AppState> {
"**DEPRECATED** - Use `/api/series/bulk` instead.\n\n\
Sunset date: 2027-01-01."
)
.ok_response::<Vec<SeriesData>>()
.json_response::<Vec<SeriesData>>()
.csv_response()
.not_modified(),
),
@@ -201,7 +201,7 @@ impl ApiMetricsLegacyRoutes for ApiRouter<AppState> {
"**DEPRECATED** - Use `/api/series/{series}` instead.\n\n\
Sunset date: 2027-01-01."
)
.ok_response::<SeriesInfo>()
.json_response::<SeriesInfo>()
.not_modified()
.not_found()
.server_error(),
@@ -231,7 +231,7 @@ impl ApiMetricsLegacyRoutes for ApiRouter<AppState> {
"**DEPRECATED** - Use `/api/series/{series}/{index}` instead.\n\n\
Sunset date: 2027-01-01."
)
.ok_response::<SeriesData>()
.json_response::<SeriesData>()
.csv_response()
.not_modified()
.not_found(),
@@ -261,7 +261,7 @@ impl ApiMetricsLegacyRoutes for ApiRouter<AppState> {
"**DEPRECATED** - Use `/api/series/{series}/{index}/data` instead.\n\n\
Sunset date: 2027-01-01."
)
.ok_response::<Vec<serde_json::Value>>()
.json_response::<Vec<serde_json::Value>>()
.csv_response()
.not_modified()
.not_found(),
@@ -289,7 +289,7 @@ impl ApiMetricsLegacyRoutes for ApiRouter<AppState> {
"**DEPRECATED** - Use `/api/series/{series}/{index}/latest` instead.\n\n\
Sunset date: 2027-01-01."
)
.ok_response::<serde_json::Value>()
.json_response::<serde_json::Value>()
.not_found(),
),
)
@@ -315,7 +315,7 @@ impl ApiMetricsLegacyRoutes for ApiRouter<AppState> {
"**DEPRECATED** - Use `/api/series/{series}/{index}/len` instead.\n\n\
Sunset date: 2027-01-01."
)
.ok_response::<usize>()
.json_response::<usize>()
.not_found(),
),
)
@@ -341,7 +341,7 @@ impl ApiMetricsLegacyRoutes for ApiRouter<AppState> {
"**DEPRECATED** - Use `/api/series/{series}/{index}/version` instead.\n\n\
Sunset date: 2027-01-01."
)
.ok_response::<brk_types::Version>()
.json_response::<brk_types::Version>()
.not_found(),
),
)
@@ -363,7 +363,7 @@ impl ApiMetricsLegacyRoutes for ApiRouter<AppState> {
"**DEPRECATED** - Use `/api/series/cost-basis` instead.\n\n\
Sunset date: 2027-01-01."
)
.ok_response::<Vec<String>>()
.json_response::<Vec<String>>()
.server_error()
},
),
@@ -390,7 +390,7 @@ impl ApiMetricsLegacyRoutes for ApiRouter<AppState> {
"**DEPRECATED** - Use `/api/series/cost-basis/{cohort}/dates` instead.\n\n\
Sunset date: 2027-01-01."
)
.ok_response::<Vec<Date>>()
.json_response::<Vec<Date>>()
.not_found()
.server_error()
},
@@ -424,7 +424,7 @@ impl ApiMetricsLegacyRoutes for ApiRouter<AppState> {
"**DEPRECATED** - Use `/api/series/cost-basis/{cohort}/{date}` instead.\n\n\
Sunset date: 2027-01-01."
)
.ok_response::<CostBasisFormatted>()
.json_response::<CostBasisFormatted>()
.not_found()
.server_error()
},
@@ -471,7 +471,7 @@ impl ApiMetricsLegacyRoutes for ApiRouter<AppState> {
Returns raw data without the SeriesData wrapper."
)
.deprecated()
.ok_response::<serde_json::Value>()
.json_response::<serde_json::Value>()
.not_modified(),
),
)
@@ -498,7 +498,7 @@ impl ApiMetricsLegacyRoutes for ApiRouter<AppState> {
Legacy endpoint for querying series. Returns raw data without the SeriesData wrapper."
)
.deprecated()
.ok_response::<serde_json::Value>()
.json_response::<serde_json::Value>()
.not_modified(),
),
)

View File

@@ -6,8 +6,9 @@ use axum::{
routing::get,
};
use brk_types::{
BlockCountParam, BlockFeesEntry, BlockRewardsEntry, BlockSizesWeights, DifficultyAdjustment,
DifficultyAdjustmentEntry, HashrateSummary, PoolDetail, PoolInfo, PoolSlugParam, PoolsSummary,
BlockCountParam, BlockFeesEntry, BlockInfoV1, BlockRewardsEntry, BlockSizesWeights,
DifficultyAdjustmentEntry, HashrateSummary, PoolDetail,
PoolHashrateEntry, PoolInfo, PoolSlugAndHeightParam, PoolSlugParam, PoolsSummary,
RewardStats, TimePeriodParam,
};
@@ -25,23 +26,6 @@ impl MiningRoutes for ApiRouter<AppState> {
"/api/v1/mining",
get(Redirect::temporary("/api#tag/mining")),
)
.api_route(
"/api/v1/difficulty-adjustment",
get_with(
async |uri: Uri, headers: HeaderMap, State(state): State<AppState>| {
state.cached_json(&headers, CacheStrategy::Height, &uri, |q| q.difficulty_adjustment()).await
},
|op| {
op.id("get_difficulty_adjustment")
.mining_tag()
.summary("Difficulty adjustment")
.description("Get current difficulty adjustment information including progress through the current epoch, estimated retarget date, and difficulty change prediction.\n\n*[Mempool.space docs](https://mempool.space/docs/api/rest#get-difficulty-adjustment)*")
.ok_response::<DifficultyAdjustment>()
.not_modified()
.server_error()
},
),
)
.api_route(
"/api/v1/mining/pools",
get_with(
@@ -54,7 +38,7 @@ impl MiningRoutes for ApiRouter<AppState> {
.mining_tag()
.summary("List all mining pools")
.description("Get list of all known mining pools with their identifiers.\n\n*[Mempool.space docs](https://mempool.space/docs/api/rest#get-mining-pools)*")
.ok_response::<Vec<PoolInfo>>()
.json_response::<Vec<PoolInfo>>()
.not_modified()
.server_error()
},
@@ -71,7 +55,7 @@ impl MiningRoutes for ApiRouter<AppState> {
.mining_tag()
.summary("Mining pool statistics")
.description("Get mining pool statistics for a time period. Valid periods: 24h, 3d, 1w, 1m, 3m, 6m, 1y, 2y, 3y\n\n*[Mempool.space docs](https://mempool.space/docs/api/rest#get-mining-pools)*")
.ok_response::<PoolsSummary>()
.json_response::<PoolsSummary>()
.not_modified()
.server_error()
},
@@ -88,7 +72,95 @@ impl MiningRoutes for ApiRouter<AppState> {
.mining_tag()
.summary("Mining pool details")
.description("Get detailed information about a specific mining pool including block counts and shares for different time periods.\n\n*[Mempool.space docs](https://mempool.space/docs/api/rest#get-mining-pool)*")
.ok_response::<PoolDetail>()
.json_response::<PoolDetail>()
.not_modified()
.not_found()
.server_error()
},
),
)
.api_route(
"/api/v1/mining/hashrate/pools",
get_with(
async |uri: Uri, headers: HeaderMap, State(state): State<AppState>| {
state.cached_json(&headers, CacheStrategy::Height, &uri, |q| q.pools_hashrate(None)).await
},
|op| {
op.id("get_pools_hashrate")
.mining_tag()
.summary("All pools hashrate (all time)")
.description("Get hashrate data for all mining pools.\n\n*[Mempool.space docs](https://mempool.space/docs/api/rest#get-mining-pool-hashrates)*")
.json_response::<Vec<PoolHashrateEntry>>()
.not_modified()
.server_error()
},
),
)
.api_route(
"/api/v1/mining/hashrate/pools/{time_period}",
get_with(
async |uri: Uri, headers: HeaderMap, Path(path): Path<TimePeriodParam>, State(state): State<AppState>| {
state.cached_json(&headers, CacheStrategy::Height, &uri, move |q| q.pools_hashrate(Some(path.time_period))).await
},
|op| {
op.id("get_pools_hashrate_by_period")
.mining_tag()
.summary("All pools hashrate")
.description("Get hashrate data for all mining pools for a time period. Valid periods: 1m, 3m, 6m, 1y, 2y, 3y\n\n*[Mempool.space docs](https://mempool.space/docs/api/rest#get-mining-pool-hashrates)*")
.json_response::<Vec<PoolHashrateEntry>>()
.not_modified()
.server_error()
},
),
)
.api_route(
"/api/v1/mining/pool/{slug}/hashrate",
get_with(
async |uri: Uri, headers: HeaderMap, Path(path): Path<PoolSlugParam>, State(state): State<AppState>| {
state.cached_json(&headers, CacheStrategy::Height, &uri, move |q| q.pool_hashrate(path.slug)).await
},
|op| {
op.id("get_pool_hashrate")
.mining_tag()
.summary("Mining pool hashrate")
.description("Get hashrate history for a specific mining pool.\n\n*[Mempool.space docs](https://mempool.space/docs/api/rest#get-mining-pool-hashrate)*")
.json_response::<Vec<PoolHashrateEntry>>()
.not_modified()
.not_found()
.server_error()
},
),
)
.api_route(
"/api/v1/mining/pool/{slug}/blocks",
get_with(
async |uri: Uri, headers: HeaderMap, Path(path): Path<PoolSlugParam>, State(state): State<AppState>| {
state.cached_json(&headers, CacheStrategy::Height, &uri, move |q| q.pool_blocks(path.slug, None)).await
},
|op| {
op.id("get_pool_blocks")
.mining_tag()
.summary("Mining pool blocks")
.description("Get the 10 most recent blocks mined by a specific pool.\n\n*[Mempool.space docs](https://mempool.space/docs/api/rest#get-mining-pool-blocks)*")
.json_response::<Vec<BlockInfoV1>>()
.not_modified()
.not_found()
.server_error()
},
),
)
.api_route(
"/api/v1/mining/pool/{slug}/blocks/{height}",
get_with(
async |uri: Uri, headers: HeaderMap, Path(PoolSlugAndHeightParam {slug, height}): Path<PoolSlugAndHeightParam>, State(state): State<AppState>| {
state.cached_json(&headers, CacheStrategy::Height, &uri, move |q| q.pool_blocks(slug, Some(height))).await
},
|op| {
op.id("get_pool_blocks_from")
.mining_tag()
.summary("Mining pool blocks from height")
.description("Get 10 blocks mined by a specific pool before (and including) the given height.\n\n*[Mempool.space docs](https://mempool.space/docs/api/rest#get-mining-pool-blocks)*")
.json_response::<Vec<BlockInfoV1>>()
.not_modified()
.not_found()
.server_error()
@@ -106,7 +178,7 @@ impl MiningRoutes for ApiRouter<AppState> {
.mining_tag()
.summary("Network hashrate (all time)")
.description("Get network hashrate and difficulty data for all time.\n\n*[Mempool.space docs](https://mempool.space/docs/api/rest#get-hashrate)*")
.ok_response::<HashrateSummary>()
.json_response::<HashrateSummary>()
.not_modified()
.server_error()
},
@@ -123,7 +195,7 @@ impl MiningRoutes for ApiRouter<AppState> {
.mining_tag()
.summary("Network hashrate")
.description("Get network hashrate and difficulty data for a time period. Valid periods: 24h, 3d, 1w, 1m, 3m, 6m, 1y, 2y, 3y\n\n*[Mempool.space docs](https://mempool.space/docs/api/rest#get-hashrate)*")
.ok_response::<HashrateSummary>()
.json_response::<HashrateSummary>()
.not_modified()
.server_error()
},
@@ -140,7 +212,7 @@ impl MiningRoutes for ApiRouter<AppState> {
.mining_tag()
.summary("Difficulty adjustments (all time)")
.description("Get historical difficulty adjustments including timestamp, block height, difficulty value, and percentage change.\n\n*[Mempool.space docs](https://mempool.space/docs/api/rest#get-difficulty-adjustments)*")
.ok_response::<Vec<DifficultyAdjustmentEntry>>()
.json_response::<Vec<DifficultyAdjustmentEntry>>()
.not_modified()
.server_error()
},
@@ -157,7 +229,24 @@ impl MiningRoutes for ApiRouter<AppState> {
.mining_tag()
.summary("Difficulty adjustments")
.description("Get historical difficulty adjustments for a time period. Valid periods: 24h, 3d, 1w, 1m, 3m, 6m, 1y, 2y, 3y.\n\n*[Mempool.space docs](https://mempool.space/docs/api/rest#get-difficulty-adjustments)*")
.ok_response::<Vec<DifficultyAdjustmentEntry>>()
.json_response::<Vec<DifficultyAdjustmentEntry>>()
.not_modified()
.server_error()
},
),
)
.api_route(
"/api/v1/mining/reward-stats/{block_count}",
get_with(
async |uri: Uri, headers: HeaderMap, Path(path): Path<BlockCountParam>, State(state): State<AppState>| {
state.cached_json(&headers, CacheStrategy::Height, &uri, move |q| q.reward_stats(path.block_count)).await
},
|op| {
op.id("get_reward_stats")
.mining_tag()
.summary("Mining reward statistics")
.description("Get mining reward statistics for the last N blocks including total rewards, fees, and transaction count.\n\n*[Mempool.space docs](https://mempool.space/docs/api/rest#get-reward-stats)*")
.json_response::<RewardStats>()
.not_modified()
.server_error()
},
@@ -174,7 +263,7 @@ impl MiningRoutes for ApiRouter<AppState> {
.mining_tag()
.summary("Block fees")
.description("Get average block fees for a time period. Valid periods: 24h, 3d, 1w, 1m, 3m, 6m, 1y, 2y, 3y\n\n*[Mempool.space docs](https://mempool.space/docs/api/rest#get-block-fees)*")
.ok_response::<Vec<BlockFeesEntry>>()
.json_response::<Vec<BlockFeesEntry>>()
.not_modified()
.server_error()
},
@@ -191,7 +280,7 @@ impl MiningRoutes for ApiRouter<AppState> {
.mining_tag()
.summary("Block rewards")
.description("Get average block rewards (coinbase = subsidy + fees) for a time period. Valid periods: 24h, 3d, 1w, 1m, 3m, 6m, 1y, 2y, 3y\n\n*[Mempool.space docs](https://mempool.space/docs/api/rest#get-block-rewards)*")
.ok_response::<Vec<BlockRewardsEntry>>()
.json_response::<Vec<BlockRewardsEntry>>()
.not_modified()
.server_error()
},
@@ -223,24 +312,7 @@ impl MiningRoutes for ApiRouter<AppState> {
.mining_tag()
.summary("Block sizes and weights")
.description("Get average block sizes and weights for a time period. Valid periods: 24h, 3d, 1w, 1m, 3m, 6m, 1y, 2y, 3y\n\n*[Mempool.space docs](https://mempool.space/docs/api/rest#get-sizes-weights)*")
.ok_response::<BlockSizesWeights>()
.not_modified()
.server_error()
},
),
)
.api_route(
"/api/v1/mining/reward-stats/{block_count}",
get_with(
async |uri: Uri, headers: HeaderMap, Path(path): Path<BlockCountParam>, State(state): State<AppState>| {
state.cached_json(&headers, CacheStrategy::Height, &uri, move |q| q.reward_stats(path.block_count)).await
},
|op| {
op.id("get_reward_stats")
.mining_tag()
.summary("Mining reward statistics")
.description("Get mining reward statistics for the last N blocks including total rewards, fees, and transaction count.\n\n*[Mempool.space docs](https://mempool.space/docs/api/rest#get-reward-stats)*")
.ok_response::<RewardStats>()
.json_response::<BlockSizesWeights>()
.not_modified()
.server_error()
},

View File

@@ -14,9 +14,9 @@ use axum::{
use crate::{
Error,
api::{
addrs::AddrRoutes, blocks::BlockRoutes, mempool::MempoolRoutes,
metrics_legacy::ApiMetricsLegacyRoutes, mining::MiningRoutes, series::ApiSeriesRoutes,
server::ServerRoutes, transactions::TxRoutes,
addrs::AddrRoutes, blocks::BlockRoutes, fees::FeesRoutes, general::GeneralRoutes,
mempool::MempoolRoutes, metrics_legacy::ApiMetricsLegacyRoutes, mining::MiningRoutes,
series::ApiSeriesRoutes, server::ServerRoutes, transactions::TxRoutes,
},
extended::{ResponseExtended, TransformResponseExtended},
};
@@ -25,6 +25,8 @@ use super::AppState;
mod addrs;
mod blocks;
mod fees;
mod general;
mod mempool;
mod metrics_legacy;
mod mining;
@@ -44,11 +46,13 @@ impl ApiRoutes for ApiRouter<AppState> {
self.add_server_routes()
.add_series_routes()
.add_metrics_legacy_routes()
.add_block_routes()
.add_tx_routes()
.add_general_routes()
.add_addr_routes()
.add_mempool_routes()
.add_block_routes()
.add_mining_routes()
.add_fees_routes()
.add_mempool_routes()
.add_tx_routes()
.route("/api/server", get(Redirect::temporary("/api#tag/server")))
.api_route(
"/openapi.json",
@@ -81,7 +85,7 @@ impl ApiRoutes for ApiRouter<AppState> {
Removes redundant fields while preserving essential API information. \
Full spec available at `/openapi.json`.",
)
.ok_response::<serde_json::Value>()
.json_response::<serde_json::Value>()
},
),
)

View File

@@ -22,22 +22,22 @@ pub fn create_openapi() -> OpenApi {
let info = Info {
title: "Bitcoin Research Kit".to_string(),
description: Some(
r#"API for querying Bitcoin blockchain data and on-chain series.
r#"API for querying Bitcoin blockchain data, mempool state, and on-chain series.
### Features
- **Series**: Thousands of time-series across multiple indexes (date, block height, etc.)
- **[Mempool.space](https://mempool.space/docs/api/rest) compatible** (WIP): Most non-series endpoints follow the mempool.space API format
- **[Mempool.space](https://mempool.space/docs/api/rest) compatible**: Blocks, transactions, addresses, mining, fees, and mempool endpoints match the mempool.space REST API
- **Series**: Thousands of on-chain time-series across multiple indexes (date, block height, etc.)
- **Multiple formats**: JSON and CSV output
- **LLM-optimized**: [`/llms.txt`](/llms.txt) for discovery, [`/api.json`](/api.json) compact OpenAPI spec for tool use (full spec at [`/openapi.json`](/openapi.json))
### Quick start
```bash
curl -s https://bitview.space/api/block-height/0
curl -s https://bitview.space/api/blocks/tip/height
curl -s https://bitview.space/api/v1/fees/recommended
curl -s https://bitview.space/api/mempool
curl -s https://bitview.space/api/series/search?q=price
curl -s https://bitview.space/api/series/price/day
curl -s https://bitview.space/api/series/price/day/latest
```
### Errors
@@ -50,7 +50,7 @@ All errors return structured JSON with a consistent format:
"type": "not_found",
"code": "series_not_found",
"message": "'foo' not found, did you mean 'bar'?",
"doc_url": "https://bitcoinresearchkit.org/api"
"doc_url": "/api"
}
}
```
@@ -108,21 +108,10 @@ All errors return structured JSON with a consistent format:
..Default::default()
},
Tag {
name: "Blocks".to_string(),
name: "General".to_string(),
description: Some(
"Retrieve block data by hash or height. Access block headers, transaction lists, \
and raw block bytes.\n\n\
*[Mempool.space](https://mempool.space/docs/api/rest) compatible (WIP).*"
.to_string(),
),
..Default::default()
},
Tag {
name: "Transactions".to_string(),
description: Some(
"Retrieve transaction data by txid. Access full transaction details, confirmation \
status, raw hex, and output spend information.\n\n\
*[Mempool.space](https://mempool.space/docs/api/rest) compatible (WIP).*"
"General Bitcoin network information including difficulty adjustments and price data.\n\n\
*[Mempool.space](https://mempool.space/docs/api/rest) compatible.*"
.to_string(),
),
..Default::default()
@@ -132,17 +121,17 @@ All errors return structured JSON with a consistent format:
description: Some(
"Query Bitcoin address data including balances, transaction history, and UTXOs. \
Supports all address types: P2PKH, P2SH, P2WPKH, P2WSH, and P2TR.\n\n\
*[Mempool.space](https://mempool.space/docs/api/rest) compatible (WIP).*"
*[Mempool.space](https://mempool.space/docs/api/rest) compatible.*"
.to_string(),
),
..Default::default()
},
Tag {
name: "Mempool".to_string(),
name: "Blocks".to_string(),
description: Some(
"Monitor unconfirmed transactions and fee estimates. Get mempool statistics, \
transaction IDs, and recommended fee rates for different confirmation targets.\n\n\
*[Mempool.space](https://mempool.space/docs/api/rest) compatible (WIP).*"
"Retrieve block data by hash or height. Access block headers, transaction lists, \
and raw block bytes.\n\n\
*[Mempool.space](https://mempool.space/docs/api/rest) compatible.*"
.to_string(),
),
..Default::default()
@@ -152,16 +141,43 @@ All errors return structured JSON with a consistent format:
description: Some(
"Mining statistics including pool distribution, hashrate, difficulty adjustments, \
block rewards, and fee rates across configurable time periods.\n\n\
*[Mempool.space](https://mempool.space/docs/api/rest) compatible (WIP).*"
*[Mempool.space](https://mempool.space/docs/api/rest) compatible.*"
.to_string(),
),
..Default::default()
},
Tag {
name: "Fees".to_string(),
description: Some(
"Fee estimation and projected mempool blocks.\n\n\
*[Mempool.space](https://mempool.space/docs/api/rest) compatible.*"
.to_string(),
),
..Default::default()
},
Tag {
name: "Mempool".to_string(),
description: Some(
"Monitor unconfirmed transactions. Get mempool statistics, \
transaction IDs, fee histogram, and recent transactions.\n\n\
*[Mempool.space](https://mempool.space/docs/api/rest) compatible.*"
.to_string(),
),
..Default::default()
},
Tag {
name: "Transactions".to_string(),
description: Some(
"Retrieve transaction data by txid. Access full transaction details, confirmation \
status, raw hex, merkle proofs, and output spend information.\n\n\
*[Mempool.space](https://mempool.space/docs/api/rest) compatible.*"
.to_string(),
),
..Default::default()
},
Tag {
name: "Metrics".to_string(),
description: Some(
"Deprecated — use Series".to_string(),
),
description: Some("Deprecated — use Series".to_string()),
extensions: [("deprecated".to_string(), serde_json::Value::Bool(true))].into(),
..Default::default()
},

View File

@@ -47,6 +47,12 @@
telemetry: false,
withDefaultFonts: false,
// showToolbar: "never",
mcp: {
name: "mcp",
url: "https://mcp.example.com",
disabled: true,
},
// showDeveloperTools: "never",
agent: {
disabled: true,
},

File diff suppressed because one or more lines are too long

View File

@@ -59,7 +59,7 @@ impl ApiSeriesRoutes for ApiRouter<AppState> {
"Returns the complete hierarchical catalog of available series organized as a tree structure. \
Series are grouped by categories and subcategories."
)
.ok_response::<TreeNode>()
.json_response::<TreeNode>()
.not_modified(),
),
)
@@ -78,7 +78,7 @@ impl ApiSeriesRoutes for ApiRouter<AppState> {
.series_tag()
.summary("Series count")
.description("Returns the number of series available per index type.")
.ok_response::<Vec<SeriesCount>>()
.json_response::<Vec<SeriesCount>>()
.not_modified(),
),
)
@@ -99,7 +99,7 @@ impl ApiSeriesRoutes for ApiRouter<AppState> {
.description(
"Returns all available indexes with their accepted query aliases. Use any alias when querying series."
)
.ok_response::<Vec<IndexInfo>>()
.json_response::<Vec<IndexInfo>>()
.not_modified(),
),
)
@@ -119,7 +119,7 @@ impl ApiSeriesRoutes for ApiRouter<AppState> {
.series_tag()
.summary("Series list")
.description("Paginated flat list of all available series names. Use `page` query param for pagination.")
.ok_response::<PaginatedSeries>()
.json_response::<PaginatedSeries>()
.not_modified(),
),
)
@@ -139,7 +139,7 @@ impl ApiSeriesRoutes for ApiRouter<AppState> {
.series_tag()
.summary("Search series")
.description("Fuzzy search for series by name. Supports partial matches and typos.")
.ok_response::<Vec<&str>>()
.json_response::<Vec<&str>>()
.not_modified()
.server_error(),
),
@@ -164,7 +164,7 @@ impl ApiSeriesRoutes for ApiRouter<AppState> {
.description(
"Returns the supported indexes and value type for the specified series."
)
.ok_response::<SeriesInfo>()
.json_response::<SeriesInfo>()
.not_modified()
.not_found()
.server_error(),
@@ -198,7 +198,7 @@ impl ApiSeriesRoutes for ApiRouter<AppState> {
"Fetch data for a specific series at the given index. \
Use query parameters to filter by date range and format (json/csv)."
)
.ok_response::<SeriesData>()
.json_response::<SeriesData>()
.csv_response()
.not_modified()
.not_found(),
@@ -232,7 +232,7 @@ impl ApiSeriesRoutes for ApiRouter<AppState> {
"Returns just the data array without the SeriesData wrapper. \
Supports the same range and format parameters as the standard endpoint."
)
.ok_response::<Vec<serde_json::Value>>()
.json_response::<Vec<serde_json::Value>>()
.csv_response()
.not_modified()
.not_found(),
@@ -258,7 +258,7 @@ impl ApiSeriesRoutes for ApiRouter<AppState> {
.description(
"Returns the single most recent value for a series, unwrapped (not inside a SeriesData object)."
)
.ok_response::<serde_json::Value>()
.json_response::<serde_json::Value>()
.not_found(),
),
)
@@ -280,7 +280,7 @@ impl ApiSeriesRoutes for ApiRouter<AppState> {
.series_tag()
.summary("Get series data length")
.description("Returns the total number of data points for a series at the given index.")
.ok_response::<usize>()
.json_response::<usize>()
.not_found(),
),
)
@@ -302,7 +302,7 @@ impl ApiSeriesRoutes for ApiRouter<AppState> {
.series_tag()
.summary("Get series version")
.description("Returns the current version of a series. Changes when the series data is updated.")
.ok_response::<brk_types::Version>()
.json_response::<brk_types::Version>()
.not_found(),
),
)
@@ -320,7 +320,7 @@ impl ApiSeriesRoutes for ApiRouter<AppState> {
"Fetch multiple series in a single request. Supports filtering by index and date range. \
Returns an array of SeriesData objects. For a single series, use `get_series` instead."
)
.ok_response::<Vec<SeriesData>>()
.json_response::<Vec<SeriesData>>()
.csv_response()
.not_modified(),
),
@@ -339,7 +339,7 @@ impl ApiSeriesRoutes for ApiRouter<AppState> {
.series_tag()
.summary("Available cost basis cohorts")
.description("List available cohorts for cost basis distribution.")
.ok_response::<Vec<String>>()
.json_response::<Vec<String>>()
.server_error()
},
),
@@ -362,7 +362,7 @@ impl ApiSeriesRoutes for ApiRouter<AppState> {
.series_tag()
.summary("Available cost basis dates")
.description("List available dates for a cohort's cost basis distribution.")
.ok_response::<Vec<Date>>()
.json_response::<Vec<Date>>()
.not_found()
.server_error()
},
@@ -397,7 +397,7 @@ impl ApiSeriesRoutes for ApiRouter<AppState> {
- `bucket`: raw (default), lin200, lin500, lin1000, log10, log50, log100\n\
- `value`: supply (default, in BTC), realized (USD), unrealized (USD)",
)
.ok_response::<CostBasisFormatted>()
.json_response::<CostBasisFormatted>()
.not_found()
.server_error()
},

View File

@@ -48,7 +48,7 @@ impl ServerRoutes for ApiRouter<AppState> {
.server_tag()
.summary("Health check")
.description("Returns the health status of the API server, including uptime information.")
.ok_response::<Health>()
.json_response::<Health>()
},
),
)
@@ -67,7 +67,7 @@ impl ServerRoutes for ApiRouter<AppState> {
.server_tag()
.summary("API version")
.description("Returns the current version of the API server")
.ok_response::<String>()
.json_response::<String>()
.not_modified()
},
),
@@ -91,7 +91,7 @@ impl ServerRoutes for ApiRouter<AppState> {
"Returns the sync status of the indexer, including indexed height, \
tip height, blocks behind, and last indexed timestamp.",
)
.ok_response::<SyncStatus>()
.json_response::<SyncStatus>()
.not_modified()
},
),
@@ -116,7 +116,7 @@ impl ServerRoutes for ApiRouter<AppState> {
.description(
"Returns the disk space used by BRK and Bitcoin data.",
)
.ok_response::<DiskUsage>()
.json_response::<DiskUsage>()
.not_modified()
},
),

View File

@@ -1,11 +1,15 @@
use aide::axum::{ApiRouter, routing::get_with};
use aide::axum::{
ApiRouter,
routing::{get_with, post_with},
};
use axum::{
extract::{Path, State},
http::{HeaderMap, Uri},
response::Redirect,
routing::get,
};
use brk_types::{Hex, Transaction, TxOutspend, TxStatus, TxidParam, TxidVout};
use brk_types::{
CpfpInfo, MerkleProof, Transaction, TxOutspend, TxStatus, Txid, TxidParam, TxidVout,
TxidsParam,
};
use crate::{CacheStrategy, extended::TransformResponseExtended};
@@ -18,9 +22,23 @@ pub trait TxRoutes {
impl TxRoutes for ApiRouter<AppState> {
fn add_tx_routes(self) -> Self {
self
.route("/api/tx", get(Redirect::temporary("/api/transactions")))
.route("/api/transactions", get(Redirect::temporary("/api#tag/transactions")))
.api_route(
"/api/v1/cpfp/{txid}",
get_with(
async |uri: Uri, headers: HeaderMap, Path(txid): Path<TxidParam>, State(state): State<AppState>| {
state.cached_json(&headers, CacheStrategy::MempoolHash(0), &uri, move |q| q.cpfp(txid)).await
},
|op| op
.id("get_cpfp")
.transactions_tag()
.summary("CPFP info")
.description("Returns ancestors and descendants for a CPFP transaction.\n\n*[Mempool.space docs](https://mempool.space/docs/api/rest#get-children-pay-for-parent)*")
.json_response::<CpfpInfo>()
.not_found()
.server_error(),
),
)
.api_route(
"/api/tx/{txid}",
get_with(
async |
@@ -38,32 +56,7 @@ impl TxRoutes for ApiRouter<AppState> {
.description(
"Retrieve complete transaction data by transaction ID (txid). Returns inputs, outputs, fee, size, and confirmation status.\n\n*[Mempool.space docs](https://mempool.space/docs/api/rest#get-transaction)*",
)
.ok_response::<Transaction>()
.not_modified()
.bad_request()
.not_found()
.server_error(),
),
)
.api_route(
"/api/tx/{txid}/status",
get_with(
async |
uri: Uri,
headers: HeaderMap,
Path(txid): Path<TxidParam>,
State(state): State<AppState>
| {
state.cached_json(&headers, CacheStrategy::Height, &uri, move |q| q.transaction_status(txid)).await
},
|op| op
.id("get_tx_status")
.transactions_tag()
.summary("Transaction status")
.description(
"Retrieve the confirmation status of a transaction. Returns whether the transaction is confirmed and, if so, the block height, hash, and timestamp.\n\n*[Mempool.space docs](https://mempool.space/docs/api/rest#get-transaction-status)*",
)
.ok_response::<TxStatus>()
.json_response::<Transaction>()
.not_modified()
.bad_request()
.not_found()
@@ -88,7 +81,43 @@ impl TxRoutes for ApiRouter<AppState> {
.description(
"Retrieve the raw transaction as a hex-encoded string. Returns the serialized transaction in hexadecimal format.\n\n*[Mempool.space docs](https://mempool.space/docs/api/rest#get-transaction-hex)*",
)
.ok_response::<Hex>()
.text_response()
.not_modified()
.bad_request()
.not_found()
.server_error(),
),
)
.api_route(
"/api/tx/{txid}/merkleblock-proof",
get_with(
async |uri: Uri, headers: HeaderMap, Path(txid): Path<TxidParam>, State(state): State<AppState>| {
state.cached_text(&headers, CacheStrategy::Height, &uri, move |q| q.merkleblock_proof(txid)).await
},
|op| op
.id("get_tx_merkleblock_proof")
.transactions_tag()
.summary("Transaction merkleblock proof")
.description("Get the merkleblock proof for a transaction (BIP37 format, hex encoded).\n\n*[Mempool.space docs](https://mempool.space/docs/api/rest#get-transaction-merkleblock-proof)*")
.text_response()
.not_modified()
.bad_request()
.not_found()
.server_error(),
),
)
.api_route(
"/api/tx/{txid}/merkle-proof",
get_with(
async |uri: Uri, headers: HeaderMap, Path(txid): Path<TxidParam>, State(state): State<AppState>| {
state.cached_json(&headers, CacheStrategy::Height, &uri, move |q| q.merkle_proof(txid)).await
},
|op| op
.id("get_tx_merkle_proof")
.transactions_tag()
.summary("Transaction merkle proof")
.description("Get the merkle inclusion proof for a transaction.\n\n*[Mempool.space docs](https://mempool.space/docs/api/rest#get-transaction-merkle-proof)*")
.json_response::<MerkleProof>()
.not_modified()
.bad_request()
.not_found()
@@ -114,7 +143,7 @@ impl TxRoutes for ApiRouter<AppState> {
.description(
"Get the spending status of a transaction output. Returns whether the output has been spent and, if so, the spending transaction details.\n\n*[Mempool.space docs](https://mempool.space/docs/api/rest#get-transaction-outspend)*",
)
.ok_response::<TxOutspend>()
.json_response::<TxOutspend>()
.not_modified()
.bad_request()
.not_found()
@@ -139,12 +168,91 @@ impl TxRoutes for ApiRouter<AppState> {
.description(
"Get the spending status of all outputs in a transaction. Returns an array with the spend status for each output.\n\n*[Mempool.space docs](https://mempool.space/docs/api/rest#get-transaction-outspends)*",
)
.ok_response::<Vec<TxOutspend>>()
.json_response::<Vec<TxOutspend>>()
.not_modified()
.bad_request()
.not_found()
.server_error(),
),
)
.api_route(
"/api/tx/{txid}/raw",
get_with(
async |uri: Uri, headers: HeaderMap, Path(txid): Path<TxidParam>, State(state): State<AppState>| {
state.cached_bytes(&headers, CacheStrategy::Height, &uri, move |q| q.transaction_raw(txid)).await
},
|op| op
.id("get_tx_raw")
.transactions_tag()
.summary("Transaction raw")
.description("Returns a transaction as binary data.\n\n*[Mempool.space docs](https://mempool.space/docs/api/rest#get-transaction-raw)*")
.json_response::<Vec<u8>>()
.not_modified()
.bad_request()
.not_found()
.server_error(),
),
)
.api_route(
"/api/tx/{txid}/status",
get_with(
async |
uri: Uri,
headers: HeaderMap,
Path(txid): Path<TxidParam>,
State(state): State<AppState>
| {
state.cached_json(&headers, CacheStrategy::Height, &uri, move |q| q.transaction_status(txid)).await
},
|op| op
.id("get_tx_status")
.transactions_tag()
.summary("Transaction status")
.description(
"Retrieve the confirmation status of a transaction. Returns whether the transaction is confirmed and, if so, the block height, hash, and timestamp.\n\n*[Mempool.space docs](https://mempool.space/docs/api/rest#get-transaction-status)*",
)
.json_response::<TxStatus>()
.not_modified()
.bad_request()
.not_found()
.server_error(),
),
)
.api_route(
"/api/v1/transaction-times",
get_with(
async |uri: Uri, headers: HeaderMap, State(state): State<AppState>| {
let params = TxidsParam::from_query(uri.query().unwrap_or(""));
state.cached_json(&headers, CacheStrategy::MempoolHash(0), &uri, move |q| q.transaction_times(&params.txids)).await
},
|op| op
.id("get_transaction_times")
.transactions_tag()
.summary("Transaction first-seen times")
.description("Returns timestamps when transactions were first seen in the mempool. Returns 0 for mined or unknown transactions.\n\n*[Mempool.space docs](https://mempool.space/docs/api/rest#get-transaction-times)*")
.json_response::<Vec<u64>>()
.server_error(),
),
)
.api_route(
"/api/tx",
post_with(
async |State(state): State<AppState>, body: String| {
let hex = body.trim().to_string();
state.sync(|q| q.broadcast_transaction(&hex))
.map(|txid| txid.to_string())
.map_err(crate::Error::from)
},
|op| {
op.id("post_tx")
.transactions_tag()
.summary("Broadcast transaction")
.description("Broadcast a raw transaction to the network. The transaction should be provided as hex in the request body. The txid will be returned on success.\n\n*[Mempool.space docs](https://mempool.space/docs/api/rest#post-transaction)*")
.json_response::<Txid>()
.bad_request()
.server_error()
},
),
)
}
}

View File

@@ -1,3 +1,4 @@
use aide::OperationOutput;
use axum::{
http::{StatusCode, header},
response::{IntoResponse, Response},
@@ -157,6 +158,10 @@ impl From<BrkError> for Error {
}
}
impl OperationOutput for Error {
type Inner = ();
}
impl IntoResponse for Error {
fn into_response(self) -> Response {
let body = build_error_body(self.status, self.code, self.message);

View File

@@ -6,27 +6,31 @@ use schemars::JsonSchema;
use crate::error::ErrorBody;
pub trait TransformResponseExtended<'t> {
fn general_tag(self) -> Self;
fn addrs_tag(self) -> Self;
fn blocks_tag(self) -> Self;
fn mempool_tag(self) -> Self;
fn metrics_tag(self) -> Self;
fn mining_tag(self) -> Self;
fn series_tag(self) -> Self;
fn server_tag(self) -> Self;
fn fees_tag(self) -> Self;
fn mempool_tag(self) -> Self;
fn transactions_tag(self) -> Self;
fn server_tag(self) -> Self;
fn series_tag(self) -> Self;
fn metrics_tag(self) -> Self;
/// Mark operation as deprecated
fn deprecated(self) -> Self;
/// 200
fn ok_response<R>(self) -> Self
fn json_response<R>(self) -> Self
where
R: JsonSchema;
/// 200
fn ok_response_with<R, F>(self, f: F) -> Self
fn json_response_with<R, F>(self, f: F) -> Self
where
R: JsonSchema,
F: FnOnce(TransformResponse<'_, R>) -> TransformResponse<'_, R>;
/// 200 with text/plain content type
fn text_response(self) -> Self;
/// 200 with text/csv content type (adds CSV as alternative response format)
fn csv_response(self) -> Self;
/// 400
@@ -40,6 +44,10 @@ pub trait TransformResponseExtended<'t> {
}
impl<'t> TransformResponseExtended<'t> for TransformOperation<'t> {
fn general_tag(self) -> Self {
self.tag("General")
}
fn addrs_tag(self) -> Self {
self.tag("Addresses")
}
@@ -48,35 +56,39 @@ impl<'t> TransformResponseExtended<'t> for TransformOperation<'t> {
self.tag("Blocks")
}
fn mempool_tag(self) -> Self {
self.tag("Mempool")
}
fn metrics_tag(self) -> Self {
self.tag("Metrics")
}
fn series_tag(self) -> Self {
self.tag("Series")
}
fn mining_tag(self) -> Self {
self.tag("Mining")
}
fn server_tag(self) -> Self {
self.tag("Server")
fn fees_tag(self) -> Self {
self.tag("Fees")
}
fn mempool_tag(self) -> Self {
self.tag("Mempool")
}
fn transactions_tag(self) -> Self {
self.tag("Transactions")
}
fn ok_response<R>(self) -> Self
fn server_tag(self) -> Self {
self.tag("Server")
}
fn series_tag(self) -> Self {
self.tag("Series")
}
fn metrics_tag(self) -> Self {
self.tag("Metrics")
}
fn json_response<R>(self) -> Self
where
R: JsonSchema,
{
self.ok_response_with(|r: TransformResponse<'_, R>| r)
self.json_response_with(|r: TransformResponse<'_, R>| r)
}
fn deprecated(mut self) -> Self {
@@ -84,7 +96,7 @@ impl<'t> TransformResponseExtended<'t> for TransformOperation<'t> {
self
}
fn ok_response_with<R, F>(self, f: F) -> Self
fn json_response_with<R, F>(self, f: F) -> Self
where
R: JsonSchema,
F: FnOnce(TransformResponse<'_, R>) -> TransformResponse<'_, R>,
@@ -92,6 +104,10 @@ impl<'t> TransformResponseExtended<'t> for TransformOperation<'t> {
self.response_with::<200, Json<R>, _>(|res| f(res.description("Successful response")))
}
fn text_response(self) -> Self {
self.response_with::<200, String, _>(|res| res.description("Successful response"))
}
fn csv_response(mut self) -> Self {
// Add text/csv content type to existing 200 response
if let Some(responses) = &mut self.inner_mut().responses

View File

@@ -2,15 +2,10 @@
pub enum Kind {
Recent,
Random,
Sequential,
Vec,
}
impl Kind {
pub fn is_sequential(&self) -> bool {
matches!(*self, Self::Sequential)
}
pub fn is_recent(&self) -> bool {
matches!(*self, Self::Recent)
}

View File

@@ -137,13 +137,6 @@ where
FilterPolicyEntry::Bloom(BloomConstructionPolicy::BitsPerKey(7.0)),
]));
}
Kind::Sequential => {
options = options
.filter_block_partitioning_policy(PartitioningPolicy::all(true))
.index_block_partitioning_policy(PartitioningPolicy::all(true))
.filter_block_pinning_policy(PinningPolicy::all(false))
.index_block_pinning_policy(PinningPolicy::all(false));
}
Kind::Vec => {
options = options
.max_memtable_size(8 * 1024 * 1024)

View File

@@ -1,11 +1,9 @@
use std::borrow::Cow;
use bitcoin::hashes::{Hash, HashEngine};
use derive_more::Deref;
use crate::BlkMetadata;
use super::{BlockHash, Height};
use super::{BlockHash, CoinbaseTag, Height};
/// Raw block bytes and per-tx offsets for fast txid hashing.
/// Present when block was parsed from blk*.dat files, absent for RPC blocks.
@@ -110,15 +108,15 @@ impl Block {
bitcoin::Txid::from_engine(engine)
}
pub fn coinbase_tag(&self) -> Cow<'_, str> {
String::from_utf8_lossy(
self.txdata
.first()
.and_then(|tx| tx.input.first())
.unwrap()
.script_sig
.as_bytes(),
)
pub fn coinbase_tag(&self) -> CoinbaseTag {
let bytes = self
.txdata
.first()
.and_then(|tx| tx.input.first())
.unwrap()
.script_sig
.as_bytes();
CoinbaseTag::from(bytes)
}
}

View File

@@ -0,0 +1,109 @@
use schemars::JsonSchema;
use serde::{Deserialize, Serialize};
use crate::{BlockPool, FeeRate, Sats, Weight};
/// Extended block data matching mempool.space /api/v1/blocks extras
//
// All JSON keys are camelCase to mirror mempool.space. A single container-level
// `rename_all` keeps every field in sync instead of repeating `#[serde(rename)]`
// per field (each original rename was exactly the camelCase of the field name,
// so the wire format is unchanged; schemars honors serde attributes too).
#[derive(Debug, Clone, Serialize, Deserialize, JsonSchema)]
#[serde(rename_all = "camelCase")]
pub struct BlockExtras {
    /// Total fees in satoshis
    pub total_fees: Sats,
    /// Median fee rate in sat/vB
    pub median_fee: FeeRate,
    /// Fee rate range: [min, 10%, 25%, 50%, 75%, 90%, max]
    pub fee_range: [FeeRate; 7],
    /// Total block reward (subsidy + fees) in satoshis
    pub reward: Sats,
    /// Mining pool that mined this block
    pub pool: BlockPool,
    /// Average fee per transaction in satoshis
    pub avg_fee: Sats,
    /// Average fee rate in sat/vB
    pub avg_fee_rate: FeeRate,
    /// Raw coinbase transaction scriptsig as hex
    pub coinbase_raw: String,
    /// Primary coinbase output address
    pub coinbase_address: Option<String>,
    /// All coinbase output addresses
    pub coinbase_addresses: Vec<String>,
    /// Coinbase output script in ASM format
    pub coinbase_signature: String,
    /// Coinbase scriptsig decoded as ASCII
    pub coinbase_signature_ascii: String,
    /// Average transaction size in bytes
    pub avg_tx_size: f64,
    /// Total number of inputs (excluding coinbase)
    pub total_inputs: u64,
    /// Total number of outputs
    pub total_outputs: u64,
    /// Total output amount in satoshis
    pub total_output_amt: Sats,
    /// Median fee amount in satoshis
    pub median_fee_amt: Sats,
    /// Fee amount percentiles in satoshis: [min, 10%, 25%, 50%, 75%, 90%, max]
    pub fee_percentiles: [Sats; 7],
    /// Number of segwit transactions
    pub segwit_total_txs: u32,
    /// Total size of segwit transactions in bytes
    pub segwit_total_size: u64,
    /// Total weight of segwit transactions
    pub segwit_total_weight: Weight,
    /// Raw 80-byte block header as hex
    pub header: String,
    /// UTXO set change (outputs created minus inputs spent)
    pub utxo_set_change: i64,
    /// Total UTXO set size at this height
    pub utxo_set_size: u64,
    /// Total input amount in satoshis
    pub total_input_amt: Sats,
    /// Virtual size in vbytes
    pub virtual_size: f64,
}

View File

@@ -0,0 +1,43 @@
use bitcoin::block::Header;
use schemars::JsonSchema;
use serde::{Deserialize, Serialize};
use crate::BlockHash;
/// Block header matching mempool.space's format.
/// Contains the same fields as bitcoin::block::Header
/// but serialized for the JSON API.
#[derive(Debug, Clone, Serialize, Deserialize, JsonSchema)]
pub struct BlockHeader {
    /// Block version, used for soft fork signaling
    pub version: u32,
    /// Previous block hash
    #[serde(rename = "previousblockhash")]
    pub previous_block_hash: BlockHash,
    /// Merkle root of the transaction tree
    pub merkle_root: String,
    /// Block timestamp as claimed by the miner (Unix time)
    pub time: u32,
    /// Compact target (bits)
    pub bits: u32,
    /// Nonce used to produce a valid block hash
    pub nonce: u32,
}
impl From<Header> for BlockHeader {
    /// Converts a consensus `bitcoin` header into the JSON-API shape.
    fn from(h: Header) -> Self {
        Self {
            // NOTE(review): `to_consensus()` yields i32; `as u32` reinterprets
            // the bit pattern rather than clamping. Fine for real-world block
            // versions, but confirm no caller expects a signed rendering.
            version: h.version.to_consensus() as u32,
            previous_block_hash: BlockHash::from(h.prev_blockhash),
            merkle_root: h.merkle_root.to_string(),
            time: h.time,
            bits: h.bits.to_consensus(),
            nonce: h.nonce,
        }
    }
}

View File

@@ -1,9 +1,9 @@
use schemars::JsonSchema;
use serde::{Deserialize, Serialize};
use crate::{BlockHash, Height, Timestamp, Weight};
use crate::{BlockHash, BlockHeader, Height, Timestamp, Weight};
/// Block information returned by the API
/// Block information matching mempool.space /api/block/{hash}
#[derive(Debug, Clone, Serialize, Deserialize, JsonSchema)]
pub struct BlockInfo {
/// Block hash
@@ -12,6 +12,13 @@ pub struct BlockInfo {
/// Block height
pub height: Height,
/// Block header fields
#[serde(flatten)]
pub header: BlockHeader,
/// Block timestamp (Unix time)
pub timestamp: Timestamp,
/// Number of transactions in the block
pub tx_count: u32,
@@ -21,9 +28,10 @@ pub struct BlockInfo {
/// Block weight in weight units
pub weight: Weight,
/// Block timestamp (Unix time)
pub timestamp: Timestamp,
/// Median time of the last 11 blocks
#[serde(rename = "mediantime")]
pub median_time: Timestamp,
/// Block difficulty as a floating point number
/// Block difficulty
pub difficulty: f64,
}

View File

@@ -0,0 +1,15 @@
use schemars::JsonSchema;
use serde::{Deserialize, Serialize};
use crate::{BlockExtras, BlockInfo};
/// Block information with extras, matching mempool.space /api/v1/blocks
#[derive(Debug, Clone, Serialize, Deserialize, JsonSchema)]
pub struct BlockInfoV1 {
    /// Base block information
    // `flatten` merges BlockInfo's fields into this object's JSON, so the
    // wire shape is BlockInfo's keys plus an `extras` object.
    #[serde(flatten)]
    pub info: BlockInfo,
    /// Extended block data
    pub extras: BlockExtras,
}

View File

@@ -0,0 +1,17 @@
use schemars::JsonSchema;
use serde::{Deserialize, Serialize};
use crate::PoolSlug;
/// Mining pool identification for a block
#[derive(Debug, Clone, Serialize, Deserialize, JsonSchema)]
pub struct BlockPool {
    /// Unique pool identifier
    // NOTE(review): `u8` caps this at 256 distinct pools — confirm the
    // known-pool list cannot outgrow that.
    pub id: u8,
    /// Pool name
    pub name: String,
    /// URL-friendly pool identifier
    pub slug: PoolSlug,
}

View File

@@ -0,0 +1,93 @@
use derive_more::Deref;
use schemars::JsonSchema;
use serde::{Deserialize, Serialize};
use vecdb::{Bytes, Formattable};
/// Coinbase scriptSig tag for pool identification.
///
/// Stored as a fixed 101-byte record (1 byte length + 100 bytes data).
/// Uses `[u8; 101]` internally so that `size_of::<CoinbaseTag>()` matches
/// the serialized `Bytes::Array` size (vecdb requires this for alignment).
///
/// Bitcoin consensus limits coinbase scriptSig to 2-100 bytes.
// Layout: byte 0 = payload length (0..=100), bytes 1..=100 = payload,
// zero-padded. Note `Deref` exposes the raw 101-byte record *including*
// the length prefix, not just the payload.
#[derive(Debug, Deref, Clone, JsonSchema)]
pub struct CoinbaseTag(#[schemars(with = "String")] [u8; 101]);
impl Bytes for CoinbaseTag {
type Array = [u8; 101];
const IS_NATIVE_LAYOUT: bool = true;
#[inline]
fn to_bytes(&self) -> Self::Array {
self.0
}
#[inline]
fn from_bytes(bytes: &[u8]) -> vecdb::Result<Self> {
let arr: [u8; 101] = bytes.try_into().map_err(|_| vecdb::Error::WrongLength {
received: bytes.len(),
expected: 101,
})?;
Ok(Self(arr))
}
}
impl CoinbaseTag {
    /// Returns the tag as a string, decoding each byte as its latin-1/Unicode codepoint.
    #[inline]
    pub fn as_str(&self) -> String {
        // Byte 0 holds the payload length; clamp defensively to the 100-byte max.
        let len = usize::from(self.0[0]).min(100);
        let mut decoded = String::with_capacity(len);
        for &byte in &self.0[1..1 + len] {
            // `u8 as char` maps 0x80..=0xFF to U+0080..=U+00FF (latin-1).
            decoded.push(byte as char);
        }
        decoded
    }
}
impl From<&[u8]> for CoinbaseTag {
    /// Builds a tag from raw scriptSig bytes, keeping at most the first
    /// 100 bytes and recording the kept length in byte 0.
    #[inline]
    fn from(bytes: &[u8]) -> Self {
        let mut record = [0u8; 101];
        let len = bytes.len().min(100);
        record[0] = len as u8;
        record[1..1 + len].copy_from_slice(&bytes[..len]);
        Self(record)
    }
}
impl Serialize for CoinbaseTag {
    /// Serializes as the decoded string form (see [`CoinbaseTag::as_str`]).
    fn serialize<S: serde::Serializer>(&self, serializer: S) -> Result<S::Ok, S::Error> {
        let decoded = self.as_str();
        serializer.serialize_str(&decoded)
    }
}
impl<'de> Deserialize<'de> for CoinbaseTag {
    /// Deserializes from a string; the string's UTF-8 bytes become the payload.
    // NOTE(review): serialize decodes bytes as latin-1 but deserialize stores
    // the UTF-8 encoding, so non-ASCII tags do not round-trip byte-for-byte —
    // confirm this is acceptable for the stored record.
    fn deserialize<D: serde::Deserializer<'de>>(deserializer: D) -> Result<Self, D::Error> {
        String::deserialize(deserializer).map(|s| Self::from(s.as_bytes()))
    }
}
impl Formattable for CoinbaseTag {
    /// Plain-text form: the decoded tag string, as raw UTF-8 bytes.
    fn write_to(&self, buf: &mut Vec<u8>) {
        buf.extend_from_slice(self.as_str().as_bytes());
    }
    /// JSON string form: quotes, backslashes and the common control
    /// characters get short escapes; remaining C0 controls get `\u00XX`;
    /// everything else passes through as UTF-8.
    fn fmt_json(&self, buf: &mut Vec<u8>) {
        const HEX: &[u8; 16] = b"0123456789abcdef";
        buf.push(b'"');
        for &byte in self.as_str().as_bytes() {
            match byte {
                b'"' => buf.extend_from_slice(b"\\\""),
                b'\\' => buf.extend_from_slice(b"\\\\"),
                b'\n' => buf.extend_from_slice(b"\\n"),
                b'\r' => buf.extend_from_slice(b"\\r"),
                b'\t' => buf.extend_from_slice(b"\\t"),
                ctrl @ 0x00..=0x1f => {
                    buf.extend_from_slice(b"\\u00");
                    buf.push(HEX[usize::from(ctrl >> 4)]);
                    buf.push(HEX[usize::from(ctrl & 0xf)]);
                }
                other => buf.push(other),
            }
        }
        buf.push(b'"');
    }
}

View File

@@ -0,0 +1,21 @@
use schemars::JsonSchema;
use serde::{Deserialize, Serialize};
use crate::{FeeRate, Sats, Txid, Weight};
/// CPFP (Child Pays For Parent) information for a transaction
#[derive(Debug, Serialize, Deserialize, JsonSchema)]
pub struct CpfpInfo {
    /// Ancestor transactions in the CPFP relationship
    pub ancestors: Vec<CpfpEntry>,
    /// Descendant transactions in the CPFP relationship
    pub descendants: Vec<CpfpEntry>,
    /// Effective fee rate in sat/vB
    // NOTE(review): presumably the package-level rate including ancestors,
    // per mempool.space semantics — confirm against the producer.
    #[serde(rename = "effectiveFeePerVsize")]
    pub effective_fee_per_vsize: FeeRate,
}
/// A transaction in a CPFP relationship
#[derive(Debug, Serialize, Deserialize, JsonSchema)]
pub struct CpfpEntry {
    /// Transaction ID
    pub txid: Txid,
    /// Transaction weight in weight units
    pub weight: Weight,
    /// Transaction fee in satoshis
    pub fee: Sats,
}

View File

@@ -0,0 +1,32 @@
use schemars::JsonSchema;
use serde::{Deserialize, Serialize};
use crate::{Dollars, Timestamp};
/// Current price response matching mempool.space /api/v1/prices format
#[derive(Debug, Serialize, Deserialize, JsonSchema)]
pub struct Prices {
    /// Timestamp the price refers to
    pub time: Timestamp,
    /// Price in US dollars (serialized under the key `USD`)
    #[serde(rename = "USD")]
    pub usd: Dollars,
}
/// Historical price response
#[derive(Debug, Serialize, Deserialize, JsonSchema)]
pub struct HistoricalPrice {
    /// Price data points
    pub prices: Vec<HistoricalPriceEntry>,
    /// Exchange-rate table (serialized under the key `exchangeRates`)
    #[serde(rename = "exchangeRates")]
    pub exchange_rates: ExchangeRates,
}
/// A single price data point
#[derive(Debug, Serialize, Deserialize, JsonSchema)]
pub struct HistoricalPriceEntry {
    /// Unix timestamp of the data point
    pub time: u64,
    /// Price in US dollars (serialized under the key `USD`)
    #[serde(rename = "USD")]
    pub usd: Dollars,
}
/// Exchange rates (USD base, on-chain only — no fiat pairs available)
// Intentionally empty: kept for wire compatibility with mempool.space,
// which always emits an `exchangeRates` object.
#[derive(Debug, Default, Serialize, Deserialize, JsonSchema)]
pub struct ExchangeRates {}

View File

@@ -24,9 +24,13 @@ mod blk_metadata;
mod blk_position;
mod block;
mod block_count_param;
mod block_extras;
mod block_fee_rates_entry;
mod block_fees_entry;
mod block_header;
mod block_info;
mod block_info_v1;
mod block_pool;
mod block_rewards_entry;
mod block_size_entry;
mod block_sizes_weights;
@@ -44,6 +48,8 @@ mod cents_compact;
mod cents_sats;
mod cents_signed;
mod cents_squared_sats;
mod coinbase_tag;
mod cpfp;
mod cost_basis_bucket;
mod cost_basis_distribution;
mod cost_basis_params;
@@ -76,6 +82,7 @@ mod hashrate_entry;
mod hashrate_summary;
mod health;
mod height;
mod historical_price;
mod height_param;
mod hex;
mod hour1;
@@ -89,6 +96,8 @@ mod limit_param;
mod mempool_block;
mod mempool_entry_info;
mod mempool_info;
mod mempool_recent_tx;
mod merkle_proof;
mod minute10;
mod minute30;
mod month1;
@@ -124,6 +133,7 @@ mod percentile;
mod pool;
mod pool_detail;
mod pool_info;
mod pool_hashrate_entry;
mod pool_slug;
mod pool_slug_param;
mod pool_stats;
@@ -177,6 +187,7 @@ mod tx_with_hex;
mod txid;
mod txid_param;
mod txid_prefix;
mod txids_param;
mod txid_vout;
mod txin;
mod txin_index;
@@ -219,9 +230,13 @@ pub use blk_metadata::*;
pub use blk_position::*;
pub use block::*;
pub use block_count_param::*;
pub use block_extras::*;
pub use block_fee_rates_entry::*;
pub use block_fees_entry::*;
pub use block_header::*;
pub use block_info::*;
pub use block_info_v1::*;
pub use block_pool::*;
pub use block_rewards_entry::*;
pub use block_size_entry::*;
pub use block_sizes_weights::*;
@@ -239,6 +254,8 @@ pub use cents_compact::*;
pub use cents_sats::*;
pub use cents_signed::*;
pub use cents_squared_sats::*;
pub use coinbase_tag::*;
pub use cpfp::*;
pub use cost_basis_bucket::*;
pub use cost_basis_distribution::*;
pub use cost_basis_params::*;
@@ -271,6 +288,7 @@ pub use hashrate_entry::*;
pub use hashrate_summary::*;
pub use health::*;
pub use height::*;
pub use historical_price::*;
pub use height_param::*;
pub use hex::*;
pub use hour1::*;
@@ -284,6 +302,8 @@ pub use limit_param::*;
pub use mempool_block::*;
pub use mempool_entry_info::*;
pub use mempool_info::*;
pub use mempool_recent_tx::*;
pub use merkle_proof::*;
pub use minute10::*;
pub use minute30::*;
pub use month1::*;
@@ -319,6 +339,7 @@ pub use percentile::*;
pub use pool::*;
pub use pool_detail::*;
pub use pool_info::*;
pub use pool_hashrate_entry::*;
pub use pool_slug::*;
pub use pool_slug_param::*;
pub use pool_stats::*;
@@ -372,6 +393,7 @@ pub use tx_with_hex::*;
pub use txid::*;
pub use txid_param::*;
pub use txid_prefix::*;
pub use txids_param::*;
pub use txid_vout::*;
pub use txin::*;
pub use txin_index::*;

View File

@@ -1,9 +1,11 @@
use std::collections::BTreeMap;
use schemars::JsonSchema;
use serde::{Deserialize, Serialize};
use serde::{Deserialize, Deserializer, Serialize, Serializer};
use crate::{Sats, Transaction, VSize};
use crate::{FeeRate, Sats, Transaction, VSize};
/// Mempool statistics
/// Mempool statistics with incrementally maintained fee histogram.
#[derive(Debug, Default, Clone, Serialize, Deserialize, JsonSchema)]
pub struct MempoolInfo {
/// Number of transactions in the mempool
@@ -12,28 +14,50 @@ pub struct MempoolInfo {
pub vsize: VSize,
/// Total fees of all transactions in the mempool (satoshis)
pub total_fee: Sats,
/// Fee histogram: `[[fee_rate, vsize], ...]` sorted by descending fee rate
#[serde(
serialize_with = "serialize_fee_histogram",
deserialize_with = "deserialize_fee_histogram"
)]
pub fee_histogram: BTreeMap<FeeRate, VSize>,
}
impl MempoolInfo {
    /// Increment stats for a newly added transaction.
    ///
    /// Fee must come from `MempoolEntryInfo` (Bitcoin Core) rather than `tx.fee`
    /// because `tx.fee` may be 0 for chained mempool transactions where prevouts
    /// cannot be looked up via `gettxout`.
    #[inline]
    pub fn add(&mut self, tx: &Transaction, fee: Sats) {
        self.count += 1;
        self.vsize += tx.vsize();
        self.total_fee += fee;
        // Bucket the tx by its own fee rate and grow that bucket's vsize.
        let rate = FeeRate::from((fee, tx.vsize()));
        *self.fee_histogram.entry(rate).or_insert(VSize::from(0u64)) += tx.vsize();
    }
    /// Decrement stats for a removed transaction.
    ///
    /// Fee must match the fee used when the transaction was added.
    // NOTE(review): removing a tx that was never added still decrements
    // `count`/`vsize`/`total_fee` — callers must guarantee add/remove pairing.
    #[inline]
    pub fn remove(&mut self, tx: &Transaction, fee: Sats) {
        self.count -= 1;
        self.vsize -= tx.vsize();
        self.total_fee -= fee;
        let rate = FeeRate::from((fee, tx.vsize()));
        if let Some(v) = self.fee_histogram.get_mut(&rate) {
            *v -= tx.vsize();
            // Drop empty buckets so the serialized histogram stays compact.
            if u64::from(*v) == 0 {
                self.fee_histogram.remove(&rate);
            }
        }
    }
}
/// Serializes the histogram as `[[fee_rate, vsize], ...]`, highest fee rate
/// first (the wire order mempool.space clients expect).
fn serialize_fee_histogram<S: Serializer>(
    map: &BTreeMap<FeeRate, VSize>,
    serializer: S,
) -> Result<S::Ok, S::Error> {
    let mut pairs = Vec::with_capacity(map.len());
    // BTreeMap iterates ascending; walk it backwards for descending rates.
    for (&rate, &vsize) in map.iter().rev() {
        pairs.push((rate, vsize));
    }
    pairs.serialize(serializer)
}
/// Rebuilds the histogram map from the serialized `[[fee_rate, vsize], ...]`
/// pair list (order-insensitive; the BTreeMap re-sorts by rate).
fn deserialize_fee_histogram<'de, D: Deserializer<'de>>(
    deserializer: D,
) -> Result<BTreeMap<FeeRate, VSize>, D::Error> {
    Vec::<(FeeRate, VSize)>::deserialize(deserializer)
        .map(|pairs| pairs.into_iter().collect())
}

View File

@@ -0,0 +1,24 @@
use schemars::JsonSchema;
use serde::{Deserialize, Serialize};
use crate::{Sats, Transaction, Txid, VSize};
/// Simplified mempool transaction for the recent transactions endpoint
#[derive(Debug, Clone, Serialize, Deserialize, JsonSchema)]
pub struct MempoolRecentTx {
    /// Transaction ID
    pub txid: Txid,
    /// Transaction fee in satoshis
    pub fee: Sats,
    /// Virtual size in vbytes
    pub vsize: VSize,
    /// Sum of all output values in satoshis
    pub value: Sats,
}
impl From<(&Txid, &Transaction)> for MempoolRecentTx {
    /// Projects a full mempool transaction down to the recent-txs summary.
    fn from((txid, tx): (&Txid, &Transaction)) -> Self {
        // Total value is the sum over every output of the transaction.
        let value = tx.output.iter().map(|out| out.value).sum();
        Self {
            txid: txid.clone(),
            fee: tx.fee,
            vsize: tx.vsize(),
            value,
        }
    }
}

View File

@@ -0,0 +1,12 @@
use schemars::JsonSchema;
use serde::{Deserialize, Serialize};
use crate::Height;
/// Merkle inclusion proof for a transaction
#[derive(Debug, Clone, Serialize, Deserialize, JsonSchema)]
pub struct MerkleProof {
    /// Height of the block containing the transaction
    pub block_height: Height,
    /// Sibling hashes along the path from the transaction to the merkle root
    // NOTE(review): hex strings judging by the type — confirm byte order
    // (internal vs display) matches mempool.space's merkle-proof endpoint.
    pub merkle: Vec<String>,
    /// Zero-based position of the transaction within the block
    pub pos: usize,
}

View File

@@ -1,5 +1,3 @@
use std::ops::{Add, Div};
/// Standard percentile values used throughout BRK.
pub const PERCENTILES: [u8; 19] = [
5, 10, 15, 20, 25, 30, 35, 40, 45, 50, 55, 60, 65, 70, 75, 80, 85, 90, 95,
@@ -8,31 +6,13 @@ pub const PERCENTILES: [u8; 19] = [
/// Length of the PERCENTILES array.
pub const PERCENTILES_LEN: usize = PERCENTILES.len();
/// Get a percentile value from a sorted slice.
/// Get a percentile value from a sorted slice using nearest-rank method.
///
/// # Panics
/// Panics if the slice is empty.
pub fn get_percentile<T>(sorted: &[T], percentile: f64) -> T
where
T: Clone + Div<usize, Output = T> + Add<T, Output = T>,
{
pub fn get_percentile<T: Clone>(sorted: &[T], percentile: f64) -> T {
let len = sorted.len();
if len == 0 {
panic!("Cannot get percentile from empty slice");
} else if len == 1 {
sorted[0].clone()
} else {
let index = (len - 1) as f64 * percentile;
let fract = index.fract();
if fract != 0.0 {
let left = sorted.get(index as usize).unwrap().clone();
let right = sorted.get(index.ceil() as usize).unwrap().clone();
(left + right) / 2
} else {
sorted.get(index as usize).unwrap().clone()
}
}
assert!(len > 0, "Cannot get percentile from empty slice");
let index = ((len - 1) as f64 * percentile).round() as usize;
sorted[index].clone()
}

View File

@@ -0,0 +1,19 @@
use schemars::JsonSchema;
use serde::{Deserialize, Serialize};
use super::Timestamp;
/// A single pool hashrate data point.
#[derive(Debug, Serialize, Deserialize, JsonSchema)]
pub struct PoolHashrateEntry {
    /// Unix timestamp.
    pub timestamp: Timestamp,
    /// Average hashrate (H/s).
    // u128 gives headroom beyond u64::MAX (~1.8e19) for network-scale rates.
    #[serde(rename = "avgHashrate")]
    pub avg_hashrate: u128,
    /// Pool's share of total network hashrate.
    // NOTE(review): assumed to be a 0..=1 fraction — confirm with producer.
    pub share: f64,
    /// Pool name.
    #[serde(rename = "poolName")]
    pub pool_name: String,
}

View File

@@ -1,9 +1,15 @@
use schemars::JsonSchema;
use serde::Deserialize;
use super::PoolSlug;
use super::{Height, PoolSlug};
#[derive(Deserialize, JsonSchema)]
pub struct PoolSlugParam {
pub slug: PoolSlug,
}
/// Parameters for pool endpoints that take both a pool slug and a block height.
#[derive(Deserialize, JsonSchema)]
pub struct PoolSlugAndHeightParam {
    /// URL-friendly pool identifier
    pub slug: PoolSlug,
    /// Block height
    pub height: Height,
}

View File

@@ -7,3 +7,8 @@ use crate::Timestamp;
pub struct TimestampParam {
pub timestamp: Timestamp,
}
/// Parameter wrapping an optional timestamp; `None` when the caller omitted it
/// (the defaulting behavior is endpoint-specific — see callers).
#[derive(Deserialize, JsonSchema)]
pub struct OptionalTimestampParam {
    /// Unix timestamp, if provided
    pub timestamp: Option<Timestamp>,
}

View File

@@ -63,6 +63,14 @@ impl fmt::Display for Txid {
}
}
impl FromStr for Txid {
    type Err = bitcoin::hashes::hex::HexToArrayError;
    /// Parses via `bitcoin::Txid`'s `FromStr` and wraps the result.
    fn from_str(s: &str) -> Result<Self, Self::Err> {
        let inner = bitcoin::Txid::from_str(s)?;
        Ok(Self::from(inner))
    }
}
impl Serialize for Txid {
fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
where

View File

@@ -0,0 +1,31 @@
use std::str::FromStr;
use schemars::JsonSchema;
use crate::Txid;
/// Query parameter for transaction-times endpoint.
// Derives only `JsonSchema` (for the OpenAPI docs); actual extraction is done
// by hand in `from_query` because serde_urlencoded cannot represent the
// repeated `txId[]` key. The `#[serde(rename)]` is read by schemars.
#[derive(JsonSchema)]
pub struct TxidsParam {
    /// Transaction IDs, one per repeated `txId[]` query key
    #[serde(rename = "txId[]")]
    pub txids: Vec<Txid>,
}
impl TxidsParam {
    /// Parsed manually from URI since serde_urlencoded doesn't support repeated keys.
    ///
    /// Pairs without `=`, keys other than `txId[]` (raw or percent-encoded),
    /// and unparseable txids are silently skipped.
    pub fn from_query(query: &str) -> Self {
        let mut txids = Vec::new();
        for pair in query.split('&') {
            if let Some((key, value)) = pair.split_once('=') {
                // Accept both the raw and the percent-encoded spelling of `txId[]`.
                if key == "txId[]" || key == "txId%5B%5D" {
                    if let Ok(txid) = Txid::from_str(value) {
                        txids.push(txid);
                    }
                }
            }
        }
        Self { txids }
    }
}

View File

@@ -11,15 +11,13 @@
> "Shout out to Bitcoin Research Kit. [...] Couldn't recommend them highly enough."
> — James Check (CheckOnChain), [What Bitcoin Did #1000](https://www.whatbitcoindid.com/episodes/wbd1000-checkmate)
Open-source, self-hostable on-chain analytics for Bitcoin. Block explorer, address index, and thousands of metrics, everything computed from your node, even the price.
Open-source Bitcoin data toolkit that can parse blocks, index the chain, compute metrics, serve data and render it, all from a Bitcoin Core node. It combines what [Glassnode](https://glassnode.com) and [mempool.space](https://mempool.space) do separately into a single self-hostable package, with a built-in price oracle inspired by [UTXO Oracle](https://utxo.live/oracle/).
Similar to [Glassnode](https://glassnode.com) + [mempool.space](https://mempool.space) + [electrs](https://github.com/romanz/electrs) + [UTXO Oracle](https://utxo.live/oracle/) in a single package.
[Bitview](https://bitview.space) is a free hosted instance of BRK.
[Bitview](https://bitview.space) is the official free hosted instance of BRK.
## Data
**Zero external dependencies.** BRK needs only a Bitcoin Core node. Historical prices are built in, live price from your mempool. Every metric is computed locally from your own copy of the blockchain. Your node, your data.
**Zero external dependencies.** BRK needs only a Bitcoin Core node. 8,000+ metrics across 15 time resolutions, all computed locally from your own copy of the blockchain. Historical prices are built in, live price from your mempool. Your node, your data.
**Blockchain:** Blocks, transactions, addresses, UTXOs.
@@ -41,7 +39,7 @@ Browse metrics and charts at [bitview.space](https://bitview.space), no signup r
curl https://bitview.space/api/mempool/price
```
Query metrics and blockchain data in JSON or CSV.
Query metrics and blockchain data in JSON or CSV. No rate limit.
[Documentation](https://bitview.space/api) · [JavaScript](https://www.npmjs.com/package/brk-client) · [Python](https://pypi.org/project/brk-client) · [Rust](https://crates.io/crates/brk_client) · [llms.txt](https://bitview.space/llms.txt) · [LLM-friendly schema](https://bitview.space/api.json)
@@ -75,8 +73,9 @@ Build custom applications in Rust. Use the full stack or individual components (
## Donations
<a href="https://x.com/_Checkmatey_"><img src="https://pbs.twimg.com/profile_images/1657255419172253698/ncG0Gt8e_400x400.jpg" width="40" alt="Checkmate" title="Checkmate" style="border-radius:50%" /></a>
<a href="https://x.com/JohanMBergman"><img src="https://pbs.twimg.com/profile_images/1958587470120988673/7rlY5csu_400x400.jpg" width="40" alt="Johan" title="Johan" style="border-radius:50%" /></a>
<a href="https://x.com/_Checkmatey_"><img src="https://pbs.twimg.com/profile_images/1657255419172253698/ncG0Gt8e_400x400.jpg" width="40" alt="_Checkmatey_" title="_Checkmatey_" style="border-radius:50%" /></a>
<a href="https://x.com/JohanMBergman"><img src="https://pbs.twimg.com/profile_images/1958587470120988673/7rlY5csu_400x400.jpg" width="40" alt="Johan Bergman" title="Johan Bergman" style="border-radius:50%" /></a>
<a href="https://x.com/alonshvartsman"><img src="https://pbs.twimg.com/profile_images/2005689891028406272/8Qgmnurs_400x400.jpg" width="40" alt="Alon Shvartsman" title="Alon Shvartsman" style="border-radius:50%" /></a>
<a href="https://x.com/clearmined1"><img src="https://pbs.twimg.com/profile_images/1657777901830541313/6OAaR8XF_400x400.png" width="40" alt="ClearMined" title="ClearMined" style="border-radius:50%" /></a>
<img src="./qr.png" alt="Bitcoin donate QR code" width="120" />

File diff suppressed because it is too large Load Diff

View File

@@ -6,7 +6,7 @@
# Class: BrkError
Defined in: [Developer/brk/modules/brk-client/index.js:999](https://github.com/bitcoinresearchkit/brk/blob/d4dc1b9e4900e3787f2a133b8cac5d304acff9bf/modules/brk-client/index.js#L999)
Defined in: [Developer/brk/modules/brk-client/index.js:1163](https://github.com/bitcoinresearchkit/brk/blob/83edef4806773ef7225b7c0de863c44bd953169d/modules/brk-client/index.js#L1163)
Custom error class for BRK client errors
@@ -20,7 +20,7 @@ Custom error class for BRK client errors
> **new BrkError**(`message`, `status?`): `BrkError`
Defined in: [Developer/brk/modules/brk-client/index.js:1004](https://github.com/bitcoinresearchkit/brk/blob/d4dc1b9e4900e3787f2a133b8cac5d304acff9bf/modules/brk-client/index.js#L1004)
Defined in: [Developer/brk/modules/brk-client/index.js:1168](https://github.com/bitcoinresearchkit/brk/blob/83edef4806773ef7225b7c0de863c44bd953169d/modules/brk-client/index.js#L1168)
#### Parameters

View File

@@ -60,11 +60,14 @@
- [BlockCumulativeNegativeSumPattern](interfaces/BlockCumulativeNegativeSumPattern.md)
- [BlockCumulativePattern](interfaces/BlockCumulativePattern.md)
- [BlockCumulativeSumPattern](interfaces/BlockCumulativeSumPattern.md)
- [BlockExtras](interfaces/BlockExtras.md)
- [BlockFeesEntry](interfaces/BlockFeesEntry.md)
- [BlockHashParam](interfaces/BlockHashParam.md)
- [BlockHashStartIndex](interfaces/BlockHashStartIndex.md)
- [BlockHashTxIndex](interfaces/BlockHashTxIndex.md)
- [BlockInfo](interfaces/BlockInfo.md)
- [BlockInfoV1](interfaces/BlockInfoV1.md)
- [BlockPool](interfaces/BlockPool.md)
- [BlockRewardsEntry](interfaces/BlockRewardsEntry.md)
- [BlocksDominancePattern](interfaces/BlocksDominancePattern.md)
- [BlocksDominanceRewardsPattern](interfaces/BlocksDominanceRewardsPattern.md)
@@ -112,6 +115,8 @@
- [CostBasisCohortParam](interfaces/CostBasisCohortParam.md)
- [CostBasisParams](interfaces/CostBasisParams.md)
- [CostBasisQuery](interfaces/CostBasisQuery.md)
- [CpfpEntry](interfaces/CpfpEntry.md)
- [CpfpInfo](interfaces/CpfpInfo.md)
- [CumulativeRollingSumPattern](interfaces/CumulativeRollingSumPattern.md)
- [DataRangeFormat](interfaces/DataRangeFormat.md)
- [DateRangeBuilder](interfaces/DateRangeBuilder.md)
@@ -139,6 +144,8 @@
- [HashrateSummary](interfaces/HashrateSummary.md)
- [Health](interfaces/Health.md)
- [HeightParam](interfaces/HeightParam.md)
- [HistoricalPrice](interfaces/HistoricalPrice.md)
- [HistoricalPriceEntry](interfaces/HistoricalPriceEntry.md)
- [IndexInfo](interfaces/IndexInfo.md)
- [InMaxMinPerSupplyPattern](interfaces/InMaxMinPerSupplyPattern.md)
- [InPattern](interfaces/InPattern.md)
@@ -150,11 +157,14 @@
- [MaxMedianMinPct10Pct25Pct75Pct90Pattern2](interfaces/MaxMedianMinPct10Pct25Pct75Pct90Pattern2.md)
- [MempoolBlock](interfaces/MempoolBlock.md)
- [MempoolInfo](interfaces/MempoolInfo.md)
- [MempoolRecentTx](interfaces/MempoolRecentTx.md)
- [MerkleProof](interfaces/MerkleProof.md)
- [NuplPattern](interfaces/NuplPattern.md)
- [NuplRealizedSupplyUnrealizedPattern](interfaces/NuplRealizedSupplyUnrealizedPattern.md)
- [OHLCCents](interfaces/OHLCCents.md)
- [OHLCDollars](interfaces/OHLCDollars.md)
- [OHLCSats](interfaces/OHLCSats.md)
- [OptionalTimestampParam](interfaces/OptionalTimestampParam.md)
- [PaginatedSeries](interfaces/PaginatedSeries.md)
- [Pagination](interfaces/Pagination.md)
- [Pct05Pct10Pct15Pct20Pct25Pct30Pct35Pct40Pct45Pct50Pct55Pct60Pct65Pct70Pct75Pct80Pct85Pct90Pct95Pattern](interfaces/Pct05Pct10Pct15Pct20Pct25Pct30Pct35Pct40Pct45Pct50Pct55Pct60Pct65Pct70Pct75Pct80Pct85Pct90Pct95Pattern.md)
@@ -165,12 +175,15 @@
- [PoolBlockShares](interfaces/PoolBlockShares.md)
- [PoolDetail](interfaces/PoolDetail.md)
- [PoolDetailInfo](interfaces/PoolDetailInfo.md)
- [PoolHashrateEntry](interfaces/PoolHashrateEntry.md)
- [PoolInfo](interfaces/PoolInfo.md)
- [PoolSlugAndHeightParam](interfaces/PoolSlugAndHeightParam.md)
- [PoolSlugParam](interfaces/PoolSlugParam.md)
- [PoolsSummary](interfaces/PoolsSummary.md)
- [PoolStats](interfaces/PoolStats.md)
- [PricePattern](interfaces/PricePattern.md)
- [PriceRatioPattern](interfaces/PriceRatioPattern.md)
- [Prices](interfaces/Prices.md)
- [RangeBuilder](interfaces/RangeBuilder.md)
- [RatioTransferValuePattern](interfaces/RatioTransferValuePattern.md)
- [RatioValuePattern](interfaces/RatioValuePattern.md)
@@ -423,6 +436,7 @@
- [CentsSquaredSats](type-aliases/CentsSquaredSats.md)
- [Close](type-aliases/Close.md)
- [Cohort](type-aliases/Cohort.md)
- [CoinbaseTag](type-aliases/CoinbaseTag.md)
- [CostBasisBucket](type-aliases/CostBasisBucket.md)
- [CostBasisValue](type-aliases/CostBasisValue.md)
- [Date](type-aliases/Date.md)
@@ -434,12 +448,12 @@
- [EmptyAddrIndex](type-aliases/EmptyAddrIndex.md)
- [EmptyOutputIndex](type-aliases/EmptyOutputIndex.md)
- [Epoch](type-aliases/Epoch.md)
- [ExchangeRates](type-aliases/ExchangeRates.md)
- [FeeRate](type-aliases/FeeRate.md)
- [Format](type-aliases/Format.md)
- [FundedAddrIndex](type-aliases/FundedAddrIndex.md)
- [Halving](type-aliases/Halving.md)
- [Height](type-aliases/Height.md)
- [Hex](type-aliases/Hex.md)
- [High](type-aliases/High.md)
- [Hour1](type-aliases/Hour1.md)
- [Hour12](type-aliases/Hour12.md)
@@ -517,7 +531,6 @@
- [SeriesPattern7](type-aliases/SeriesPattern7.md)
- [SeriesPattern8](type-aliases/SeriesPattern8.md)
- [SeriesPattern9](type-aliases/SeriesPattern9.md)
- [SeriesTree\_Positions](type-aliases/SeriesTree_Positions.md)
- [StoredBool](type-aliases/StoredBool.md)
- [StoredF32](type-aliases/StoredF32.md)
- [StoredF64](type-aliases/StoredF64.md)

View File

@@ -6,7 +6,7 @@
# Interface: \_0sdM0M1M1sdM2M2sdM3sdP0P1P1sdP2P2sdP3sdSdZscorePattern
Defined in: [Developer/brk/modules/brk-client/index.js:1685](https://github.com/bitcoinresearchkit/brk/blob/d4dc1b9e4900e3787f2a133b8cac5d304acff9bf/modules/brk-client/index.js#L1685)
Defined in: [Developer/brk/modules/brk-client/index.js:1849](https://github.com/bitcoinresearchkit/brk/blob/83edef4806773ef7225b7c0de863c44bd953169d/modules/brk-client/index.js#L1849)
## Properties
@@ -14,7 +14,7 @@ Defined in: [Developer/brk/modules/brk-client/index.js:1685](https://github.com/
> **\_0sd**: [`CentsSatsUsdPattern`](CentsSatsUsdPattern.md)
Defined in: [Developer/brk/modules/brk-client/index.js:1686](https://github.com/bitcoinresearchkit/brk/blob/d4dc1b9e4900e3787f2a133b8cac5d304acff9bf/modules/brk-client/index.js#L1686)
Defined in: [Developer/brk/modules/brk-client/index.js:1850](https://github.com/bitcoinresearchkit/brk/blob/83edef4806773ef7225b7c0de863c44bd953169d/modules/brk-client/index.js#L1850)
***
@@ -22,7 +22,7 @@ Defined in: [Developer/brk/modules/brk-client/index.js:1686](https://github.com/
> **m05sd**: [`PriceRatioPattern`](PriceRatioPattern.md)
Defined in: [Developer/brk/modules/brk-client/index.js:1687](https://github.com/bitcoinresearchkit/brk/blob/d4dc1b9e4900e3787f2a133b8cac5d304acff9bf/modules/brk-client/index.js#L1687)
Defined in: [Developer/brk/modules/brk-client/index.js:1851](https://github.com/bitcoinresearchkit/brk/blob/83edef4806773ef7225b7c0de863c44bd953169d/modules/brk-client/index.js#L1851)
***
@@ -30,7 +30,7 @@ Defined in: [Developer/brk/modules/brk-client/index.js:1687](https://github.com/
> **m15sd**: [`PriceRatioPattern`](PriceRatioPattern.md)
Defined in: [Developer/brk/modules/brk-client/index.js:1688](https://github.com/bitcoinresearchkit/brk/blob/d4dc1b9e4900e3787f2a133b8cac5d304acff9bf/modules/brk-client/index.js#L1688)
Defined in: [Developer/brk/modules/brk-client/index.js:1852](https://github.com/bitcoinresearchkit/brk/blob/83edef4806773ef7225b7c0de863c44bd953169d/modules/brk-client/index.js#L1852)
***
@@ -38,7 +38,7 @@ Defined in: [Developer/brk/modules/brk-client/index.js:1688](https://github.com/
> **m1sd**: [`PriceRatioPattern`](PriceRatioPattern.md)
Defined in: [Developer/brk/modules/brk-client/index.js:1689](https://github.com/bitcoinresearchkit/brk/blob/d4dc1b9e4900e3787f2a133b8cac5d304acff9bf/modules/brk-client/index.js#L1689)
Defined in: [Developer/brk/modules/brk-client/index.js:1853](https://github.com/bitcoinresearchkit/brk/blob/83edef4806773ef7225b7c0de863c44bd953169d/modules/brk-client/index.js#L1853)
***
@@ -46,7 +46,7 @@ Defined in: [Developer/brk/modules/brk-client/index.js:1689](https://github.com/
> **m25sd**: [`PriceRatioPattern`](PriceRatioPattern.md)
Defined in: [Developer/brk/modules/brk-client/index.js:1690](https://github.com/bitcoinresearchkit/brk/blob/d4dc1b9e4900e3787f2a133b8cac5d304acff9bf/modules/brk-client/index.js#L1690)
Defined in: [Developer/brk/modules/brk-client/index.js:1854](https://github.com/bitcoinresearchkit/brk/blob/83edef4806773ef7225b7c0de863c44bd953169d/modules/brk-client/index.js#L1854)
***
@@ -54,7 +54,7 @@ Defined in: [Developer/brk/modules/brk-client/index.js:1690](https://github.com/
> **m2sd**: [`PriceRatioPattern`](PriceRatioPattern.md)
Defined in: [Developer/brk/modules/brk-client/index.js:1691](https://github.com/bitcoinresearchkit/brk/blob/d4dc1b9e4900e3787f2a133b8cac5d304acff9bf/modules/brk-client/index.js#L1691)
Defined in: [Developer/brk/modules/brk-client/index.js:1855](https://github.com/bitcoinresearchkit/brk/blob/83edef4806773ef7225b7c0de863c44bd953169d/modules/brk-client/index.js#L1855)
***
@@ -62,7 +62,7 @@ Defined in: [Developer/brk/modules/brk-client/index.js:1691](https://github.com/
> **m3sd**: [`PriceRatioPattern`](PriceRatioPattern.md)
Defined in: [Developer/brk/modules/brk-client/index.js:1692](https://github.com/bitcoinresearchkit/brk/blob/d4dc1b9e4900e3787f2a133b8cac5d304acff9bf/modules/brk-client/index.js#L1692)
Defined in: [Developer/brk/modules/brk-client/index.js:1856](https://github.com/bitcoinresearchkit/brk/blob/83edef4806773ef7225b7c0de863c44bd953169d/modules/brk-client/index.js#L1856)
***
@@ -70,7 +70,7 @@ Defined in: [Developer/brk/modules/brk-client/index.js:1692](https://github.com/
> **p05sd**: [`PriceRatioPattern`](PriceRatioPattern.md)
Defined in: [Developer/brk/modules/brk-client/index.js:1693](https://github.com/bitcoinresearchkit/brk/blob/d4dc1b9e4900e3787f2a133b8cac5d304acff9bf/modules/brk-client/index.js#L1693)
Defined in: [Developer/brk/modules/brk-client/index.js:1857](https://github.com/bitcoinresearchkit/brk/blob/83edef4806773ef7225b7c0de863c44bd953169d/modules/brk-client/index.js#L1857)
***
@@ -78,7 +78,7 @@ Defined in: [Developer/brk/modules/brk-client/index.js:1693](https://github.com/
> **p15sd**: [`PriceRatioPattern`](PriceRatioPattern.md)
Defined in: [Developer/brk/modules/brk-client/index.js:1694](https://github.com/bitcoinresearchkit/brk/blob/d4dc1b9e4900e3787f2a133b8cac5d304acff9bf/modules/brk-client/index.js#L1694)
Defined in: [Developer/brk/modules/brk-client/index.js:1858](https://github.com/bitcoinresearchkit/brk/blob/83edef4806773ef7225b7c0de863c44bd953169d/modules/brk-client/index.js#L1858)
***
@@ -86,7 +86,7 @@ Defined in: [Developer/brk/modules/brk-client/index.js:1694](https://github.com/
> **p1sd**: [`PriceRatioPattern`](PriceRatioPattern.md)
Defined in: [Developer/brk/modules/brk-client/index.js:1695](https://github.com/bitcoinresearchkit/brk/blob/d4dc1b9e4900e3787f2a133b8cac5d304acff9bf/modules/brk-client/index.js#L1695)
Defined in: [Developer/brk/modules/brk-client/index.js:1859](https://github.com/bitcoinresearchkit/brk/blob/83edef4806773ef7225b7c0de863c44bd953169d/modules/brk-client/index.js#L1859)
***
@@ -94,7 +94,7 @@ Defined in: [Developer/brk/modules/brk-client/index.js:1695](https://github.com/
> **p25sd**: [`PriceRatioPattern`](PriceRatioPattern.md)
Defined in: [Developer/brk/modules/brk-client/index.js:1696](https://github.com/bitcoinresearchkit/brk/blob/d4dc1b9e4900e3787f2a133b8cac5d304acff9bf/modules/brk-client/index.js#L1696)
Defined in: [Developer/brk/modules/brk-client/index.js:1860](https://github.com/bitcoinresearchkit/brk/blob/83edef4806773ef7225b7c0de863c44bd953169d/modules/brk-client/index.js#L1860)
***
@@ -102,7 +102,7 @@ Defined in: [Developer/brk/modules/brk-client/index.js:1696](https://github.com/
> **p2sd**: [`PriceRatioPattern`](PriceRatioPattern.md)
Defined in: [Developer/brk/modules/brk-client/index.js:1697](https://github.com/bitcoinresearchkit/brk/blob/d4dc1b9e4900e3787f2a133b8cac5d304acff9bf/modules/brk-client/index.js#L1697)
Defined in: [Developer/brk/modules/brk-client/index.js:1861](https://github.com/bitcoinresearchkit/brk/blob/83edef4806773ef7225b7c0de863c44bd953169d/modules/brk-client/index.js#L1861)
***
@@ -110,7 +110,7 @@ Defined in: [Developer/brk/modules/brk-client/index.js:1697](https://github.com/
> **p3sd**: [`PriceRatioPattern`](PriceRatioPattern.md)
Defined in: [Developer/brk/modules/brk-client/index.js:1698](https://github.com/bitcoinresearchkit/brk/blob/d4dc1b9e4900e3787f2a133b8cac5d304acff9bf/modules/brk-client/index.js#L1698)
Defined in: [Developer/brk/modules/brk-client/index.js:1862](https://github.com/bitcoinresearchkit/brk/blob/83edef4806773ef7225b7c0de863c44bd953169d/modules/brk-client/index.js#L1862)
***
@@ -118,7 +118,7 @@ Defined in: [Developer/brk/modules/brk-client/index.js:1698](https://github.com/
> **sd**: [`SeriesPattern1`](../type-aliases/SeriesPattern1.md)\<`number`\>
Defined in: [Developer/brk/modules/brk-client/index.js:1699](https://github.com/bitcoinresearchkit/brk/blob/d4dc1b9e4900e3787f2a133b8cac5d304acff9bf/modules/brk-client/index.js#L1699)
Defined in: [Developer/brk/modules/brk-client/index.js:1863](https://github.com/bitcoinresearchkit/brk/blob/83edef4806773ef7225b7c0de863c44bd953169d/modules/brk-client/index.js#L1863)
***
@@ -126,4 +126,4 @@ Defined in: [Developer/brk/modules/brk-client/index.js:1699](https://github.com/
> **zscore**: [`SeriesPattern1`](../type-aliases/SeriesPattern1.md)\<`number`\>
Defined in: [Developer/brk/modules/brk-client/index.js:1700](https://github.com/bitcoinresearchkit/brk/blob/d4dc1b9e4900e3787f2a133b8cac5d304acff9bf/modules/brk-client/index.js#L1700)
Defined in: [Developer/brk/modules/brk-client/index.js:1864](https://github.com/bitcoinresearchkit/brk/blob/83edef4806773ef7225b7c0de863c44bd953169d/modules/brk-client/index.js#L1864)

View File

@@ -6,7 +6,7 @@
# Interface: \_10y1m1w1y2y3m3y4y5y6m6y8yPattern2
Defined in: [Developer/brk/modules/brk-client/index.js:1704](https://github.com/bitcoinresearchkit/brk/blob/d4dc1b9e4900e3787f2a133b8cac5d304acff9bf/modules/brk-client/index.js#L1704)
Defined in: [Developer/brk/modules/brk-client/index.js:1868](https://github.com/bitcoinresearchkit/brk/blob/83edef4806773ef7225b7c0de863c44bd953169d/modules/brk-client/index.js#L1868)
## Properties
@@ -14,7 +14,7 @@ Defined in: [Developer/brk/modules/brk-client/index.js:1704](https://github.com/
> **\_10y**: [`BpsPercentRatioPattern`](BpsPercentRatioPattern.md)
Defined in: [Developer/brk/modules/brk-client/index.js:1705](https://github.com/bitcoinresearchkit/brk/blob/d4dc1b9e4900e3787f2a133b8cac5d304acff9bf/modules/brk-client/index.js#L1705)
Defined in: [Developer/brk/modules/brk-client/index.js:1869](https://github.com/bitcoinresearchkit/brk/blob/83edef4806773ef7225b7c0de863c44bd953169d/modules/brk-client/index.js#L1869)
***
@@ -22,7 +22,7 @@ Defined in: [Developer/brk/modules/brk-client/index.js:1705](https://github.com/
> **\_1m**: [`BpsPercentRatioPattern`](BpsPercentRatioPattern.md)
Defined in: [Developer/brk/modules/brk-client/index.js:1706](https://github.com/bitcoinresearchkit/brk/blob/d4dc1b9e4900e3787f2a133b8cac5d304acff9bf/modules/brk-client/index.js#L1706)
Defined in: [Developer/brk/modules/brk-client/index.js:1870](https://github.com/bitcoinresearchkit/brk/blob/83edef4806773ef7225b7c0de863c44bd953169d/modules/brk-client/index.js#L1870)
***
@@ -30,7 +30,7 @@ Defined in: [Developer/brk/modules/brk-client/index.js:1706](https://github.com/
> **\_1w**: [`BpsPercentRatioPattern`](BpsPercentRatioPattern.md)
Defined in: [Developer/brk/modules/brk-client/index.js:1707](https://github.com/bitcoinresearchkit/brk/blob/d4dc1b9e4900e3787f2a133b8cac5d304acff9bf/modules/brk-client/index.js#L1707)
Defined in: [Developer/brk/modules/brk-client/index.js:1871](https://github.com/bitcoinresearchkit/brk/blob/83edef4806773ef7225b7c0de863c44bd953169d/modules/brk-client/index.js#L1871)
***
@@ -38,7 +38,7 @@ Defined in: [Developer/brk/modules/brk-client/index.js:1707](https://github.com/
> **\_1y**: [`BpsPercentRatioPattern`](BpsPercentRatioPattern.md)
Defined in: [Developer/brk/modules/brk-client/index.js:1708](https://github.com/bitcoinresearchkit/brk/blob/d4dc1b9e4900e3787f2a133b8cac5d304acff9bf/modules/brk-client/index.js#L1708)
Defined in: [Developer/brk/modules/brk-client/index.js:1872](https://github.com/bitcoinresearchkit/brk/blob/83edef4806773ef7225b7c0de863c44bd953169d/modules/brk-client/index.js#L1872)
***
@@ -46,7 +46,7 @@ Defined in: [Developer/brk/modules/brk-client/index.js:1708](https://github.com/
> **\_2y**: [`BpsPercentRatioPattern`](BpsPercentRatioPattern.md)
Defined in: [Developer/brk/modules/brk-client/index.js:1709](https://github.com/bitcoinresearchkit/brk/blob/d4dc1b9e4900e3787f2a133b8cac5d304acff9bf/modules/brk-client/index.js#L1709)
Defined in: [Developer/brk/modules/brk-client/index.js:1873](https://github.com/bitcoinresearchkit/brk/blob/83edef4806773ef7225b7c0de863c44bd953169d/modules/brk-client/index.js#L1873)
***
@@ -54,7 +54,7 @@ Defined in: [Developer/brk/modules/brk-client/index.js:1709](https://github.com/
> **\_3m**: [`BpsPercentRatioPattern`](BpsPercentRatioPattern.md)
Defined in: [Developer/brk/modules/brk-client/index.js:1710](https://github.com/bitcoinresearchkit/brk/blob/d4dc1b9e4900e3787f2a133b8cac5d304acff9bf/modules/brk-client/index.js#L1710)
Defined in: [Developer/brk/modules/brk-client/index.js:1874](https://github.com/bitcoinresearchkit/brk/blob/83edef4806773ef7225b7c0de863c44bd953169d/modules/brk-client/index.js#L1874)
***
@@ -62,7 +62,7 @@ Defined in: [Developer/brk/modules/brk-client/index.js:1710](https://github.com/
> **\_3y**: [`BpsPercentRatioPattern`](BpsPercentRatioPattern.md)
Defined in: [Developer/brk/modules/brk-client/index.js:1711](https://github.com/bitcoinresearchkit/brk/blob/d4dc1b9e4900e3787f2a133b8cac5d304acff9bf/modules/brk-client/index.js#L1711)
Defined in: [Developer/brk/modules/brk-client/index.js:1875](https://github.com/bitcoinresearchkit/brk/blob/83edef4806773ef7225b7c0de863c44bd953169d/modules/brk-client/index.js#L1875)
***
@@ -70,7 +70,7 @@ Defined in: [Developer/brk/modules/brk-client/index.js:1711](https://github.com/
> **\_4y**: [`BpsPercentRatioPattern`](BpsPercentRatioPattern.md)
Defined in: [Developer/brk/modules/brk-client/index.js:1712](https://github.com/bitcoinresearchkit/brk/blob/d4dc1b9e4900e3787f2a133b8cac5d304acff9bf/modules/brk-client/index.js#L1712)
Defined in: [Developer/brk/modules/brk-client/index.js:1876](https://github.com/bitcoinresearchkit/brk/blob/83edef4806773ef7225b7c0de863c44bd953169d/modules/brk-client/index.js#L1876)
***
@@ -78,7 +78,7 @@ Defined in: [Developer/brk/modules/brk-client/index.js:1712](https://github.com/
> **\_5y**: [`BpsPercentRatioPattern`](BpsPercentRatioPattern.md)
Defined in: [Developer/brk/modules/brk-client/index.js:1713](https://github.com/bitcoinresearchkit/brk/blob/d4dc1b9e4900e3787f2a133b8cac5d304acff9bf/modules/brk-client/index.js#L1713)
Defined in: [Developer/brk/modules/brk-client/index.js:1877](https://github.com/bitcoinresearchkit/brk/blob/83edef4806773ef7225b7c0de863c44bd953169d/modules/brk-client/index.js#L1877)
***
@@ -86,7 +86,7 @@ Defined in: [Developer/brk/modules/brk-client/index.js:1713](https://github.com/
> **\_6m**: [`BpsPercentRatioPattern`](BpsPercentRatioPattern.md)
Defined in: [Developer/brk/modules/brk-client/index.js:1714](https://github.com/bitcoinresearchkit/brk/blob/d4dc1b9e4900e3787f2a133b8cac5d304acff9bf/modules/brk-client/index.js#L1714)
Defined in: [Developer/brk/modules/brk-client/index.js:1878](https://github.com/bitcoinresearchkit/brk/blob/83edef4806773ef7225b7c0de863c44bd953169d/modules/brk-client/index.js#L1878)
***
@@ -94,7 +94,7 @@ Defined in: [Developer/brk/modules/brk-client/index.js:1714](https://github.com/
> **\_6y**: [`BpsPercentRatioPattern`](BpsPercentRatioPattern.md)
Defined in: [Developer/brk/modules/brk-client/index.js:1715](https://github.com/bitcoinresearchkit/brk/blob/d4dc1b9e4900e3787f2a133b8cac5d304acff9bf/modules/brk-client/index.js#L1715)
Defined in: [Developer/brk/modules/brk-client/index.js:1879](https://github.com/bitcoinresearchkit/brk/blob/83edef4806773ef7225b7c0de863c44bd953169d/modules/brk-client/index.js#L1879)
***
@@ -102,4 +102,4 @@ Defined in: [Developer/brk/modules/brk-client/index.js:1715](https://github.com/
> **\_8y**: [`BpsPercentRatioPattern`](BpsPercentRatioPattern.md)
Defined in: [Developer/brk/modules/brk-client/index.js:1716](https://github.com/bitcoinresearchkit/brk/blob/d4dc1b9e4900e3787f2a133b8cac5d304acff9bf/modules/brk-client/index.js#L1716)
Defined in: [Developer/brk/modules/brk-client/index.js:1880](https://github.com/bitcoinresearchkit/brk/blob/83edef4806773ef7225b7c0de863c44bd953169d/modules/brk-client/index.js#L1880)

View File

@@ -6,7 +6,7 @@
# Interface: \_10y1m1w1y2y3m3y4y5y6m6y8yPattern3
Defined in: [Developer/brk/modules/brk-client/index.js:1743](https://github.com/bitcoinresearchkit/brk/blob/d4dc1b9e4900e3787f2a133b8cac5d304acff9bf/modules/brk-client/index.js#L1743)
Defined in: [Developer/brk/modules/brk-client/index.js:1907](https://github.com/bitcoinresearchkit/brk/blob/83edef4806773ef7225b7c0de863c44bd953169d/modules/brk-client/index.js#L1907)
## Properties
@@ -14,7 +14,7 @@ Defined in: [Developer/brk/modules/brk-client/index.js:1743](https://github.com/
> **\_10y**: [`BtcCentsSatsUsdPattern3`](BtcCentsSatsUsdPattern3.md)
Defined in: [Developer/brk/modules/brk-client/index.js:1744](https://github.com/bitcoinresearchkit/brk/blob/d4dc1b9e4900e3787f2a133b8cac5d304acff9bf/modules/brk-client/index.js#L1744)
Defined in: [Developer/brk/modules/brk-client/index.js:1908](https://github.com/bitcoinresearchkit/brk/blob/83edef4806773ef7225b7c0de863c44bd953169d/modules/brk-client/index.js#L1908)
***
@@ -22,7 +22,7 @@ Defined in: [Developer/brk/modules/brk-client/index.js:1744](https://github.com/
> **\_1m**: [`BtcCentsSatsUsdPattern3`](BtcCentsSatsUsdPattern3.md)
Defined in: [Developer/brk/modules/brk-client/index.js:1745](https://github.com/bitcoinresearchkit/brk/blob/d4dc1b9e4900e3787f2a133b8cac5d304acff9bf/modules/brk-client/index.js#L1745)
Defined in: [Developer/brk/modules/brk-client/index.js:1909](https://github.com/bitcoinresearchkit/brk/blob/83edef4806773ef7225b7c0de863c44bd953169d/modules/brk-client/index.js#L1909)
***
@@ -30,7 +30,7 @@ Defined in: [Developer/brk/modules/brk-client/index.js:1745](https://github.com/
> **\_1w**: [`BtcCentsSatsUsdPattern3`](BtcCentsSatsUsdPattern3.md)
Defined in: [Developer/brk/modules/brk-client/index.js:1746](https://github.com/bitcoinresearchkit/brk/blob/d4dc1b9e4900e3787f2a133b8cac5d304acff9bf/modules/brk-client/index.js#L1746)
Defined in: [Developer/brk/modules/brk-client/index.js:1910](https://github.com/bitcoinresearchkit/brk/blob/83edef4806773ef7225b7c0de863c44bd953169d/modules/brk-client/index.js#L1910)
***
@@ -38,7 +38,7 @@ Defined in: [Developer/brk/modules/brk-client/index.js:1746](https://github.com/
> **\_1y**: [`BtcCentsSatsUsdPattern3`](BtcCentsSatsUsdPattern3.md)
Defined in: [Developer/brk/modules/brk-client/index.js:1747](https://github.com/bitcoinresearchkit/brk/blob/d4dc1b9e4900e3787f2a133b8cac5d304acff9bf/modules/brk-client/index.js#L1747)
Defined in: [Developer/brk/modules/brk-client/index.js:1911](https://github.com/bitcoinresearchkit/brk/blob/83edef4806773ef7225b7c0de863c44bd953169d/modules/brk-client/index.js#L1911)
***
@@ -46,7 +46,7 @@ Defined in: [Developer/brk/modules/brk-client/index.js:1747](https://github.com/
> **\_2y**: [`BtcCentsSatsUsdPattern3`](BtcCentsSatsUsdPattern3.md)
Defined in: [Developer/brk/modules/brk-client/index.js:1748](https://github.com/bitcoinresearchkit/brk/blob/d4dc1b9e4900e3787f2a133b8cac5d304acff9bf/modules/brk-client/index.js#L1748)
Defined in: [Developer/brk/modules/brk-client/index.js:1912](https://github.com/bitcoinresearchkit/brk/blob/83edef4806773ef7225b7c0de863c44bd953169d/modules/brk-client/index.js#L1912)
***
@@ -54,7 +54,7 @@ Defined in: [Developer/brk/modules/brk-client/index.js:1748](https://github.com/
> **\_3m**: [`BtcCentsSatsUsdPattern3`](BtcCentsSatsUsdPattern3.md)
Defined in: [Developer/brk/modules/brk-client/index.js:1749](https://github.com/bitcoinresearchkit/brk/blob/d4dc1b9e4900e3787f2a133b8cac5d304acff9bf/modules/brk-client/index.js#L1749)
Defined in: [Developer/brk/modules/brk-client/index.js:1913](https://github.com/bitcoinresearchkit/brk/blob/83edef4806773ef7225b7c0de863c44bd953169d/modules/brk-client/index.js#L1913)
***
@@ -62,7 +62,7 @@ Defined in: [Developer/brk/modules/brk-client/index.js:1749](https://github.com/
> **\_3y**: [`BtcCentsSatsUsdPattern3`](BtcCentsSatsUsdPattern3.md)
Defined in: [Developer/brk/modules/brk-client/index.js:1750](https://github.com/bitcoinresearchkit/brk/blob/d4dc1b9e4900e3787f2a133b8cac5d304acff9bf/modules/brk-client/index.js#L1750)
Defined in: [Developer/brk/modules/brk-client/index.js:1914](https://github.com/bitcoinresearchkit/brk/blob/83edef4806773ef7225b7c0de863c44bd953169d/modules/brk-client/index.js#L1914)
***
@@ -70,7 +70,7 @@ Defined in: [Developer/brk/modules/brk-client/index.js:1750](https://github.com/
> **\_4y**: [`BtcCentsSatsUsdPattern3`](BtcCentsSatsUsdPattern3.md)
Defined in: [Developer/brk/modules/brk-client/index.js:1751](https://github.com/bitcoinresearchkit/brk/blob/d4dc1b9e4900e3787f2a133b8cac5d304acff9bf/modules/brk-client/index.js#L1751)
Defined in: [Developer/brk/modules/brk-client/index.js:1915](https://github.com/bitcoinresearchkit/brk/blob/83edef4806773ef7225b7c0de863c44bd953169d/modules/brk-client/index.js#L1915)
***
@@ -78,7 +78,7 @@ Defined in: [Developer/brk/modules/brk-client/index.js:1751](https://github.com/
> **\_5y**: [`BtcCentsSatsUsdPattern3`](BtcCentsSatsUsdPattern3.md)
Defined in: [Developer/brk/modules/brk-client/index.js:1752](https://github.com/bitcoinresearchkit/brk/blob/d4dc1b9e4900e3787f2a133b8cac5d304acff9bf/modules/brk-client/index.js#L1752)
Defined in: [Developer/brk/modules/brk-client/index.js:1916](https://github.com/bitcoinresearchkit/brk/blob/83edef4806773ef7225b7c0de863c44bd953169d/modules/brk-client/index.js#L1916)
***
@@ -86,7 +86,7 @@ Defined in: [Developer/brk/modules/brk-client/index.js:1752](https://github.com/
> **\_6m**: [`BtcCentsSatsUsdPattern3`](BtcCentsSatsUsdPattern3.md)
Defined in: [Developer/brk/modules/brk-client/index.js:1753](https://github.com/bitcoinresearchkit/brk/blob/d4dc1b9e4900e3787f2a133b8cac5d304acff9bf/modules/brk-client/index.js#L1753)
Defined in: [Developer/brk/modules/brk-client/index.js:1917](https://github.com/bitcoinresearchkit/brk/blob/83edef4806773ef7225b7c0de863c44bd953169d/modules/brk-client/index.js#L1917)
***
@@ -94,7 +94,7 @@ Defined in: [Developer/brk/modules/brk-client/index.js:1753](https://github.com/
> **\_6y**: [`BtcCentsSatsUsdPattern3`](BtcCentsSatsUsdPattern3.md)
Defined in: [Developer/brk/modules/brk-client/index.js:1754](https://github.com/bitcoinresearchkit/brk/blob/d4dc1b9e4900e3787f2a133b8cac5d304acff9bf/modules/brk-client/index.js#L1754)
Defined in: [Developer/brk/modules/brk-client/index.js:1918](https://github.com/bitcoinresearchkit/brk/blob/83edef4806773ef7225b7c0de863c44bd953169d/modules/brk-client/index.js#L1918)
***
@@ -102,4 +102,4 @@ Defined in: [Developer/brk/modules/brk-client/index.js:1754](https://github.com/
> **\_8y**: [`BtcCentsSatsUsdPattern3`](BtcCentsSatsUsdPattern3.md)
Defined in: [Developer/brk/modules/brk-client/index.js:1755](https://github.com/bitcoinresearchkit/brk/blob/d4dc1b9e4900e3787f2a133b8cac5d304acff9bf/modules/brk-client/index.js#L1755)
Defined in: [Developer/brk/modules/brk-client/index.js:1919](https://github.com/bitcoinresearchkit/brk/blob/83edef4806773ef7225b7c0de863c44bd953169d/modules/brk-client/index.js#L1919)

Some files were not shown because too many files have changed in this diff Show More