global: snapshot + pools + fixes

This commit is contained in:
nym21
2025-09-07 17:01:34 +02:00
parent e155a3dacf
commit 4049d694f7
69 changed files with 2768 additions and 1444 deletions

2
.gitignore vendored
View File

@@ -4,7 +4,7 @@
# Builds
target
websites/dist
vecid-to-indexes.js
bridge/
/ids.txt
# Copies

192
Cargo.lock generated
View File

@@ -54,6 +54,28 @@ dependencies = [
"alloc-no-stdlib",
]
[[package]]
name = "allocative"
version = "0.3.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "8fac2ce611db8b8cee9b2aa886ca03c924e9da5e5295d0dbd0526e5d0b0710f7"
dependencies = [
"allocative_derive",
"ctor",
"parking_lot 0.11.2",
]
[[package]]
name = "allocative_derive"
version = "0.3.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "fe233a377643e0fc1a56421d7c90acdec45c291b30345eb9f08e8d0ddce5a4ab"
dependencies = [
"proc-macro2",
"quote",
"syn 2.0.106",
]
[[package]]
name = "allocator-api2"
version = "0.2.21"
@@ -539,6 +561,8 @@ dependencies = [
name = "brk_computer"
version = "0.0.98"
dependencies = [
"allocative",
"allocative_derive",
"bitcoin",
"bitcoincore-rpc",
"brk_error",
@@ -546,13 +570,17 @@ dependencies = [
"brk_indexer",
"brk_logger",
"brk_parser",
"brk_store",
"brk_structs",
"derive_deref",
"inferno",
"jiff",
"log",
"num_enum",
"pco",
"rayon",
"serde",
"serde_json",
"vecdb",
"zerocopy",
"zerocopy-derive",
@@ -1068,6 +1096,8 @@ dependencies = [
name = "brk_structs"
version = "0.0.98"
dependencies = [
"allocative",
"allocative_derive",
"bitcoin",
"bitcoincore-rpc",
"brk_error",
@@ -1118,6 +1148,12 @@ version = "0.6.9"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "175812e0be2bccb6abe50bb8d566126198344f707e304f45c648fd8f2cc0365e"
[[package]]
name = "bytemuck"
version = "1.23.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "3995eaeebcdf32f91f980d360f78732ddc061097ab4e39991ae7a6ace9194677"
[[package]]
name = "byteorder"
version = "1.5.0"
@@ -1202,6 +1238,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "7eac00902d9d136acd712710d71823fb8ac8004ca445a89e73a41d45aa712931"
dependencies = [
"clap_builder",
"clap_derive",
]
[[package]]
@@ -1459,6 +1496,16 @@ dependencies = [
"smallvec",
]
[[package]]
name = "ctor"
version = "0.1.26"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "6d2301688392eb071b0bf1a37be05c469d3cc4dbbd95df672fe28ab021e6a096"
dependencies = [
"quote",
"syn 1.0.109",
]
[[package]]
name = "ctrlc"
version = "3.4.7"
@@ -1550,7 +1597,7 @@ dependencies = [
"hashbrown 0.14.5",
"lock_api",
"once_cell",
"parking_lot_core",
"parking_lot_core 0.9.11",
]
[[package]]
@@ -2289,6 +2336,28 @@ dependencies = [
"cfb",
]
[[package]]
name = "inferno"
version = "0.12.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "e96d2465363ed2d81857759fc864cf6bb7997f79327aec028d65bd7989393685"
dependencies = [
"ahash",
"clap",
"crossbeam-channel",
"crossbeam-utils",
"dashmap",
"env_logger",
"indexmap 2.11.0",
"itoa",
"log",
"num-format",
"once_cell",
"quick-xml",
"rgb",
"str_stack",
]
[[package]]
name = "inotify"
version = "0.11.0"
@@ -2309,6 +2378,15 @@ dependencies = [
"libc",
]
[[package]]
name = "instant"
version = "0.1.13"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "e0242819d153cba4b4b05a5a8f2a7e9bbf97b6055b2a002b395c96b5ff3c0222"
dependencies = [
"cfg-if",
]
[[package]]
name = "interval-heap"
version = "0.0.5"
@@ -2472,7 +2550,7 @@ checksum = "391290121bad3d37fbddad76d8f5d1c1c314cfc646d143d7e07a3086ddff0ce3"
dependencies = [
"bitflags 2.9.4",
"libc",
"redox_syscall",
"redox_syscall 0.5.17",
]
[[package]]
@@ -2698,6 +2776,16 @@ version = "0.1.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "51d515d32fb182ee37cda2ccdcb92950d6a3c2893aa280e540671c2cd0f3b1d9"
[[package]]
name = "num-format"
version = "0.4.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "a652d9771a63711fd3c3deb670acfbe5c30a4072e664d7a3bf5a9e1056ac72c3"
dependencies = [
"arrayvec",
"itoa",
]
[[package]]
name = "num-integer"
version = "0.1.46"
@@ -3262,6 +3350,17 @@ dependencies = [
"unicode-width",
]
[[package]]
name = "parking_lot"
version = "0.11.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "7d17b78036a60663b797adeaee46f5c9dfebb86948d1255007a1d6be0271ff99"
dependencies = [
"instant",
"lock_api",
"parking_lot_core 0.8.6",
]
[[package]]
name = "parking_lot"
version = "0.12.4"
@@ -3269,7 +3368,21 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "70d58bf43669b5795d1576d0641cfb6fbb2057bf629506267a92807158584a13"
dependencies = [
"lock_api",
"parking_lot_core",
"parking_lot_core 0.9.11",
]
[[package]]
name = "parking_lot_core"
version = "0.8.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "60a2cfe6f0ad2bfc16aefa463b497d5c7a5ecd44a23efa72aa342d90177356dc"
dependencies = [
"cfg-if",
"instant",
"libc",
"redox_syscall 0.2.16",
"smallvec",
"winapi",
]
[[package]]
@@ -3280,7 +3393,7 @@ checksum = "bc838d2a56b5b1a6c25f55575dfc605fabb63bb2365f6c2353ef9159aa69e4a5"
dependencies = [
"cfg-if",
"libc",
"redox_syscall",
"redox_syscall 0.5.17",
"smallvec",
"windows-targets 0.52.6",
]
@@ -3531,6 +3644,15 @@ dependencies = [
"unicode-ident",
]
[[package]]
name = "quick-xml"
version = "0.37.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "331e97a1af0bf59823e6eadffe373d7b27f485be8748f71471c662c1f269b7fb"
dependencies = [
"memchr",
]
[[package]]
name = "quick_cache"
version = "0.6.16"
@@ -3540,7 +3662,7 @@ dependencies = [
"ahash",
"equivalent",
"hashbrown 0.15.5",
"parking_lot",
"parking_lot 0.12.4",
]
[[package]]
@@ -3665,6 +3787,15 @@ dependencies = [
"crossbeam-utils",
]
[[package]]
name = "redox_syscall"
version = "0.2.16"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "fb5a58c1855b4b6819d59012155603f0b22ad30cad752600aadfcb695265519a"
dependencies = [
"bitflags 1.3.2",
]
[[package]]
name = "redox_syscall"
version = "0.5.17"
@@ -3733,6 +3864,15 @@ dependencies = [
"memchr",
]
[[package]]
name = "rgb"
version = "0.8.52"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "0c6a884d2998352bb4daf0183589aec883f16a6da1f4dde84d8e2e9a5409a1ce"
dependencies = [
"bytemuck",
]
[[package]]
name = "ring"
version = "0.17.14"
@@ -3935,13 +4075,13 @@ checksum = "1bc711410fbe7399f390ca1c3b60ad0f53f80e95c5eb935e52268a0e2cd49acc"
[[package]]
name = "seqdb"
version = "0.2.9"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "ade192d6f8a1e58c43428c8e194016124f20744f75cb14e2fcc0c79309fe4f91"
dependencies = [
"allocative",
"allocative_derive",
"libc",
"log",
"memmap2",
"parking_lot",
"parking_lot 0.12.4",
"rayon",
"zerocopy",
"zerocopy-derive",
@@ -4172,6 +4312,12 @@ version = "0.4.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "d08889ec5408683408db66ad89e0e1f93dff55c73a4ccc71c427d5b277ee47e6"
[[package]]
name = "str_stack"
version = "0.1.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "9091b6114800a5f2141aee1d1b9d6ca3592ac062dc5decb3764ec5895a47b4eb"
[[package]]
name = "strsim"
version = "0.11.1"
@@ -4785,12 +4931,12 @@ checksum = "8f54a172d0620933a27a4360d3db3e2ae0dd6cceae9730751a036bbf182c4b23"
[[package]]
name = "vecdb"
version = "0.2.9"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "5c3e103eeb634851690d815a886ba8fe1ab21d2f520d915c411979bc76de8419"
dependencies = [
"allocative",
"allocative_derive",
"ctrlc",
"log",
"parking_lot",
"parking_lot 0.12.4",
"pco",
"rayon",
"seqdb",
@@ -4805,8 +4951,6 @@ dependencies = [
[[package]]
name = "vecdb_derive"
version = "0.2.9"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "6bdf71ae8eeaf2f187cee599e8e34a49bf6fd0536977bd009d94be35e3801754"
dependencies = [
"quote",
"syn 2.0.106",
@@ -4929,6 +5073,22 @@ version = "0.25.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "5f20c57d8d7db6d3b86154206ae5d8fba62dd39573114de97c2cb0578251f8e1"
[[package]]
name = "winapi"
version = "0.3.9"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "5c839a674fcd7a98952e593242ea400abe93992746761e38641405d28b00f419"
dependencies = [
"winapi-i686-pc-windows-gnu",
"winapi-x86_64-pc-windows-gnu",
]
[[package]]
name = "winapi-i686-pc-windows-gnu"
version = "0.4.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "ac3b87c63620426dd9b991e5ce0329eff545bccbbb34f3be09ff6fb6ab51b7b6"
[[package]]
name = "winapi-util"
version = "0.1.10"
@@ -4938,6 +5098,12 @@ dependencies = [
"windows-sys 0.60.2",
]
[[package]]
name = "winapi-x86_64-pc-windows-gnu"
version = "0.4.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "712e227841d057c1ee1cd2fb22fa7e5a5461ae8e48fa2ca79ec42cfc1931183f"
[[package]]
name = "windows-core"
version = "0.61.2"

View File

@@ -23,6 +23,8 @@ debug = true
inherits = "release"
[workspace.dependencies]
allocative = { version = "0.3.4", features = ["parking_lot"] }
allocative_derive = "0.3.3"
axum = "0.8.4"
bitcoin = { version = "0.32.7", features = ["serde"] }
bitcoincore-rpc = "0.19.0"
@@ -53,8 +55,8 @@ serde_bytes = "0.11.17"
serde_derive = "1.0.219"
serde_json = { version = "1.0.143", features = ["float_roundtrip"] }
tokio = { version = "1.47.1", features = ["rt-multi-thread"] }
# vecdb = { path = "../seqdb/crates/vecdb", features = ["derive"]}
vecdb = { version = "0.2.9", features = ["derive"]}
vecdb = { path = "../seqdb/crates/vecdb", features = ["derive"]}
# vecdb = { version = "0.2.9", features = ["derive"]}
zerocopy = "0.8.26"
zerocopy-derive = "0.8.26"

View File

@@ -64,11 +64,9 @@
- miners
- maybe xpubs
- charts
- improve some names and colors
- remove `sum` series when it's a duplicate of the `base` (in subsidy for example)
- improve names and colors
- selected unit sometimes changes when going back end forth
- add support for custom charts
- separate z-score charts from "realized price" (with their own prices), have 4y, 2y and 1y
- price scale format depends on unit, hide digits for sats for example (if/when possible)
- table
- pagination

View File

@@ -35,7 +35,9 @@ pub async fn bundle(websites_path: &Path, source_folder: &str, watch: bool) -> i
..Default::default()
});
bundler.write().await.unwrap();
if let Err(error) = bundler.write().await {
error!("{error:?}");
}
let absolute_source_index_path = source_path.join("index.html").absolutize();
let absolute_source_index_path_clone = absolute_source_index_path.clone();

View File

@@ -1,19 +1,20 @@
use std::{fs, io, path::Path};
use brk_computer::pools;
use brk_interface::{Index, Interface};
use brk_server::VERSION;
use crate::website::Website;
const SCRIPTS: &str = "scripts";
const BRIDGE_PATH: &str = "scripts/bridge";
#[allow(clippy::upper_case_acronyms)]
pub trait Bridge {
fn generate_bridge_file(&self, website: Website, websites_path: &Path) -> io::Result<()>;
fn generate_bridge_files(&self, website: Website, websites_path: &Path) -> io::Result<()>;
}
impl Bridge for Interface<'static> {
fn generate_bridge_file(&self, website: Website, websites_path: &Path) -> io::Result<()> {
fn generate_bridge_files(&self, website: Website, websites_path: &Path) -> io::Result<()> {
if website.is_none() {
return Ok(());
}
@@ -24,88 +25,130 @@ impl Bridge for Interface<'static> {
return Ok(());
}
let path = path.join(SCRIPTS);
let path = path.join(BRIDGE_PATH);
fs::create_dir_all(&path)?;
let path = path.join(Path::new("vecid-to-indexes.js"));
generate_vecs_file(self, &path)?;
generate_pools_file(&path)
}
}
let indexes = Index::all();
fn generate_pools_file(parent: &Path) -> io::Result<()> {
let path = parent.join(Path::new("pools.js"));
let mut contents = format!(
"//
let pools = pools();
let mut contents = "//
// File auto-generated, any modifications will be overwritten
//
"
.to_string();
contents += "
/** @typedef {ReturnType<typeof createPools>} Pools */
/** @typedef {keyof Pools} Pool */
export function createPools() {
return /** @type {const} */ ({
";
let mut sorted_pools: Vec<_> = pools.iter().collect();
sorted_pools.sort_by(|a, b| a.name.to_lowercase().cmp(&b.name.to_lowercase()));
contents += &sorted_pools
.iter()
.map(|pool| {
let id = pool.serialized_id();
format!(" {id}: \"{}\",", pool.name)
})
.collect::<Vec<_>>()
.join("\n");
contents += "\n });\n}\n";
fs::write(path, contents)
}
fn generate_vecs_file(interface: &Interface<'static>, parent: &Path) -> io::Result<()> {
let path = parent.join(Path::new("vecs.js"));
let indexes = Index::all();
let mut contents = format!(
"//
// File auto-generated, any modifications will be overwritten
//
export const VERSION = \"v{VERSION}\";
"
);
);
contents += &indexes
contents += &indexes
.iter()
.enumerate()
.map(|(i_of_i, i)| {
// let lowered = i.to_string().to_lowercase();
format!("/** @typedef {{{i_of_i}}} {i} */",)
})
.collect::<Vec<_>>()
.join("\n");
contents += &format!(
"\n\n/** @typedef {{{}}} Index */\n",
indexes
.iter()
.enumerate()
.map(|(i_of_i, i)| {
// let lowered = i.to_string().to_lowercase();
format!("/** @typedef {{{i_of_i}}} {i} */",)
})
.map(|i| i.to_string())
.collect::<Vec<_>>()
.join("\n");
.join(" | ")
);
contents += &format!(
"\n\n/** @typedef {{{}}} Index */\n",
indexes
.iter()
.map(|i| i.to_string())
.collect::<Vec<_>>()
.join(" | ")
);
contents += "
contents += "
/** @typedef {ReturnType<typeof createIndexes>} Indexes */
export function createIndexes() {
return {
return {
";
contents += &indexes
.iter()
.enumerate()
.map(|(i_of_i, i)| {
let lowered = i.to_string().to_lowercase();
format!(" {lowered}: /** @satisfies {{{i}}} */ ({i_of_i}),",)
})
.collect::<Vec<_>>()
.join("\n");
contents += &indexes
.iter()
.enumerate()
.map(|(i_of_i, i)| {
let lowered = i.to_string().to_lowercase();
format!(" {lowered}: /** @satisfies {{{i}}} */ ({i_of_i}),",)
})
.collect::<Vec<_>>()
.join("\n");
contents += " };\n}\n";
contents += " };\n}\n";
contents += "
contents += "
/** @typedef {ReturnType<typeof createVecIdToIndexes>} VecIdToIndexes
/** @typedef {keyof VecIdToIndexes} VecId */
/**
* @returns {Record<any, number[]>}
*/
* @returns {Record<any, number[]>}
*/
export function createVecIdToIndexes() {
return {
return {
";
self.id_to_index_to_vec()
.iter()
.for_each(|(id, index_to_vec)| {
let indexes = index_to_vec
.keys()
.map(|i| (*i as u8).to_string())
// .map(|i| i.to_string())
.collect::<Vec<_>>()
.join(", ");
interface
.id_to_index_to_vec()
.iter()
.for_each(|(id, index_to_vec)| {
let indexes = index_to_vec
.keys()
.map(|i| (*i as u8).to_string())
// .map(|i| i.to_string())
.collect::<Vec<_>>()
.join(", ");
contents += &format!(" \"{id}\": [{indexes}],\n");
});
contents += &format!(" \"{id}\": [{indexes}],\n");
});
contents += " };\n}\n";
contents += " };\n}\n";
fs::write(path, contents)
}
fs::write(path, contents)
}

View File

@@ -6,7 +6,6 @@ use std::{
use bitcoincore_rpc::{self, Auth, Client};
use brk_fetcher::Fetcher;
use clap::Parser;
use clap_derive::Parser;
use color_eyre::eyre::eyre;
use serde::{Deserialize, Deserializer, Serialize};

View File

@@ -107,7 +107,7 @@ pub fn run() -> color_eyre::Result<()> {
downloaded_websites_path
};
interface.generate_bridge_file(website, websites_path.as_path())?;
interface.generate_bridge_files(website, websites_path.as_path())?;
Some(bundle(&websites_path, website.to_folder_name(), true).await?)
} else {
@@ -135,6 +135,8 @@ pub fn run() -> color_eyre::Result<()> {
let starting_indexes =
indexer.index(&parser, rpc, &exit, config.check_collisions()).unwrap();
// dbg!(&starting_indexes);
computer.compute(&indexer, starting_indexes, &exit).unwrap();
info!("Waiting for new blocks...");

View File

@@ -2,6 +2,7 @@ use clap_derive::ValueEnum;
use serde::{Deserialize, Serialize};
#[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Deserialize, Serialize, ValueEnum)]
#[serde(rename_all = "lowercase")]
pub enum Website {
None,
Bitview,

View File

@@ -10,6 +10,8 @@ rust-version.workspace = true
build = "build.rs"
[dependencies]
allocative = { workspace = true }
allocative_derive = { workspace = true }
bitcoin = { workspace = true }
bitcoincore-rpc = { workspace = true }
brk_structs = { workspace = true }
@@ -17,14 +19,18 @@ brk_error = { workspace = true }
brk_fetcher = { workspace = true }
brk_indexer = { workspace = true }
brk_logger = { workspace = true }
brk_store = { workspace = true }
brk_parser = { workspace = true }
vecdb = { workspace = true }
derive_deref = { workspace = true }
inferno = "0.12.3"
jiff = { workspace = true }
log = { workspace = true }
num_enum = "0.7.4"
pco = "0.4.6"
rayon = { workspace = true }
serde = { workspace = true }
serde_json = { workspace = true }
zerocopy = { workspace = true }
zerocopy-derive = { workspace = true }

View File

@@ -4,7 +4,8 @@ use brk_computer::{Computer, pools};
use brk_error::Result;
use brk_fetcher::Fetcher;
use brk_indexer::Indexer;
use vecdb::Exit;
use brk_structs::{AddressBytes, OutputIndex, OutputType};
use vecdb::{AnyIterableVec, Exit, VecIterator};
fn main() -> Result<()> {
brk_logger::init(Some(Path::new(".log")))?;
@@ -27,20 +28,89 @@ fn main() -> Result<()> {
let mut res: BTreeMap<&'static str, usize> = BTreeMap::default();
let mut height_to_first_txindex_iter = indexer.vecs.height_to_first_txindex.iter();
// let mut i = indexer.vecs.txz
let vecs = indexer.vecs;
let stores = indexer.stores;
indexer
.stores
let mut height_to_first_txindex_iter = vecs.height_to_first_txindex.iter();
let mut txindex_to_first_outputindex_iter = vecs.txindex_to_first_outputindex.iter();
let mut txindex_to_output_count_iter = computer.indexes.txindex_to_output_count.iter();
let mut outputindex_to_outputtype_iter = vecs.outputindex_to_outputtype.iter();
let mut outputindex_to_typeindex_iter = vecs.outputindex_to_typeindex.iter();
let mut p2pk65addressindex_to_p2pk65bytes_iter =
vecs.p2pk65addressindex_to_p2pk65bytes.iter();
let mut p2pk33addressindex_to_p2pk33bytes_iter =
vecs.p2pk33addressindex_to_p2pk33bytes.iter();
let mut p2pkhaddressindex_to_p2pkhbytes_iter =
vecs.p2pkhaddressindex_to_p2pkhbytes.iter();
let mut p2shaddressindex_to_p2shbytes_iter = vecs.p2shaddressindex_to_p2shbytes.iter();
let mut p2wpkhaddressindex_to_p2wpkhbytes_iter =
vecs.p2wpkhaddressindex_to_p2wpkhbytes.iter();
let mut p2wshaddressindex_to_p2wshbytes_iter =
vecs.p2wshaddressindex_to_p2wshbytes.iter();
let mut p2traddressindex_to_p2trbytes_iter = vecs.p2traddressindex_to_p2trbytes.iter();
let mut p2aaddressindex_to_p2abytes_iter = vecs.p2aaddressindex_to_p2abytes.iter();
let unknown = pools.get_unknown();
stores
.height_to_coinbase_tag
.iter()
.for_each(|(_, coinbase_tag)| {
let pool = pools.find_from_coinbase_tag(&coinbase_tag);
if let Some(pool) = pool {
*res.entry(pool.name).or_default() += 1;
} else {
*res.entry(pools.get_unknown().name).or_default() += 1;
}
.for_each(|(height, coinbase_tag)| {
let txindex = height_to_first_txindex_iter.unwrap_get_inner(height);
let outputindex = txindex_to_first_outputindex_iter.unwrap_get_inner(txindex);
let outputcount = txindex_to_output_count_iter.unwrap_get_inner(txindex);
let pool = (*outputindex..(*outputindex + *outputcount))
.map(OutputIndex::from)
.find_map(|outputindex| {
let outputtype =
outputindex_to_outputtype_iter.unwrap_get_inner(outputindex);
let typeindex =
outputindex_to_typeindex_iter.unwrap_get_inner(outputindex);
let address = match outputtype {
OutputType::P2PK65 => Some(AddressBytes::from(
p2pk65addressindex_to_p2pk65bytes_iter
.unwrap_get_inner(typeindex.into()),
)),
OutputType::P2PK33 => Some(AddressBytes::from(
p2pk33addressindex_to_p2pk33bytes_iter
.unwrap_get_inner(typeindex.into()),
)),
OutputType::P2PKH => Some(AddressBytes::from(
p2pkhaddressindex_to_p2pkhbytes_iter
.unwrap_get_inner(typeindex.into()),
)),
OutputType::P2SH => Some(AddressBytes::from(
p2shaddressindex_to_p2shbytes_iter
.unwrap_get_inner(typeindex.into()),
)),
OutputType::P2WPKH => Some(AddressBytes::from(
p2wpkhaddressindex_to_p2wpkhbytes_iter
.unwrap_get_inner(typeindex.into()),
)),
OutputType::P2WSH => Some(AddressBytes::from(
p2wshaddressindex_to_p2wshbytes_iter
.unwrap_get_inner(typeindex.into()),
)),
OutputType::P2TR => Some(AddressBytes::from(
p2traddressindex_to_p2trbytes_iter
.unwrap_get_inner(typeindex.into()),
)),
OutputType::P2A => Some(AddressBytes::from(
p2aaddressindex_to_p2abytes_iter
.unwrap_get_inner(typeindex.into()),
)),
_ => None,
};
address
.and_then(|address| pools.find_from_address(&address.to_string()))
})
.or_else(|| pools.find_from_coinbase_tag(&coinbase_tag))
.unwrap_or(unknown);
*res.entry(pool.name).or_default() += 1;
});
let mut v = res.into_iter().map(|(k, v)| (v, k)).collect::<Vec<_>>();

View File

@@ -1,5 +1,6 @@
use std::path::Path;
use allocative::Allocative;
use brk_error::Result;
use brk_indexer::Indexer;
use brk_structs::{
@@ -30,7 +31,7 @@ const TARGET_BLOCKS_PER_SEMESTER: u64 = 2 * TARGET_BLOCKS_PER_QUARTER;
const TARGET_BLOCKS_PER_YEAR: u64 = 2 * TARGET_BLOCKS_PER_SEMESTER;
const TARGET_BLOCKS_PER_DECADE: u64 = 10 * TARGET_BLOCKS_PER_YEAR;
#[derive(Clone)]
#[derive(Clone, Allocative)]
pub struct Vecs {
db: Database,
@@ -51,6 +52,9 @@ pub struct Vecs {
pub halvingepoch_to_timestamp: EagerVec<HalvingEpoch, Timestamp>,
pub timeindexes_to_timestamp: ComputedVecsFromDateIndex<Timestamp>,
pub indexes_to_block_count: ComputedVecsFromHeight<StoredU32>,
pub indexes_to_1w_block_count: ComputedVecsFromDateIndex<StoredU32>,
pub indexes_to_1m_block_count: ComputedVecsFromDateIndex<StoredU32>,
pub indexes_to_1y_block_count: ComputedVecsFromDateIndex<StoredU32>,
pub indexes_to_block_interval: ComputedVecsFromHeight<Timestamp>,
pub indexes_to_block_size: ComputedVecsFromHeight<StoredU64>,
pub indexes_to_block_vbytes: ComputedVecsFromHeight<StoredU64>,
@@ -389,6 +393,30 @@ impl Vecs {
indexes,
VecBuilderOptions::default().add_sum().add_cumulative(),
)?,
indexes_to_1w_block_count: ComputedVecsFromDateIndex::forced_import(
&db,
"1w_block_count",
Source::Compute,
version + VERSION + Version::ZERO,
indexes,
VecBuilderOptions::default().add_last(),
)?,
indexes_to_1m_block_count: ComputedVecsFromDateIndex::forced_import(
&db,
"1m_block_count",
Source::Compute,
version + VERSION + Version::ZERO,
indexes,
VecBuilderOptions::default().add_last(),
)?,
indexes_to_1y_block_count: ComputedVecsFromDateIndex::forced_import(
&db,
"1y_block_count",
Source::Compute,
version + VERSION + Version::ZERO,
indexes,
VecBuilderOptions::default().add_last(),
)?,
indexes_to_block_weight: ComputedVecsFromHeight::forced_import(
&db,
"block_weight",
@@ -937,6 +965,54 @@ impl Vecs {
},
)?;
self.indexes_to_1w_block_count.compute_all(
indexer,
indexes,
starting_indexes,
exit,
|v, _, _, starting_indexes, exit| {
v.compute_sum(
starting_indexes.dateindex,
self.indexes_to_block_count.dateindex.unwrap_sum(),
7,
exit,
)?;
Ok(())
},
)?;
self.indexes_to_1m_block_count.compute_all(
indexer,
indexes,
starting_indexes,
exit,
|v, _, _, starting_indexes, exit| {
v.compute_sum(
starting_indexes.dateindex,
self.indexes_to_block_count.dateindex.unwrap_sum(),
30,
exit,
)?;
Ok(())
},
)?;
self.indexes_to_1y_block_count.compute_all(
indexer,
indexes,
starting_indexes,
exit,
|v, _, _, starting_indexes, exit| {
v.compute_sum(
starting_indexes.dateindex,
self.indexes_to_block_count.dateindex.unwrap_sum(),
365,
exit,
)?;
Ok(())
},
)?;
let mut height_to_timestamp_iter = indexer.vecs.height_to_timestamp.iter();
self.height_to_interval.compute_transform(
starting_indexes.height,
@@ -1769,6 +1845,9 @@ impl Vecs {
self.indexes_to_hash_rate_1y_sma.vecs(),
self.timeindexes_to_timestamp.vecs(),
self.indexes_to_block_count.vecs(),
self.indexes_to_1w_block_count.vecs(),
self.indexes_to_1m_block_count.vecs(),
self.indexes_to_1y_block_count.vecs(),
self.indexes_to_block_interval.vecs(),
self.indexes_to_block_size.vecs(),
self.indexes_to_block_vbytes.vecs(),

View File

@@ -1,3 +1,4 @@
use allocative::Allocative;
use brk_error::{Error, Result};
use brk_structs::{CheckedSub, StoredU64, Version};
use vecdb::{
@@ -9,7 +10,7 @@ use crate::utils::get_percentile;
use super::ComputedType;
#[derive(Clone, Debug)]
#[derive(Clone, Debug, Allocative)]
pub struct EagerVecBuilder<I, T>
where
I: StoredIndex,

View File

@@ -1,3 +1,4 @@
use allocative::Allocative;
use brk_structs::Version;
use vecdb::{
AnyBoxedIterableVec, AnyCloneableIterableVec, AnyCollectableVec, FromCoarserIndex,
@@ -9,7 +10,7 @@ use crate::grouped::{EagerVecBuilder, VecBuilderOptions};
use super::ComputedType;
#[allow(clippy::type_complexity)]
#[derive(Clone)]
#[derive(Clone, Allocative)]
pub struct LazyVecBuilder<I, T, S1I, S2T>
where
I: StoredIndex,

View File

@@ -1,3 +1,4 @@
use allocative::Allocative;
use brk_error::Result;
use brk_indexer::Indexer;
@@ -10,7 +11,7 @@ use crate::{Indexes, grouped::LazyVecBuilder, indexes};
use super::{ComputedType, EagerVecBuilder, Source, VecBuilderOptions};
#[derive(Clone)]
#[derive(Clone, Allocative)]
pub struct ComputedVecsFromDateIndex<T>
where
T: ComputedType + PartialOrd,

View File

@@ -1,3 +1,4 @@
use allocative::Allocative;
use brk_error::Result;
use brk_indexer::Indexer;
@@ -15,7 +16,7 @@ use crate::{
use super::{ComputedType, EagerVecBuilder, VecBuilderOptions};
#[derive(Clone)]
#[derive(Clone, Allocative)]
pub struct ComputedVecsFromHeight<T>
where
T: ComputedType + PartialOrd,

View File

@@ -1,3 +1,4 @@
use allocative::Allocative;
use brk_error::Result;
use brk_indexer::Indexer;
use brk_structs::{
@@ -17,7 +18,7 @@ use crate::{
use super::{ComputedType, EagerVecBuilder, VecBuilderOptions};
#[derive(Clone)]
#[derive(Clone, Allocative)]
pub struct ComputedVecsFromTxindex<T>
where
T: ComputedType + PartialOrd,

View File

@@ -1,3 +1,4 @@
use allocative::Allocative;
use brk_error::Result;
use brk_indexer::Indexer;
use brk_structs::{Bitcoin, Dollars, Height, Sats, Version};
@@ -12,7 +13,7 @@ use crate::{
use super::{ComputedVecsFromHeight, VecBuilderOptions};
#[derive(Clone)]
#[derive(Clone, Allocative)]
pub struct ComputedValueVecsFromHeight {
pub sats: ComputedVecsFromHeight<Sats>,
pub bitcoin: ComputedVecsFromHeight<Bitcoin>,

View File

@@ -1,3 +1,4 @@
use allocative::Allocative;
use brk_error::Result;
use brk_indexer::Indexer;
use brk_structs::{Bitcoin, Close, Dollars, Height, Sats, TxIndex, Version};
@@ -10,7 +11,7 @@ use crate::{Indexes, grouped::Source, indexes, price};
use super::{ComputedVecsFromTxindex, VecBuilderOptions};
#[derive(Clone)]
#[derive(Clone, Allocative)]
pub struct ComputedValueVecsFromTxindex {
pub sats: ComputedVecsFromTxindex<Sats>,
pub bitcoin_txindex: LazyVecFrom1<TxIndex, Bitcoin, TxIndex, Sats>,

View File

@@ -1,6 +1,6 @@
#![doc = include_str!("../README.md")]
use std::{path::Path, thread};
use std::path::Path;
use brk_error::Result;
use brk_fetcher::Fetcher;
@@ -34,6 +34,7 @@ pub struct Computer {
pub indexes: indexes::Vecs,
pub constants: constants::Vecs,
pub market: market::Vecs,
pub pools: pools::Vecs,
pub price: Option<price::Vecs>,
pub chain: chain::Vecs,
pub stateful: stateful::Vecs,
@@ -41,7 +42,7 @@ pub struct Computer {
pub cointime: cointime::Vecs,
}
const VERSION: Version = Version::TWO;
const VERSION: Version = Version::new(4);
impl Computer {
/// Do NOT import multiple times or things will break !!!
@@ -86,6 +87,12 @@ impl Computer {
&indexes,
price.as_ref(),
)?,
pools: pools::Vecs::forced_import(
&computed_path,
VERSION + Version::ZERO,
&indexes,
price.as_ref(),
)?,
cointime: cointime::Vecs::forced_import(
&computed_path,
VERSION + Version::ZERO,
@@ -125,8 +132,8 @@ impl Computer {
)?;
}
thread::scope(|scope| -> Result<()> {
let chain = scope.spawn(|| {
std::thread::scope(|scope| -> Result<()> {
let chain = scope.spawn(|| -> Result<()> {
info!("Computing chain...");
self.chain.compute(
indexer,
@@ -134,23 +141,33 @@ impl Computer {
&starting_indexes,
self.price.as_ref(),
exit,
)
});
let market = scope.spawn(|| -> Result<()> {
if let Some(price) = self.price.as_ref() {
info!("Computing market...");
self.market
.compute(indexer, &self.indexes, price, &starting_indexes, exit)?;
}
)?;
Ok(())
});
if let Some(price) = self.price.as_ref() {
info!("Computing market...");
self.market
.compute(indexer, &self.indexes, price, &starting_indexes, exit)?;
}
// let _ = generate_allocation_files(&self.pools);
chain.join().unwrap()?;
market.join().unwrap()?;
Ok(())
})?;
self.pools.compute(
indexer,
&self.indexes,
&starting_indexes,
&self.chain,
self.price.as_ref(),
exit,
)?;
return Ok(());
info!("Computing stateful...");
self.stateful.compute(
indexer,
@@ -183,6 +200,7 @@ impl Computer {
self.chain.vecs(),
self.stateful.vecs(),
self.cointime.vecs(),
self.pools.vecs(),
self.fetched.as_ref().map_or(vec![], |v| v.vecs()),
self.price.as_ref().map_or(vec![], |v| v.vecs()),
]
@@ -195,3 +213,36 @@ impl Computer {
Box::leak(Box::new(self.clone()))
}
}
// pub fn generate_allocation_files(monitored: &pools::Vecs) -> Result<()> {
// info!("Generating Allocative files...");
// let mut flamegraph = allocative::FlameGraphBuilder::default();
// flamegraph.visit_root(monitored);
// let output = flamegraph.finish();
// let folder = format!(
// "at-{}",
// jiff::Timestamp::now().strftime("%Y-%m-%d_%Hh%Mm%Ss"),
// );
// let path = std::path::PathBuf::from(&format!("./target/flamegraph/{folder}"));
// std::fs::create_dir_all(&path)?;
// // fs::write(path.join("flamegraph.src"), &output.flamegraph())?;
// let mut fg_svg = Vec::new();
// inferno::flamegraph::from_reader(
// &mut inferno::flamegraph::Options::default(),
// output.flamegraph().write().as_bytes(),
// &mut fg_svg,
// )?;
// std::fs::write(path.join("flamegraph.svg"), &fg_svg)?;
// std::fs::write(path.join("warnings.txt"), output.warnings())?;
// info!("Successfully generated Allocative files");
// Ok(())
// }

View File

@@ -1,101 +1,123 @@
use allocative::Allocative;
use num_enum::{FromPrimitive, IntoPrimitive};
use serde::{Deserialize, Serialize};
use zerocopy_derive::{FromBytes, Immutable, IntoBytes, KnownLayout};
// Created from the list in `pools.rs`
// Can be used as index for said list
#[allow(clippy::upper_case_acronyms)]
#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize, FromPrimitive, IntoPrimitive)]
#[repr(u16)]
#[derive(
Debug,
Copy,
Clone,
PartialEq,
Eq,
PartialOrd,
Ord,
Serialize,
Deserialize,
FromPrimitive,
IntoPrimitive,
FromBytes,
IntoBytes,
Immutable,
KnownLayout,
Allocative,
)]
#[serde(rename_all = "lowercase")]
#[repr(u8)]
pub enum PoolId {
#[default]
Unknown,
BlockFills,
Ultimuspool,
UltimusPool,
TerraPool,
Luxor,
OneTHash,
BTCCom,
OneThash,
BtcCom,
Bitfarms,
HuobiPool,
WayiCn,
CanoePool,
BTCTop,
BtcTop,
BitcoinCom,
OneSevenFiveBtc,
GBMiners,
Pool175btc,
GbMiners,
AXbt,
ASICMiner,
AsicMiner,
BitMinter,
BitcoinRussia,
BTCServ,
BtcServ,
SimplecoinUs,
BTCGuild,
BtcGuild,
Eligius,
OzCoin,
EclipseMC,
MaxBTC,
EclipseMc,
MaxBtc,
TripleMining,
CoinLab,
FiftyBTC,
GHashIO,
STMiningCorp,
Pool50btc,
GhashIo,
StMiningCorp,
Bitparking,
MMPool,
Mmpool,
Polmine,
KnCMiner,
KncMiner,
Bitalo,
F2Pool,
HHTT,
Hhtt,
MegaBigPower,
MtRed,
NMCbit,
NmcBit,
YourbtcNet,
GiveMeCoins,
BraiinsPool,
AntPool,
MultiCoinCo,
BCPoolIo,
BcpoolIo,
Cointerra,
KanoPool,
SoloCK,
CKPool,
SoloCk,
CkPool,
NiceHash,
BitClub,
BitcoinAffiliateNetwork,
BTCC,
BWPool,
EXXAndBW,
Btcc,
BwPool,
ExxBw,
Bitsolo,
BitFury,
TwentyOneInc,
DigitalBTC,
DigitalBtc,
EightBaochi,
MyBTCcoinPool,
TBDice,
HASHPOOL,
MyBtcCoinPool,
TbDice,
HashPool,
Nexious,
BravoMining,
HotPool,
OKExPool,
BCMonster,
OkExPool,
BcMonster,
OneHash,
Bixin,
TATMASPool,
ViaBTC,
ConnectBTC,
BATPOOL,
TatmasPool,
ViaBtc,
ConnectBtc,
BatPool,
Waterhole,
DCExploration,
DCEX,
BTPOOL,
DcExploration,
Dcex,
BtPool,
FiftyEightCoin,
BitcoinIndiaLowercase,
BitcoinIndia,
ShawnP0wers,
PHashIO,
PHashIo,
RigPool,
HAOZHUZHU,
HaoZhuZhu,
SevenPool,
MiningKings,
HashBX,
DPOOL,
HashBx,
DPool,
Rawpool,
Haominer,
Helix,
@@ -114,62 +136,152 @@ pub enum PoolId {
BinancePool,
Minerium,
LubianCom,
OKKONG,
AAOPool,
EMCDPool,
FoundryUSA,
SBICrypto,
Okkong,
AaoPool,
EmcdPool,
FoundryUsa,
SbiCrypto,
ArkPool,
PureBTCCom,
MARAPool,
PureBtcCom,
MaraPool,
KuCoinPool,
EntrustCharityPool,
OKMINER,
OkMiner,
Titan,
PEGAPool,
BTCNuggets,
PegaPool,
BtcNuggets,
CloudHashing,
DigitalXMintsy,
Telco214,
BTCPoolParty,
BtcPoolParty,
Multipool,
TransactionCoinMining,
BTCDig,
TrickysBTCPool,
BTCMP,
BtcDig,
TrickysBtcPool,
BtcMp,
Eobot,
UNOMP,
Unomp,
Patels,
GoGreenLight,
BitcoinIndiaCamel, // duplicate-ish entry preserved with slight name change
EkanemBTC,
CanoeUppercase,
TigerLowercase,
OneM1X,
EkanemBtc,
Canoe,
Tiger,
OneM1x,
Zulupool,
SECPOOL,
OCEAN,
SecPool,
Ocean,
WhitePool,
Wiz,
Mononaut,
Rijndael,
Wk057,
FutureBitApolloSolo,
Emzy,
Knorrium,
CarbonNegative,
PortlandHODL,
PortlandHodl,
Phoenix,
Neopool,
MaxiPool,
DrDetroit,
BitFuFuPool,
LuckyPool,
MiningDutch,
PublicPool,
MiningSquared,
InnopolisTech,
Nymkappa,
BTCLab,
BtcLab,
Parasite,
Dummy158,
Dummy159,
Dummy160,
Dummy161,
Dummy162,
Dummy163,
Dummy164,
Dummy165,
Dummy166,
Dummy167,
Dummy168,
Dummy169,
Dummy170,
Dummy171,
Dummy172,
Dummy173,
Dummy174,
Dummy175,
Dummy176,
Dummy177,
Dummy178,
Dummy179,
Dummy180,
Dummy181,
Dummy182,
Dummy183,
Dummy184,
Dummy185,
Dummy186,
Dummy187,
Dummy188,
Dummy189,
Dummy190,
Dummy191,
Dummy192,
Dummy193,
Dummy194,
Dummy195,
Dummy196,
Dummy197,
Dummy198,
Dummy199,
Dummy200,
Dummy201,
Dummy202,
Dummy203,
Dummy204,
Dummy205,
Dummy206,
Dummy207,
Dummy208,
Dummy209,
Dummy210,
Dummy211,
Dummy212,
Dummy213,
Dummy214,
Dummy215,
Dummy216,
Dummy217,
Dummy218,
Dummy219,
Dummy220,
Dummy221,
Dummy222,
Dummy223,
Dummy224,
Dummy225,
Dummy226,
Dummy227,
Dummy228,
Dummy229,
Dummy230,
Dummy231,
Dummy232,
Dummy233,
Dummy234,
Dummy235,
Dummy236,
Dummy237,
Dummy238,
Dummy239,
Dummy240,
Dummy241,
Dummy242,
Dummy243,
Dummy244,
Dummy245,
Dummy246,
Dummy247,
Dummy248,
Dummy249,
Dummy250,
Dummy251,
Dummy252,
Dummy253,
Dummy254,
Dummy255,
}

View File

@@ -1,7 +1,240 @@
use std::{collections::BTreeMap, path::Path};
use allocative::Allocative;
use brk_error::Result;
use brk_indexer::Indexer;
use brk_store::AnyStore;
use brk_structs::{AddressBytes, Height, OutputIndex, OutputType};
use rayon::prelude::*;
use vecdb::{
AnyCollectableVec, AnyIterableVec, AnyStoredVec, AnyVec, Database, Exit, GenericStoredVec,
PAGE_SIZE, RawVec, StoredIndex, VecIterator, Version,
};
mod id;
mod pool;
#[allow(clippy::module_inception)]
mod pools;
mod vecs;
pub use id::*;
pub use pool::*;
pub use pools::*;
use crate::{
chain,
indexes::{self, Indexes},
price,
};
/// Per-pool computed vectors plus the block-height → pool attribution map.
#[derive(Clone, Allocative)]
pub struct Vecs {
    // Backing database for every vector in this module (opened under the
    // "pools" subdirectory, see `forced_import`).
    db: Database,
    // Static registry of known mining pools (built from the list in `pools.rs`).
    pools: &'static Pools,
    // For each block height, the id of the pool attributed to that block
    // (filled by `compute_height_to_pool`).
    height_to_pool: RawVec<Height, PoolId>,
    // One set of computed per-pool time series, keyed by pool id.
    vecs: BTreeMap<PoolId, vecs::Vecs>,
}
impl Vecs {
    /// Opens (or creates) the "pools" database and imports one vector set per
    /// known pool.
    ///
    /// The version incorporates the pool count, so adding or removing a pool in
    /// `pools.rs` invalidates previously computed data and forces a recompute.
    pub fn forced_import(
        parent_path: &Path,
        parent_version: Version,
        indexes: &indexes::Vecs,
        price: Option<&price::Vecs>,
    ) -> Result<Self> {
        let db = Database::open(&parent_path.join("pools"))?;
        db.set_min_len(PAGE_SIZE * 1_000_000)?;
        let pools = pools();
        // Tie stored-data validity to both a manual bump (3) and the pool list size.
        let version = parent_version + Version::new(3) + Version::new(pools.len() as u64);
        let this = Self {
            height_to_pool: RawVec::forced_import(&db, "pool", version + Version::ZERO)?,
            vecs: pools
                .iter()
                .map(|pool| {
                    vecs::Vecs::forced_import(
                        &db,
                        pool.id,
                        pools,
                        version + Version::ZERO,
                        indexes,
                        price,
                    )
                    .map(|vecs| (pool.id, vecs))
                })
                .collect::<Result<BTreeMap<_, _>>>()?,
            pools,
            db,
        };
        // Drop database regions that no longer correspond to any live vector.
        this.db.retain_regions(
            this.vecs()
                .into_iter()
                .flat_map(|v| v.region_names())
                .collect(),
        )?;
        Ok(this)
    }

    /// Computes every pool vector, then flushes (and punches holes in) the db.
    pub fn compute(
        &mut self,
        indexer: &Indexer,
        indexes: &indexes::Vecs,
        starting_indexes: &Indexes,
        chain: &chain::Vecs,
        price: Option<&price::Vecs>,
        exit: &Exit,
    ) -> Result<()> {
        self.compute_(indexer, indexes, starting_indexes, chain, price, exit)?;
        self.db.flush_then_punch()?;
        Ok(())
    }

    // Inner compute: run the attribution pass first, then per-pool vectors.
    fn compute_(
        &mut self,
        indexer: &Indexer,
        indexes: &indexes::Vecs,
        starting_indexes: &Indexes,
        chain: &chain::Vecs,
        price: Option<&price::Vecs>,
        exit: &Exit,
    ) -> Result<()> {
        self.compute_height_to_pool(indexer, indexes, starting_indexes, exit)?;
        // Each pool's vectors only read the shared `height_to_pool`, so they
        // can be computed in parallel.
        self.vecs.par_iter_mut().try_for_each(|(_, vecs)| {
            vecs.compute(
                indexer,
                indexes,
                starting_indexes,
                &self.height_to_pool,
                chain,
                price,
                exit,
            )
        })?;
        Ok(())
    }

    /// Attributes each block height to a mining pool.
    ///
    /// For every (not yet processed) height: inspect the coinbase transaction's
    /// outputs and try to match a known payout address; failing that, match the
    /// coinbase tag text; failing both, fall back to the `Unknown` pool.
    fn compute_height_to_pool(
        &mut self,
        indexer: &Indexer,
        indexes: &indexes::Vecs,
        starting_indexes: &Indexes,
        exit: &Exit,
    ) -> Result<()> {
        // Reset the vector if the upstream coinbase-tag store's version changed.
        self.height_to_pool.validate_computed_version_or_reset(
            self.height_to_pool.version() + indexer.stores.height_to_coinbase_tag.version(),
        )?;
        // Stateful iterators over the indexer's vectors; they are advanced in
        // lock-step with the height loop below, so declaration order matters
        // less than consistent `unwrap_get_inner` access per height.
        let mut height_to_first_txindex_iter = indexer.vecs.height_to_first_txindex.iter();
        let mut txindex_to_first_outputindex_iter =
            indexer.vecs.txindex_to_first_outputindex.iter();
        let mut txindex_to_output_count_iter = indexes.txindex_to_output_count.iter();
        let mut outputindex_to_outputtype_iter = indexer.vecs.outputindex_to_outputtype.iter();
        let mut outputindex_to_typeindex_iter = indexer.vecs.outputindex_to_typeindex.iter();
        let mut p2pk65addressindex_to_p2pk65bytes_iter =
            indexer.vecs.p2pk65addressindex_to_p2pk65bytes.iter();
        let mut p2pk33addressindex_to_p2pk33bytes_iter =
            indexer.vecs.p2pk33addressindex_to_p2pk33bytes.iter();
        let mut p2pkhaddressindex_to_p2pkhbytes_iter =
            indexer.vecs.p2pkhaddressindex_to_p2pkhbytes.iter();
        let mut p2shaddressindex_to_p2shbytes_iter =
            indexer.vecs.p2shaddressindex_to_p2shbytes.iter();
        let mut p2wpkhaddressindex_to_p2wpkhbytes_iter =
            indexer.vecs.p2wpkhaddressindex_to_p2wpkhbytes.iter();
        let mut p2wshaddressindex_to_p2wshbytes_iter =
            indexer.vecs.p2wshaddressindex_to_p2wshbytes.iter();
        let mut p2traddressindex_to_p2trbytes_iter =
            indexer.vecs.p2traddressindex_to_p2trbytes.iter();
        let mut p2aaddressindex_to_p2abytes_iter = indexer.vecs.p2aaddressindex_to_p2abytes.iter();
        let unknown = self.pools.get_unknown();
        // Resume from the earlier of the requested starting height and what is
        // already stored, so partially computed data is extended, not skipped.
        let min = starting_indexes
            .height
            .unwrap_to_usize()
            .min(self.height_to_pool.len());
        indexer
            .stores
            .height_to_coinbase_tag
            .iter()
            .skip(min)
            .try_for_each(|(height, coinbase_tag)| -> Result<()> {
                // The block's first tx is its coinbase.
                let txindex = height_to_first_txindex_iter.unwrap_get_inner(height);
                let outputindex = txindex_to_first_outputindex_iter.unwrap_get_inner(txindex);
                let outputcount = txindex_to_output_count_iter.unwrap_get_inner(txindex);
                // Scan the coinbase outputs for the first one whose address
                // matches a known pool payout address.
                let pool = (*outputindex..(*outputindex + *outputcount))
                    .map(OutputIndex::from)
                    .find_map(|outputindex| {
                        let outputtype =
                            outputindex_to_outputtype_iter.unwrap_get_inner(outputindex);
                        let typeindex = outputindex_to_typeindex_iter.unwrap_get_inner(outputindex);
                        // Map the output's type index back to its raw address
                        // bytes; non-address outputs (e.g. OP_RETURN, P2MS,
                        // unknown scripts) yield None and are skipped.
                        let address = match outputtype {
                            OutputType::P2PK65 => Some(AddressBytes::from(
                                p2pk65addressindex_to_p2pk65bytes_iter
                                    .unwrap_get_inner(typeindex.into()),
                            )),
                            OutputType::P2PK33 => Some(AddressBytes::from(
                                p2pk33addressindex_to_p2pk33bytes_iter
                                    .unwrap_get_inner(typeindex.into()),
                            )),
                            OutputType::P2PKH => Some(AddressBytes::from(
                                p2pkhaddressindex_to_p2pkhbytes_iter
                                    .unwrap_get_inner(typeindex.into()),
                            )),
                            OutputType::P2SH => Some(AddressBytes::from(
                                p2shaddressindex_to_p2shbytes_iter
                                    .unwrap_get_inner(typeindex.into()),
                            )),
                            OutputType::P2WPKH => Some(AddressBytes::from(
                                p2wpkhaddressindex_to_p2wpkhbytes_iter
                                    .unwrap_get_inner(typeindex.into()),
                            )),
                            OutputType::P2WSH => Some(AddressBytes::from(
                                p2wshaddressindex_to_p2wshbytes_iter
                                    .unwrap_get_inner(typeindex.into()),
                            )),
                            OutputType::P2TR => Some(AddressBytes::from(
                                p2traddressindex_to_p2trbytes_iter
                                    .unwrap_get_inner(typeindex.into()),
                            )),
                            OutputType::P2A => Some(AddressBytes::from(
                                p2aaddressindex_to_p2abytes_iter.unwrap_get_inner(typeindex.into()),
                            )),
                            _ => None,
                        };
                        address
                            .and_then(|address| self.pools.find_from_address(&address.to_string()))
                    })
                    // Address match failed: fall back to coinbase tag matching,
                    // then to the catch-all `Unknown` pool.
                    .or_else(|| self.pools.find_from_coinbase_tag(&coinbase_tag))
                    .unwrap_or(unknown);
                self.height_to_pool.push_if_needed(height, pool.id)?;
                Ok(())
            })?;
        self.height_to_pool.safe_flush(exit)?;
        Ok(())
    }

    /// Every collectable vector owned by this module (per-pool vectors plus
    /// the shared height → pool map).
    pub fn vecs(&self) -> Vec<&dyn AnyCollectableVec> {
        [
            self.vecs
                .iter()
                .flat_map(|(_, vecs)| vecs.vecs())
                .collect::<Vec<_>>(),
            vec![&self.height_to_pool],
        ]
        .into_iter()
        .flatten()
        .collect::<Vec<_>>()
    }
}

View File

@@ -1,10 +1,44 @@
use serde::{Deserialize, Serialize};
use allocative::Allocative;
use crate::pools::PoolId;
#[derive(Debug, Serialize, Deserialize)]
/// A known mining pool and the fingerprints used to attribute blocks to it.
#[derive(Debug, Allocative)]
pub struct Pool {
    // Stable enum id; per the `PoolId` docs it doubles as the index into the
    // static pool list (see `From<(usize, JSONPool)>`).
    pub id: PoolId,
    pub name: &'static str,
    // Payout addresses attributed to this pool — presumably matched by
    // `Pools::find_from_address`; TODO confirm against that implementation.
    pub addresses: Box<[&'static str]>,
    // Coinbase tag fragments — presumably matched by
    // `Pools::find_from_coinbase_tag`; TODO confirm.
    pub tags: Box<[&'static str]>,
    // Lowercased copies of `tags`, precomputed once at construction.
    pub tags_lowercase: Box<[String]>,
    pub link: &'static str,
}
impl Pool {
    /// Returns the pool id in its serde string form (the lowercase variant
    /// name, thanks to `PoolId`'s `#[serde(rename_all = "lowercase")]`).
    ///
    /// # Panics
    /// Panics if `PoolId` ever stops serializing to a JSON string — a
    /// programming error, now reported with context instead of a bare unwrap.
    pub fn serialized_id(&self) -> String {
        let value = serde_json::to_value(self.id).expect("PoolId must be serializable");
        value
            .as_str()
            .expect("PoolId must serialize to a JSON string")
            .to_string()
    }
}
impl From<(usize, JSONPool)> for Pool {
    /// Builds a `Pool` from its position in the static list plus its JSON
    /// definition. The position becomes the `PoolId` (a `u8` discriminant),
    /// so the list may hold at most 256 pools.
    fn from((index, pool): (usize, JSONPool)) -> Self {
        // Guard against silent wrap-around: `index as u8` would map pool 256
        // back to id 0 (`Unknown`) without any warning.
        let id = u8::try_from(index).expect("pool index exceeds u8 range (max 256 pools)");
        Self {
            id: id.into(),
            name: pool.name,
            addresses: pool.addresses,
            // Precompute lowercase tags once so later matching never needs to
            // re-lowercase per lookup.
            tags_lowercase: pool
                .tags
                .iter()
                .map(|t| t.to_lowercase())
                .collect::<Vec<_>>()
                .into_boxed_slice(),
            tags: pool.tags,
            link: pool.link,
        }
    }
}
#[derive(Debug)]
pub struct JSONPool {
pub name: &'static str,
pub addresses: Box<[&'static str]>,
pub tags: Box<[&'static str]>,

File diff suppressed because it is too large Load Diff

View File

@@ -0,0 +1,467 @@
use allocative::Allocative;
use brk_error::Result;
use brk_indexer::Indexer;
use brk_structs::{Height, Sats, StoredF32, StoredU16, StoredU32};
use vecdb::{AnyCollectableVec, AnyIterableVec, Database, Exit, StoredIndex, VecIterator, Version};
use crate::{
PoolId, Pools, chain,
grouped::{
ComputedValueVecsFromHeight, ComputedVecsFromDateIndex, ComputedVecsFromHeight, Source,
VecBuilderOptions,
},
indexes::{self, Indexes},
price,
};
/// All computed time series for a single mining pool.
#[derive(Clone, Allocative)]
pub struct Vecs {
    // The pool these vectors belong to.
    id: PoolId,
    // 1 at heights this pool mined, 0 otherwise (with sum/cumulative builders).
    indexes_to_blocks_mined: ComputedVecsFromHeight<StoredU32>,
    // Rolling 7/30/365-day block counts.
    indexes_to_1w_blocks_mined: ComputedVecsFromDateIndex<StoredU32>,
    indexes_to_1m_blocks_mined: ComputedVecsFromDateIndex<StoredU32>,
    indexes_to_1y_blocks_mined: ComputedVecsFromDateIndex<StoredU32>,
    // Value series in sats (and dollars when price data is available).
    indexes_to_subsidy: ComputedValueVecsFromHeight,
    indexes_to_fee: ComputedValueVecsFromHeight,
    indexes_to_coinbase: ComputedValueVecsFromHeight,
    // Share of all blocks mined by this pool: all-time and rolling windows
    // (computed via `compute_percentage` against the chain block counts).
    indexes_to_dominance: ComputedVecsFromDateIndex<StoredF32>,
    indexes_to_1d_dominance: ComputedVecsFromDateIndex<StoredF32>,
    indexes_to_1w_dominance: ComputedVecsFromDateIndex<StoredF32>,
    indexes_to_1m_dominance: ComputedVecsFromDateIndex<StoredF32>,
    indexes_to_1y_dominance: ComputedVecsFromDateIndex<StoredF32>,
    // Consecutive days without a block from this pool (0 on days it mined,
    // and 0 before its first block ever).
    indexes_to_days_since_block: ComputedVecsFromDateIndex<StoredU16>,
}
impl Vecs {
    /// Imports (or creates) every vector for one pool.
    ///
    /// All vector names are prefixed with the pool's serialized id so each
    /// pool's data lives under distinct region names in the shared database.
    pub fn forced_import(
        db: &Database,
        id: PoolId,
        pools: &Pools,
        parent_version: Version,
        indexes: &indexes::Vecs,
        price: Option<&price::Vecs>,
    ) -> Result<Self> {
        let pool = pools.get(id);
        let name = pool.serialized_id();
        // Helper to build per-pool vector names, e.g. "antpool_blocks_mined".
        let suffix = |s: &str| format!("{name}_{s}");
        // Dollar-denominated variants are only computed when price data exists.
        let compute_dollars = price.is_some();
        let version = parent_version + Version::ZERO;
        Ok(Self {
            id,
            indexes_to_blocks_mined: ComputedVecsFromHeight::forced_import(
                db,
                &suffix("blocks_mined"),
                Source::Compute,
                version + Version::ZERO,
                indexes,
                VecBuilderOptions::default().add_sum().add_cumulative(),
            )?,
            indexes_to_1w_blocks_mined: ComputedVecsFromDateIndex::forced_import(
                db,
                &suffix("1w_blocks_mined"),
                Source::Compute,
                version + Version::ZERO,
                indexes,
                VecBuilderOptions::default().add_last(),
            )?,
            indexes_to_1m_blocks_mined: ComputedVecsFromDateIndex::forced_import(
                db,
                &suffix("1m_blocks_mined"),
                Source::Compute,
                version + Version::ZERO,
                indexes,
                VecBuilderOptions::default().add_last(),
            )?,
            indexes_to_1y_blocks_mined: ComputedVecsFromDateIndex::forced_import(
                db,
                &suffix("1y_blocks_mined"),
                Source::Compute,
                version + Version::ZERO,
                indexes,
                VecBuilderOptions::default().add_last(),
            )?,
            indexes_to_subsidy: ComputedValueVecsFromHeight::forced_import(
                db,
                &suffix("subsidy"),
                Source::Compute,
                version + Version::ZERO,
                VecBuilderOptions::default().add_sum().add_cumulative(),
                compute_dollars,
                indexes,
            )?,
            indexes_to_fee: ComputedValueVecsFromHeight::forced_import(
                db,
                &suffix("fee"),
                Source::Compute,
                version + Version::ZERO,
                VecBuilderOptions::default().add_sum().add_cumulative(),
                compute_dollars,
                indexes,
            )?,
            indexes_to_coinbase: ComputedValueVecsFromHeight::forced_import(
                db,
                &suffix("coinbase"),
                Source::Compute,
                version + Version::ZERO,
                VecBuilderOptions::default().add_sum().add_cumulative(),
                compute_dollars,
                indexes,
            )?,
            indexes_to_dominance: ComputedVecsFromDateIndex::forced_import(
                db,
                &suffix("dominance"),
                Source::Compute,
                version + Version::ZERO,
                indexes,
                VecBuilderOptions::default().add_last(),
            )?,
            indexes_to_1d_dominance: ComputedVecsFromDateIndex::forced_import(
                db,
                &suffix("1d_dominance"),
                Source::Compute,
                version + Version::ZERO,
                indexes,
                VecBuilderOptions::default().add_last(),
            )?,
            indexes_to_1w_dominance: ComputedVecsFromDateIndex::forced_import(
                db,
                &suffix("1w_dominance"),
                Source::Compute,
                version + Version::ZERO,
                indexes,
                VecBuilderOptions::default().add_last(),
            )?,
            indexes_to_1m_dominance: ComputedVecsFromDateIndex::forced_import(
                db,
                &suffix("1m_dominance"),
                Source::Compute,
                version + Version::ZERO,
                indexes,
                VecBuilderOptions::default().add_last(),
            )?,
            indexes_to_1y_dominance: ComputedVecsFromDateIndex::forced_import(
                db,
                &suffix("1y_dominance"),
                Source::Compute,
                version + Version::ZERO,
                indexes,
                VecBuilderOptions::default().add_last(),
            )?,
            indexes_to_days_since_block: ComputedVecsFromDateIndex::forced_import(
                db,
                &suffix("days_since_block"),
                Source::Compute,
                version + Version::ZERO,
                indexes,
                VecBuilderOptions::default().add_last(),
            )?,
        })
    }

    /// Computes every series for this pool.
    ///
    /// Order matters: `indexes_to_blocks_mined` is computed first because all
    /// later series (rolling counts, subsidy/fee/coinbase gating, dominance,
    /// days-since-block) read from it.
    #[allow(clippy::too_many_arguments)]
    pub fn compute(
        &mut self,
        indexer: &Indexer,
        indexes: &indexes::Vecs,
        starting_indexes: &Indexes,
        height_to_pool: &impl AnyIterableVec<Height, PoolId>,
        chain: &chain::Vecs,
        price: Option<&price::Vecs>,
        exit: &Exit,
    ) -> Result<()> {
        // 1 if this pool mined the block at each height, else 0.
        self.indexes_to_blocks_mined.compute_all(
            indexer,
            indexes,
            starting_indexes,
            exit,
            |vec, _, _, starting_indexes, exit| {
                vec.compute_transform(
                    starting_indexes.height,
                    height_to_pool,
                    |(h, id, ..)| {
                        (
                            h,
                            if id == self.id {
                                StoredU32::ONE
                            } else {
                                StoredU32::ZERO
                            },
                        )
                    },
                    exit,
                )?;
                Ok(())
            },
        )?;
        // Rolling 7-day block count from the daily sums.
        self.indexes_to_1w_blocks_mined.compute_all(
            indexer,
            indexes,
            starting_indexes,
            exit,
            |v, _, _, starting_indexes, exit| {
                v.compute_sum(
                    starting_indexes.dateindex,
                    self.indexes_to_blocks_mined.dateindex.unwrap_sum(),
                    7,
                    exit,
                )?;
                Ok(())
            },
        )?;
        // Rolling 30-day block count.
        self.indexes_to_1m_blocks_mined.compute_all(
            indexer,
            indexes,
            starting_indexes,
            exit,
            |v, _, _, starting_indexes, exit| {
                v.compute_sum(
                    starting_indexes.dateindex,
                    self.indexes_to_blocks_mined.dateindex.unwrap_sum(),
                    30,
                    exit,
                )?;
                Ok(())
            },
        )?;
        // Rolling 365-day block count.
        self.indexes_to_1y_blocks_mined.compute_all(
            indexer,
            indexes,
            starting_indexes,
            exit,
            |v, _, _, starting_indexes, exit| {
                v.compute_sum(
                    starting_indexes.dateindex,
                    self.indexes_to_blocks_mined.dateindex.unwrap_sum(),
                    365,
                    exit,
                )?;
                Ok(())
            },
        )?;
        let height_to_blocks_mined = self.indexes_to_blocks_mined.height.as_ref().unwrap();
        // Subsidy attributed to this pool: the chain-wide per-height subsidy,
        // gated by whether this pool mined that height (mined is 0 or 1).
        self.indexes_to_subsidy.compute_all(
            indexer,
            indexes,
            price,
            starting_indexes,
            exit,
            |vec, _, _, starting_indexes, exit| {
                vec.compute_transform2(
                    starting_indexes.height,
                    height_to_blocks_mined,
                    chain.indexes_to_subsidy.sats.height.as_ref().unwrap(),
                    |(h, mined, sats, ..)| {
                        (
                            h,
                            if mined == StoredU32::ONE {
                                sats
                            } else {
                                Sats::ZERO
                            },
                        )
                    },
                    exit,
                )?;
                Ok(())
            },
        )?;
        // Fees attributed to this pool, gated the same way.
        // NOTE(review): fee reads `height.unwrap_sum()` while subsidy/coinbase
        // read the plain `height` vec — presumably because fees are summed per
        // block from transactions; confirm this asymmetry is intentional.
        self.indexes_to_fee.compute_all(
            indexer,
            indexes,
            price,
            starting_indexes,
            exit,
            |vec, _, _, starting_indexes, exit| {
                vec.compute_transform2(
                    starting_indexes.height,
                    height_to_blocks_mined,
                    chain.indexes_to_fee.sats.height.unwrap_sum(),
                    |(h, mined, sats, ..)| {
                        (
                            h,
                            if mined == StoredU32::ONE {
                                sats
                            } else {
                                Sats::ZERO
                            },
                        )
                    },
                    exit,
                )?;
                Ok(())
            },
        )?;
        // Coinbase (subsidy + fees) attributed to this pool, gated the same way.
        self.indexes_to_coinbase.compute_all(
            indexer,
            indexes,
            price,
            starting_indexes,
            exit,
            |vec, _, _, starting_indexes, exit| {
                vec.compute_transform2(
                    starting_indexes.height,
                    height_to_blocks_mined,
                    chain.indexes_to_coinbase.sats.height.as_ref().unwrap(),
                    |(h, mined, sats, ..)| {
                        (
                            h,
                            if mined == StoredU32::ONE {
                                sats
                            } else {
                                Sats::ZERO
                            },
                        )
                    },
                    exit,
                )?;
                Ok(())
            },
        )?;
        // All-time dominance: cumulative pool blocks / cumulative chain blocks.
        self.indexes_to_dominance.compute_all(
            indexer,
            indexes,
            starting_indexes,
            exit,
            |vec, _, _, starting_indexes, exit| {
                vec.compute_percentage(
                    starting_indexes.dateindex,
                    self.indexes_to_blocks_mined.dateindex.unwrap_cumulative(),
                    chain.indexes_to_block_count.dateindex.unwrap_cumulative(),
                    exit,
                )?;
                Ok(())
            },
        )?;
        // Daily dominance: same ratio over the daily sums.
        self.indexes_to_1d_dominance.compute_all(
            indexer,
            indexes,
            starting_indexes,
            exit,
            |vec, _, _, starting_indexes, exit| {
                vec.compute_percentage(
                    starting_indexes.dateindex,
                    self.indexes_to_blocks_mined.dateindex.unwrap_sum(),
                    chain.indexes_to_block_count.dateindex.unwrap_sum(),
                    exit,
                )?;
                Ok(())
            },
        )?;
        // 7-day dominance over the rolling-window counts.
        self.indexes_to_1w_dominance.compute_all(
            indexer,
            indexes,
            starting_indexes,
            exit,
            |vec, _, _, starting_indexes, exit| {
                vec.compute_percentage(
                    starting_indexes.dateindex,
                    self.indexes_to_1w_blocks_mined.dateindex.as_ref().unwrap(),
                    chain.indexes_to_1w_block_count.dateindex.as_ref().unwrap(),
                    exit,
                )?;
                Ok(())
            },
        )?;
        // 30-day dominance.
        self.indexes_to_1m_dominance.compute_all(
            indexer,
            indexes,
            starting_indexes,
            exit,
            |vec, _, _, starting_indexes, exit| {
                vec.compute_percentage(
                    starting_indexes.dateindex,
                    self.indexes_to_1m_blocks_mined.dateindex.as_ref().unwrap(),
                    chain.indexes_to_1m_block_count.dateindex.as_ref().unwrap(),
                    exit,
                )?;
                Ok(())
            },
        )?;
        // 365-day dominance.
        self.indexes_to_1y_dominance.compute_all(
            indexer,
            indexes,
            starting_indexes,
            exit,
            |vec, _, _, starting_indexes, exit| {
                vec.compute_percentage(
                    starting_indexes.dateindex,
                    self.indexes_to_1y_blocks_mined.dateindex.as_ref().unwrap(),
                    chain.indexes_to_1y_block_count.dateindex.as_ref().unwrap(),
                    exit,
                )?;
                Ok(())
            },
        )?;
        // Days since this pool last mined a block. Counter increments on days
        // with no block after the pool's first ever block (cumulative > 0 and
        // daily sum == 0); resets to 0 on any day the pool mined.
        self.indexes_to_days_since_block.compute_all(
            indexer,
            indexes,
            starting_indexes,
            exit,
            |v, _, _, starting_indexes, exit| {
                let mut prev = None;
                v.compute_transform2(
                    starting_indexes.dateindex,
                    self.indexes_to_blocks_mined.dateindex.unwrap_sum(),
                    self.indexes_to_blocks_mined.dateindex.unwrap_cumulative(),
                    |(i, sum, cumulative, slf)| {
                        // Seed `prev` from the already-stored value at i - 1 so
                        // a resumed computation continues the running streak.
                        if prev.is_none() {
                            let i = i.unwrap_to_usize();
                            prev.replace(if i > 0 {
                                slf.into_iter().unwrap_get_inner_(i - 1)
                            } else {
                                StoredU16::ZERO
                            });
                        }
                        let days = if !cumulative.is_zero() && sum.is_zero() {
                            prev.unwrap() + StoredU16::ONE
                        } else {
                            StoredU16::ZERO
                        };
                        prev.replace(days);
                        (i, days)
                    },
                    exit,
                )?;
                Ok(())
            },
        )?;
        Ok(())
    }

    /// Every collectable vector owned by this pool's series.
    pub fn vecs(&self) -> Vec<&dyn AnyCollectableVec> {
        [
            self.indexes_to_blocks_mined.vecs(),
            self.indexes_to_1w_blocks_mined.vecs(),
            self.indexes_to_1m_blocks_mined.vecs(),
            self.indexes_to_1y_blocks_mined.vecs(),
            self.indexes_to_subsidy.vecs(),
            self.indexes_to_fee.vecs(),
            self.indexes_to_coinbase.vecs(),
            self.indexes_to_dominance.vecs(),
            self.indexes_to_1d_dominance.vecs(),
            self.indexes_to_1w_dominance.vecs(),
            self.indexes_to_1m_dominance.vecs(),
            self.indexes_to_1y_dominance.vecs(),
            self.indexes_to_days_since_block.vecs(),
        ]
        .into_iter()
        .flatten()
        .collect()
    }
}

View File

@@ -23,9 +23,12 @@ pub use indexes::*;
pub use stores::*;
pub use vecs::*;
// One version for all data sources
// Increment on change OR addition
const VERSION: Version = Version::new(21);
const SNAPSHOT_BLOCK_RANGE: usize = 1_000;
const COLLISIONS_CHECKED_UP_TO: Height = Height::new(909_150);
const VERSION: Version = Version::ONE;
#[derive(Clone)]
pub struct Indexer {
@@ -39,10 +42,10 @@ impl Indexer {
let path = outputs_dir.join("indexed");
let vecs = Vecs::forced_import(&path, VERSION + Version::ZERO)?;
let vecs = Vecs::forced_import(&path, VERSION)?;
info!("Imported vecs");
let stores = Stores::forced_import(&path, VERSION + Version::ZERO)?;
let stores = Stores::forced_import(&path, VERSION)?;
info!("Imported stores");
Ok(Self { vecs, stores })

View File

@@ -27,8 +27,6 @@ pub struct Stores {
ByAddressType<Store<TypeIndexWithOutputindex, Unit>>,
}
const VERSION: Version = Version::ZERO;
impl Stores {
pub fn forced_import(parent: &Path, version: Version) -> Result<Self> {
let path = parent.join("stores");
@@ -49,34 +47,21 @@ impl Stores {
&keyspace,
&path,
"addressbyteshash_to_typeindex",
version + VERSION + Version::ZERO,
version,
None,
)
});
let blockhashprefix_to_height = scope.spawn(|| {
Store::import(
&keyspace,
&path,
"blockhashprefix_to_height",
version + VERSION + Version::ZERO,
None,
)
});
let txidprefix_to_txindex = scope.spawn(|| {
Store::import(
&keyspace,
&path,
"txidprefix_to_txindex",
version + VERSION + Version::ZERO,
None,
)
Store::import(&keyspace, &path, "blockhashprefix_to_height", version, None)
});
let txidprefix_to_txindex = scope
.spawn(|| Store::import(&keyspace, &path, "txidprefix_to_txindex", version, None));
let p2aaddressindex_with_outputindex = scope.spawn(|| {
Store::import(
&keyspace,
&path,
"p2aaddressindex_with_outputindex",
version + VERSION + Version::ZERO,
version,
Some(false),
)
});
@@ -85,7 +70,7 @@ impl Stores {
&keyspace,
&path,
"p2pk33addressindex_with_outputindex",
version + VERSION + Version::ZERO,
version,
Some(false),
)
});
@@ -94,7 +79,7 @@ impl Stores {
&keyspace,
&path,
"p2pk65addressindex_with_outputindex",
version + VERSION + Version::ZERO,
version,
Some(false),
)
});
@@ -103,7 +88,7 @@ impl Stores {
&keyspace,
&path,
"p2pkhaddressindex_with_outputindex",
version + VERSION + Version::ZERO,
version,
Some(false),
)
});
@@ -112,7 +97,7 @@ impl Stores {
&keyspace,
&path,
"p2shaddressindex_with_outputindex",
version + VERSION + Version::ZERO,
version,
Some(false),
)
});
@@ -121,7 +106,7 @@ impl Stores {
&keyspace,
&path,
"p2traddressindex_with_outputindex",
version + VERSION + Version::ZERO,
version,
Some(false),
)
});
@@ -130,7 +115,7 @@ impl Stores {
&keyspace,
&path,
"p2wpkhaddressindex_with_outputindex",
version + VERSION + Version::ZERO,
version,
Some(false),
)
});
@@ -139,18 +124,13 @@ impl Stores {
&keyspace,
&path,
"p2wshaddressindex_with_outputindex",
version + VERSION + Version::ZERO,
version,
Some(false),
)
});
let height_to_coinbase_tag = Store::import(
&keyspace,
&path,
"height_to_coinbase_tag",
version + VERSION + Version::ZERO,
None,
)?;
let height_to_coinbase_tag =
Store::import(&keyspace, &path, "height_to_coinbase_tag", version, None)?;
Ok(Self {
keyspace: keyspace.clone(),

View File

@@ -17,8 +17,6 @@ use vecdb::{
use crate::Indexes;
const VERSION: Version = Version::ZERO;
#[derive(Clone)]
pub struct Vecs {
db: Database,
@@ -78,222 +76,118 @@ impl Vecs {
db.set_min_len(PAGE_SIZE * 50_000_000)?;
let this = Self {
emptyoutputindex_to_txindex: CompressedVec::forced_import(
&db,
"txindex",
version + VERSION + Version::ZERO,
)?,
height_to_blockhash: RawVec::forced_import(
&db,
"blockhash",
version + VERSION + Version::ZERO,
)?,
height_to_difficulty: CompressedVec::forced_import(
&db,
"difficulty",
version + VERSION + Version::ZERO,
)?,
emptyoutputindex_to_txindex: CompressedVec::forced_import(&db, "txindex", version)?,
height_to_blockhash: RawVec::forced_import(&db, "blockhash", version)?,
height_to_difficulty: CompressedVec::forced_import(&db, "difficulty", version)?,
height_to_first_emptyoutputindex: CompressedVec::forced_import(
&db,
"first_emptyoutputindex",
version + VERSION + Version::ZERO,
version,
)?,
height_to_first_inputindex: CompressedVec::forced_import(
&db,
"first_inputindex",
version + VERSION + Version::ZERO,
version,
)?,
height_to_first_opreturnindex: CompressedVec::forced_import(
&db,
"first_opreturnindex",
version + VERSION + Version::ZERO,
version,
)?,
height_to_first_outputindex: CompressedVec::forced_import(
&db,
"first_outputindex",
version + VERSION + Version::ZERO,
version,
)?,
height_to_first_p2aaddressindex: CompressedVec::forced_import(
&db,
"first_p2aaddressindex",
version + VERSION + Version::ZERO,
version,
)?,
height_to_first_p2msoutputindex: CompressedVec::forced_import(
&db,
"first_p2msoutputindex",
version + VERSION + Version::ZERO,
version,
)?,
height_to_first_p2pk33addressindex: CompressedVec::forced_import(
&db,
"first_p2pk33addressindex",
version + VERSION + Version::ZERO,
version,
)?,
height_to_first_p2pk65addressindex: CompressedVec::forced_import(
&db,
"first_p2pk65addressindex",
version + VERSION + Version::ZERO,
version,
)?,
height_to_first_p2pkhaddressindex: CompressedVec::forced_import(
&db,
"first_p2pkhaddressindex",
version + VERSION + Version::ZERO,
version,
)?,
height_to_first_p2shaddressindex: CompressedVec::forced_import(
&db,
"first_p2shaddressindex",
version + VERSION + Version::ZERO,
version,
)?,
height_to_first_p2traddressindex: CompressedVec::forced_import(
&db,
"first_p2traddressindex",
version + VERSION + Version::ZERO,
version,
)?,
height_to_first_p2wpkhaddressindex: CompressedVec::forced_import(
&db,
"first_p2wpkhaddressindex",
version + VERSION + Version::ZERO,
version,
)?,
height_to_first_p2wshaddressindex: CompressedVec::forced_import(
&db,
"first_p2wshaddressindex",
version + VERSION + Version::ZERO,
)?,
height_to_first_txindex: CompressedVec::forced_import(
&db,
"first_txindex",
version + VERSION + Version::ZERO,
version,
)?,
height_to_first_txindex: CompressedVec::forced_import(&db, "first_txindex", version)?,
height_to_first_unknownoutputindex: CompressedVec::forced_import(
&db,
"first_unknownoutputindex",
version + VERSION + Version::ZERO,
)?,
height_to_timestamp: CompressedVec::forced_import(
&db,
"timestamp",
version + VERSION + Version::ZERO,
)?,
height_to_total_size: CompressedVec::forced_import(
&db,
"total_size",
version + VERSION + Version::ZERO,
)?,
height_to_weight: CompressedVec::forced_import(
&db,
"weight",
version + VERSION + Version::ZERO,
)?,
inputindex_to_outputindex: RawVec::forced_import(
&db,
"outputindex",
version + VERSION + Version::ZERO,
)?,
opreturnindex_to_txindex: CompressedVec::forced_import(
&db,
"txindex",
version + VERSION + Version::ZERO,
)?,
outputindex_to_outputtype: RawVec::forced_import(
&db,
"outputtype",
version + VERSION + Version::ZERO,
)?,
outputindex_to_typeindex: RawVec::forced_import(
&db,
"typeindex",
version + VERSION + Version::ZERO,
)?,
outputindex_to_value: RawVec::forced_import(
&db,
"value",
version + VERSION + Version::ZERO,
)?,
p2aaddressindex_to_p2abytes: RawVec::forced_import(
&db,
"p2abytes",
version + VERSION + Version::ZERO,
)?,
p2msoutputindex_to_txindex: CompressedVec::forced_import(
&db,
"txindex",
version + VERSION + Version::ZERO,
)?,
p2pk33addressindex_to_p2pk33bytes: RawVec::forced_import(
&db,
"p2pk33bytes",
version + VERSION + Version::ZERO,
)?,
p2pk65addressindex_to_p2pk65bytes: RawVec::forced_import(
&db,
"p2pk65bytes",
version + VERSION + Version::ZERO,
)?,
p2pkhaddressindex_to_p2pkhbytes: RawVec::forced_import(
&db,
"p2pkhbytes",
version + VERSION + Version::ZERO,
)?,
p2shaddressindex_to_p2shbytes: RawVec::forced_import(
&db,
"p2shbytes",
version + VERSION + Version::ZERO,
)?,
p2traddressindex_to_p2trbytes: RawVec::forced_import(
&db,
"p2trbytes",
version + VERSION + Version::ZERO,
)?,
p2wpkhaddressindex_to_p2wpkhbytes: RawVec::forced_import(
&db,
"p2wpkhbytes",
version + VERSION + Version::ZERO,
)?,
p2wshaddressindex_to_p2wshbytes: RawVec::forced_import(
&db,
"p2wshbytes",
version + VERSION + Version::ZERO,
)?,
txindex_to_base_size: CompressedVec::forced_import(
&db,
"base_size",
version + VERSION + Version::ZERO,
version,
)?,
height_to_timestamp: CompressedVec::forced_import(&db, "timestamp", version)?,
height_to_total_size: CompressedVec::forced_import(&db, "total_size", version)?,
height_to_weight: CompressedVec::forced_import(&db, "weight", version)?,
inputindex_to_outputindex: RawVec::forced_import(&db, "outputindex", version)?,
opreturnindex_to_txindex: CompressedVec::forced_import(&db, "txindex", version)?,
outputindex_to_outputtype: RawVec::forced_import(&db, "outputtype", version)?,
outputindex_to_typeindex: RawVec::forced_import(&db, "typeindex", version)?,
outputindex_to_value: RawVec::forced_import(&db, "value", version)?,
p2aaddressindex_to_p2abytes: RawVec::forced_import(&db, "p2abytes", version)?,
p2msoutputindex_to_txindex: CompressedVec::forced_import(&db, "txindex", version)?,
p2pk33addressindex_to_p2pk33bytes: RawVec::forced_import(&db, "p2pk33bytes", version)?,
p2pk65addressindex_to_p2pk65bytes: RawVec::forced_import(&db, "p2pk65bytes", version)?,
p2pkhaddressindex_to_p2pkhbytes: RawVec::forced_import(&db, "p2pkhbytes", version)?,
p2shaddressindex_to_p2shbytes: RawVec::forced_import(&db, "p2shbytes", version)?,
p2traddressindex_to_p2trbytes: RawVec::forced_import(&db, "p2trbytes", version)?,
p2wpkhaddressindex_to_p2wpkhbytes: RawVec::forced_import(&db, "p2wpkhbytes", version)?,
p2wshaddressindex_to_p2wshbytes: RawVec::forced_import(&db, "p2wshbytes", version)?,
txindex_to_base_size: CompressedVec::forced_import(&db, "base_size", version)?,
txindex_to_first_inputindex: CompressedVec::forced_import(
&db,
"first_inputindex",
version + VERSION + Version::ZERO,
version,
)?,
txindex_to_first_outputindex: CompressedVec::forced_import(
&db,
"first_outputindex",
version + VERSION + Version::ZERO,
version,
)?,
txindex_to_is_explicitly_rbf: CompressedVec::forced_import(
&db,
"is_explicitly_rbf",
version + VERSION + Version::ZERO,
)?,
txindex_to_rawlocktime: CompressedVec::forced_import(
&db,
"rawlocktime",
version + VERSION + Version::ZERO,
)?,
txindex_to_total_size: CompressedVec::forced_import(
&db,
"total_size",
version + VERSION + Version::ZERO,
)?,
txindex_to_txid: RawVec::forced_import(&db, "txid", version + VERSION + Version::ZERO)?,
txindex_to_txversion: CompressedVec::forced_import(
&db,
"txversion",
version + VERSION + Version::ZERO,
)?,
unknownoutputindex_to_txindex: CompressedVec::forced_import(
&db,
"txindex",
version + VERSION + Version::ZERO,
version,
)?,
txindex_to_rawlocktime: CompressedVec::forced_import(&db, "rawlocktime", version)?,
txindex_to_total_size: CompressedVec::forced_import(&db, "total_size", version)?,
txindex_to_txid: RawVec::forced_import(&db, "txid", version)?,
txindex_to_txversion: CompressedVec::forced_import(&db, "txversion", version)?,
unknownoutputindex_to_txindex: CompressedVec::forced_import(&db, "txindex", version)?,
db,
};

View File

@@ -4,6 +4,7 @@ use serde::Deserialize;
#[allow(clippy::upper_case_acronyms)]
#[derive(Debug, Clone, Copy, PartialEq, Eq, Deserialize, JsonSchema)]
#[serde(rename_all = "lowercase")]
pub enum Format {
#[serde(alias = "json")]
JSON,

View File

@@ -29,20 +29,22 @@ impl<'de> Deserialize<'de> for MaybeIds {
{
match serde_json::Value::deserialize(deserializer)? {
serde_json::Value::String(str) => {
if str.len() > MAX_STRING_SIZE {
if str.len() <= MAX_STRING_SIZE {
Ok(MaybeIds(sanitize_ids(
str.split(",").map(|s| s.to_string()),
)))
} else {
dbg!(str.len(), MAX_STRING_SIZE);
Err(serde::de::Error::custom("Given parameter is too long"))
}
}
serde_json::Value::Array(vec) => {
if vec.len() > MAX_VECS {
if vec.len() <= MAX_VECS {
Ok(MaybeIds(sanitize_ids(
vec.into_iter().map(|s| s.as_str().unwrap().to_string()),
)))
} else {
dbg!(vec.len(), MAX_VECS);
Err(serde::de::Error::custom("Given parameter is too long"))
}
}

View File

@@ -12,6 +12,7 @@ use schemars::JsonSchema;
use serde::Deserialize;
#[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord, JsonSchema)]
#[serde(rename_all = "lowercase")]
pub enum Index {
#[schemars(description = "Date/day index")]
DateIndex,

View File

@@ -6,7 +6,7 @@ use tabled::Tabled as TabledTabled;
use crate::Format;
#[derive(Debug, Serialize)]
#[serde(untagged)]
#[serde(untagged, rename_all = "lowercase")]
pub enum Output {
Json(Value),
CSV(String),

View File

@@ -1,7 +1,7 @@
use std::path::Path;
use bitcoincore_rpc::{Auth, Client, Result};
use brk_parser::Parser;
use brk_parser::{BlockExtended, Parser};
use brk_structs::Height;
#[allow(clippy::needless_doctest_main)]
@@ -21,55 +21,26 @@ fn main() -> Result<()> {
let parser = Parser::new(bitcoin_dir.join("blocks"), Some(brk_dir), rpc);
let start = None;
let end = None;
parser
.parse(start, end)
.iter()
.for_each(|(height, _block, hash)| {
println!("{height}: {}", hash);
});
// let start = None;
// let end = None;
// parser
// .parse(start, end)
// .iter()
// .for_each(|(height, _block, hash)| {
// println!("{height}: {}", hash);
// });
let block_0 = parser.get(Height::new(0));
println!(
"{}",
block_0
.txdata
.first()
.unwrap()
.output
.first()
.unwrap()
.script_pubkey
);
dbg!("{}", block_0.coinbase_tag());
let block_158251 = parser.get(Height::new(158251));
println!(
"{}",
block_158251
.txdata
.first()
.unwrap()
.output
.first()
.unwrap()
.script_pubkey
);
dbg!("{}", block_158251.coinbase_tag());
let block_173195 = parser.get(Height::new(173195));
dbg!("{}", block_173195.coinbase_tag());
let block_840_000 = parser.get(Height::new(840_004));
println!(
"{}",
block_840_000
.txdata
.first()
.unwrap()
.output
.first()
.unwrap()
.value
);
dbg!("{}", block_840_000.coinbase_tag());
dbg!(i.elapsed());

View File

@@ -1,19 +1,20 @@
use std::borrow::Cow;
use bitcoin::Block;
pub trait BlockExtended {
fn coinbase_tag(&self) -> String;
fn coinbase_tag(&self) -> Cow<'_, str>;
}
impl BlockExtended for Block {
fn coinbase_tag(&self) -> String {
let Some(input) = self.txdata.first().and_then(|tx| tx.input.first()) else {
return String::new();
};
let bytes = input.script_sig.as_bytes();
String::from_utf8_lossy(bytes)
.chars()
.filter(|&c| c != '\u{FFFD}' && (c >= ' ' || c == '\n' || c == '\r' || c == '\t'))
.take(1_024)
.collect()
fn coinbase_tag(&self) -> Cow<'_, str> {
String::from_utf8_lossy(
self.txdata
.first()
.and_then(|tx| tx.input.first())
.unwrap()
.script_sig
.as_bytes(),
)
}
}

View File

@@ -10,6 +10,8 @@ rust-version.workspace = true
build = "build.rs"
[dependencies]
allocative = { workspace = true }
allocative_derive = { workspace = true }
bitcoin = { workspace = true }
bitcoincore-rpc = { workspace = true }
brk_error = {workspace = true}

View File

@@ -25,6 +25,25 @@ pub enum AddressBytes {
P2A(P2ABytes),
}
impl fmt::Display for AddressBytes {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
write!(
f,
"{}",
match self {
AddressBytes::P2PK65(bytes) => bytes.to_string(),
AddressBytes::P2PK33(bytes) => bytes.to_string(),
AddressBytes::P2PKH(bytes) => bytes.to_string(),
AddressBytes::P2SH(bytes) => bytes.to_string(),
AddressBytes::P2WPKH(bytes) => bytes.to_string(),
AddressBytes::P2WSH(bytes) => bytes.to_string(),
AddressBytes::P2TR(bytes) => bytes.to_string(),
AddressBytes::P2A(bytes) => bytes.to_string(),
}
)
}
}
impl AddressBytes {
pub fn as_slice(&self) -> &[u8] {
match self {

View File

@@ -44,6 +44,7 @@ impl Serialize for AnyAddressIndex {
}
#[derive(Debug, Serialize)]
#[serde(rename_all = "lowercase")]
pub enum AnyAddressDataIndexEnum {
Loaded(LoadedAddressIndex),
Empty(EmptyAddressIndex),

View File

@@ -3,6 +3,7 @@ use std::{
ops::{Add, AddAssign, Div, Mul},
};
use allocative::Allocative;
use serde::Serialize;
use vecdb::{CheckedSub, StoredCompressed};
use zerocopy_derive::{FromBytes, Immutable, IntoBytes, KnownLayout};
@@ -20,6 +21,7 @@ use super::{Sats, StoredF64};
KnownLayout,
Serialize,
StoredCompressed,
Allocative,
)]
pub struct Bitcoin(f64);

View File

@@ -3,10 +3,11 @@ use std::{
ops::{Add, Rem},
};
use allocative::Allocative;
use brk_error::Error;
use vecdb::{CheckedSub, FromCoarserIndex, Printable, StoredCompressed};
use jiff::Span;
use serde::Serialize;
use vecdb::{CheckedSub, FromCoarserIndex, Printable, StoredCompressed};
use zerocopy_derive::{FromBytes, Immutable, IntoBytes, KnownLayout};
use crate::{DecadeIndex, MonthIndex, QuarterIndex, SemesterIndex, WeekIndex, YearIndex};
@@ -28,6 +29,7 @@ use super::Date;
KnownLayout,
Serialize,
StoredCompressed,
Allocative,
)]
pub struct DateIndex(u16);

View File

@@ -3,8 +3,9 @@ use std::{
ops::{Add, AddAssign, Div},
};
use vecdb::{CheckedSub, Printable, StoredCompressed};
use allocative::Allocative;
use serde::{Deserialize, Serialize};
use vecdb::{CheckedSub, Printable, StoredCompressed};
use zerocopy_derive::{FromBytes, Immutable, IntoBytes, KnownLayout};
use super::{Date, DateIndex, YearIndex};
@@ -25,6 +26,7 @@ use super::{Date, DateIndex, YearIndex};
IntoBytes,
KnownLayout,
StoredCompressed,
Allocative,
)]
pub struct DecadeIndex(u16);

View File

@@ -3,6 +3,7 @@ use std::{
ops::{Add, AddAssign, Div},
};
use allocative::Allocative;
use serde::{Deserialize, Serialize};
use vecdb::{CheckedSub, Printable, StoredCompressed};
use zerocopy_derive::{FromBytes, Immutable, IntoBytes, KnownLayout};
@@ -25,6 +26,7 @@ use super::Height;
IntoBytes,
KnownLayout,
StoredCompressed,
Allocative,
)]
pub struct DifficultyEpoch(u16);

View File

@@ -4,6 +4,7 @@ use std::{
ops::{Add, AddAssign, Div, Mul},
};
use allocative::Allocative;
use derive_deref::Deref;
use serde::{Deserialize, Serialize};
use vecdb::{CheckedSub, StoredCompressed};
@@ -24,6 +25,7 @@ use super::{Bitcoin, Cents, Close, High, Sats, StoredF32, StoredF64};
Serialize,
Deserialize,
StoredCompressed,
Allocative,
)]
pub struct Dollars(f64);

View File

@@ -3,6 +3,7 @@ use std::{
ops::{Add, AddAssign, Div},
};
use allocative::Allocative;
use serde::Serialize;
use vecdb::StoredCompressed;
use zerocopy_derive::{FromBytes, Immutable, IntoBytes, KnownLayout};
@@ -10,7 +11,16 @@ use zerocopy_derive::{FromBytes, Immutable, IntoBytes, KnownLayout};
use super::{Sats, StoredU64};
#[derive(
Debug, Clone, Copy, Serialize, FromBytes, Immutable, IntoBytes, KnownLayout, StoredCompressed,
Debug,
Clone,
Copy,
Serialize,
FromBytes,
Immutable,
IntoBytes,
KnownLayout,
StoredCompressed,
Allocative,
)]
pub struct FeeRate(f64);

View File

@@ -3,6 +3,7 @@ use std::{
ops::{Add, AddAssign, Div},
};
use allocative::Allocative;
use serde::{Deserialize, Serialize};
use vecdb::{CheckedSub, Printable, StoredCompressed};
use zerocopy_derive::{FromBytes, Immutable, IntoBytes, KnownLayout};
@@ -25,6 +26,7 @@ use super::Height;
IntoBytes,
KnownLayout,
StoredCompressed,
Allocative,
)]
pub struct HalvingEpoch(u16);

View File

@@ -3,6 +3,7 @@ use std::{
ops::{Add, AddAssign, Rem},
};
use allocative::Allocative;
use bitcoincore_rpc::{Client, RpcApi};
use byteview::ByteView;
use derive_deref::Deref;
@@ -32,6 +33,7 @@ use super::StoredU64;
IntoBytes,
KnownLayout,
StoredCompressed,
Allocative,
)]
pub struct Height(u32);

View File

@@ -1,5 +1,6 @@
use std::ops::{Add, AddAssign};
use allocative::Allocative;
use derive_deref::{Deref, DerefMut};
use serde::Serialize;
use vecdb::{CheckedSub, Printable, StoredCompressed};
@@ -24,6 +25,7 @@ use super::Vin;
KnownLayout,
Serialize,
StoredCompressed,
Allocative,
)]
pub struct InputIndex(u64);

View File

@@ -3,8 +3,9 @@ use std::{
ops::{Add, AddAssign, Div},
};
use vecdb::{CheckedSub, Printable, StoredCompressed};
use allocative::Allocative;
use serde::{Deserialize, Serialize};
use vecdb::{CheckedSub, Printable, StoredCompressed};
use zerocopy_derive::{FromBytes, Immutable, IntoBytes, KnownLayout};
use super::{Date, DateIndex, YearIndex};
@@ -25,6 +26,7 @@ use super::{Date, DateIndex, YearIndex};
IntoBytes,
KnownLayout,
StoredCompressed,
Allocative,
)]
pub struct MonthIndex(u16);

View File

@@ -3,6 +3,7 @@ use std::{
ops::{Add, AddAssign, Div},
};
use allocative::Allocative;
use derive_deref::{Deref, DerefMut};
use serde::{Serialize, Serializer, ser::SerializeTuple};
use vecdb::StoredCompressed;
@@ -473,6 +474,7 @@ where
DerefMut,
Serialize,
StoredCompressed,
Allocative,
)]
#[repr(transparent)]
pub struct Close<T>(T);

View File

@@ -1,5 +1,6 @@
use std::ops::{Add, AddAssign};
use allocative::Allocative;
use derive_deref::{Deref, DerefMut};
use serde::Serialize;
use vecdb::{CheckedSub, Printable, StoredCompressed};
@@ -26,6 +27,7 @@ use super::Vout;
KnownLayout,
Serialize,
StoredCompressed,
Allocative,
)]
pub struct OutputIndex(u64);

View File

@@ -16,6 +16,7 @@ use zerocopy_derive::{FromBytes, Immutable, IntoBytes, KnownLayout};
KnownLayout,
Serialize,
)]
#[serde(rename_all = "lowercase")]
#[repr(u8)]
pub enum OutputType {
P2PK65,

View File

@@ -3,6 +3,7 @@ use std::{
ops::{Add, AddAssign, Div},
};
use allocative::Allocative;
use serde::{Deserialize, Serialize};
use vecdb::{CheckedSub, Printable, StoredCompressed};
use zerocopy_derive::{FromBytes, Immutable, IntoBytes, KnownLayout};
@@ -25,6 +26,7 @@ use super::MonthIndex;
IntoBytes,
KnownLayout,
StoredCompressed,
Allocative,
)]
pub struct QuarterIndex(u16);

View File

@@ -3,6 +3,7 @@ use std::{
ops::{Add, AddAssign, Div, Mul, SubAssign},
};
use allocative::Allocative;
use bitcoin::Amount;
use derive_deref::Deref;
use serde::{Deserialize, Serialize};
@@ -30,6 +31,7 @@ use super::{Bitcoin, Cents, Dollars, Height};
Serialize,
Deserialize,
StoredCompressed,
Allocative,
)]
pub struct Sats(u64);

View File

@@ -3,6 +3,7 @@ use std::{
ops::{Add, AddAssign, Div},
};
use allocative::Allocative;
use serde::{Deserialize, Serialize};
use vecdb::{CheckedSub, Printable, StoredCompressed};
use zerocopy_derive::{FromBytes, Immutable, IntoBytes, KnownLayout};
@@ -25,6 +26,7 @@ use super::MonthIndex;
IntoBytes,
KnownLayout,
StoredCompressed,
Allocative,
)]
pub struct SemesterIndex(u16);

View File

@@ -1,3 +1,4 @@
use allocative::Allocative;
use derive_deref::Deref;
use serde::Serialize;
use vecdb::{Printable, StoredCompressed};
@@ -19,6 +20,7 @@ use zerocopy_derive::{FromBytes, Immutable, IntoBytes, KnownLayout};
KnownLayout,
Serialize,
StoredCompressed,
Allocative,
)]
pub struct StoredBool(u16);

View File

@@ -6,11 +6,14 @@ use std::{
ops::{Add, AddAssign, Div, Mul, Sub},
};
use allocative::Allocative;
use derive_deref::Deref;
use serde::Serialize;
use vecdb::{CheckedSub, Printable, StoredCompressed};
use zerocopy_derive::{FromBytes, Immutable, IntoBytes, KnownLayout};
use crate::{Close, StoredU32};
use super::{Dollars, StoredF64};
#[derive(
@@ -25,6 +28,7 @@ use super::{Dollars, StoredF64};
KnownLayout,
Serialize,
StoredCompressed,
Allocative,
)]
pub struct StoredF32(f32);
@@ -65,12 +69,24 @@ impl From<usize> for StoredF32 {
}
}
impl From<StoredU32> for StoredF32 {
    /// Widens a stored `u32` counter into its `f32` representation.
    fn from(value: StoredU32) -> Self {
        let as_float: f32 = value.into();
        Self(as_float)
    }
}
impl CheckedSub<StoredF32> for StoredF32 {
    /// Plain float subtraction wrapped in `Some`.
    ///
    /// NOTE(review): unlike integer `checked_sub`, this never returns
    /// `None` — the result can silently go negative or become NaN.
    /// Confirm that is the intended contract for float vectors.
    fn checked_sub(self, rhs: Self) -> Option<Self> {
        let difference = self.0 - rhs.0;
        Some(Self(difference))
    }
}

impl CheckedSub<usize> for StoredF32 {
    /// Subtracts an integer count (cast to `f32`); always yields `Some`.
    fn checked_sub(self, rhs: usize) -> Option<Self> {
        let difference = self.0 - rhs as f32;
        Some(Self(difference))
    }
}
impl Div<usize> for StoredF32 {
type Output = Self;
fn div(self, rhs: usize) -> Self::Output {
@@ -78,6 +94,13 @@ impl Div<usize> for StoredF32 {
}
}
impl Div<StoredU32> for StoredF32 {
    type Output = Self;
    /// Divides by a stored counter, converting it to `f32` first.
    fn div(self, rhs: StoredU32) -> Self::Output {
        let divisor: f32 = rhs.into();
        Self(self.0 / divisor)
    }
}
impl Add for StoredF32 {
type Output = Self;
fn add(self, rhs: Self) -> Self::Output {
@@ -103,6 +126,12 @@ impl From<Dollars> for StoredF32 {
}
}
impl From<Close<Dollars>> for StoredF32 {
    /// Unwraps the `Close` wrapper and converts the inner `Dollars` value.
    fn from(value: Close<Dollars>) -> Self {
        let inner = *value;
        inner.into()
    }
}
impl Div<Dollars> for StoredF32 {
type Output = Self;
fn div(self, rhs: Dollars) -> Self::Output {

View File

@@ -4,6 +4,7 @@ use std::{
ops::{Add, AddAssign, Div, Mul},
};
use allocative::Allocative;
use derive_deref::Deref;
use serde::Serialize;
use vecdb::{CheckedSub, Printable, StoredCompressed};
@@ -23,6 +24,7 @@ use crate::{Bitcoin, Dollars};
KnownLayout,
Serialize,
StoredCompressed,
Allocative,
)]
pub struct StoredF64(f64);

View File

@@ -1,3 +1,5 @@
use std::borrow::Cow;
use byteview::ByteView;
use derive_deref::Deref;
use serde::Serialize;
@@ -26,6 +28,12 @@ impl From<String> for StoredString {
}
}
impl From<Cow<'_, str>> for StoredString {
    /// Converts a `Cow<str>` into a `StoredString`.
    ///
    /// Uses `into_owned` so an already-owned `Cow::Owned(String)` is moved
    /// instead of re-allocated; `to_string()` would always copy the bytes.
    /// A `Cow::Borrowed` is cloned into a fresh `String` either way.
    fn from(value: Cow<'_, str>) -> Self {
        Self(value.into_owned())
    }
}
impl From<ByteView> for StoredString {
fn from(value: ByteView) -> Self {
let bytes = &*value;

View File

@@ -1,5 +1,6 @@
use std::ops::{Add, AddAssign, Div};
use allocative::Allocative;
use derive_deref::Deref;
use serde::Serialize;
use vecdb::{CheckedSub, Printable, StoredCompressed};
@@ -27,11 +28,13 @@ use super::{
KnownLayout,
Serialize,
StoredCompressed,
Allocative,
)]
pub struct StoredU16(u16);
impl StoredU16 {
pub const ZERO: Self = Self(0);
pub const ONE: Self = Self(1);
pub fn new(v: u16) -> Self {
Self(v)

View File

@@ -1,5 +1,6 @@
use std::ops::{Add, AddAssign, Div};
use std::ops::{Add, AddAssign, Div, Mul};
use allocative::Allocative;
use derive_deref::Deref;
use serde::Serialize;
use vecdb::{CheckedSub, Printable, StoredCompressed};
@@ -27,15 +28,21 @@ use super::{
KnownLayout,
Serialize,
StoredCompressed,
Allocative,
)]
pub struct StoredU32(u32);
impl StoredU32 {
    /// The zero counter value.
    pub const ZERO: Self = Self(0);
    /// The unit counter value.
    pub const ONE: Self = Self(1);
    /// Wraps a raw `u32` counter.
    pub fn new(counter: u32) -> Self {
        Self(counter)
    }
    /// Returns `true` when the wrapped counter is exactly zero.
    pub fn is_zero(&self) -> bool {
        self.0 == 0
    }
}
impl From<u32> for StoredU32 {
@@ -44,6 +51,12 @@ impl From<u32> for StoredU32 {
}
}
impl From<StoredU32> for f32 {
    /// Converts the wrapped counter with `as f32`.
    /// Note: values above 2^24 lose precision, since `f32` only has a
    /// 24-bit significand; the cast itself never fails.
    fn from(value: StoredU32) -> Self {
        value.0 as f32
    }
}
impl From<usize> for StoredU32 {
fn from(value: usize) -> Self {
if value > u32::MAX as usize {
@@ -59,6 +72,15 @@ impl CheckedSub<StoredU32> for StoredU32 {
}
}
impl CheckedSub<usize> for StoredU32 {
    /// Checked subtraction of a `usize` from the wrapped `u32` counter.
    ///
    /// If `rhs` exceeds `u32::MAX` the subtraction would necessarily
    /// underflow (`self.0 <= u32::MAX < rhs`), so this returns `None`
    /// rather than the previous bare `panic!()` — consistent with the
    /// `checked_sub` contract of signalling failure via `Option`.
    fn checked_sub(self, rhs: usize) -> Option<Self> {
        let rhs = u32::try_from(rhs).ok()?;
        self.0.checked_sub(rhs).map(Self)
    }
}
impl Div<usize> for StoredU32 {
type Output = Self;
fn div(self, rhs: usize) -> Self::Output {
@@ -79,6 +101,17 @@ impl AddAssign for StoredU32 {
}
}
impl Mul<usize> for StoredU32 {
    type Output = Self;
    /// Multiplies the wrapped counter by `rhs`, panicking (with a message)
    /// if the result does not fit in a `u32`.
    ///
    /// Uses `checked_mul` because `self.0 as usize * rhs` wraps silently in
    /// release builds, which could let the subsequent `u32::MAX` range
    /// check pass with a corrupted value.
    fn mul(self, rhs: usize) -> Self::Output {
        let product = (self.0 as usize)
            .checked_mul(rhs)
            .expect("StoredU32 multiplication overflowed usize");
        if product > u32::MAX as usize {
            panic!("StoredU32 multiplication result exceeds u32::MAX");
        }
        Self::from(product)
    }
}
impl From<f64> for StoredU32 {
fn from(value: f64) -> Self {
if value < 0.0 || value > u32::MAX as f64 {

View File

@@ -1,5 +1,6 @@
use std::ops::{Add, AddAssign, Div};
use allocative::Allocative;
use derive_deref::Deref;
use serde::Serialize;
use vecdb::{CheckedSub, Printable, StoredCompressed};
@@ -28,6 +29,7 @@ use super::{
KnownLayout,
Serialize,
StoredCompressed,
Allocative,
)]
pub struct StoredU64(u64);

View File

@@ -1,5 +1,6 @@
use std::ops::{Add, AddAssign, Div};
use allocative::Allocative;
use derive_deref::Deref;
use jiff::{civil::date, tz::TimeZone};
use serde::Serialize;
@@ -23,6 +24,7 @@ use super::Date;
KnownLayout,
Serialize,
StoredCompressed,
Allocative,
)]
pub struct Timestamp(u32);

View File

@@ -1,5 +1,6 @@
use std::ops::{Add, AddAssign};
use allocative::Allocative;
use byteview::ByteView;
use derive_deref::{Deref, DerefMut};
use serde::Serialize;
@@ -27,6 +28,7 @@ use super::StoredU32;
KnownLayout,
Serialize,
StoredCompressed,
Allocative,
)]
pub struct TxIndex(u32);

View File

@@ -3,6 +3,7 @@ use std::{
ops::{Add, AddAssign, Div},
};
use allocative::Allocative;
use serde::{Deserialize, Serialize};
use vecdb::{CheckedSub, Printable, StoredCompressed};
use zerocopy_derive::{FromBytes, Immutable, IntoBytes, KnownLayout};
@@ -25,6 +26,7 @@ use super::{Date, DateIndex};
IntoBytes,
KnownLayout,
StoredCompressed,
Allocative,
)]
pub struct WeekIndex(u16);

View File

@@ -1,5 +1,6 @@
use std::ops::{Add, AddAssign, Div};
use allocative::Allocative;
use derive_deref::Deref;
use serde::Serialize;
use vecdb::StoredCompressed;
@@ -20,6 +21,7 @@ use zerocopy_derive::{FromBytes, Immutable, IntoBytes, KnownLayout};
FromBytes,
Serialize,
StoredCompressed,
Allocative,
)]
pub struct Weight(u64);

View File

@@ -3,6 +3,7 @@ use std::{
ops::{Add, AddAssign, Div},
};
use allocative::Allocative;
use serde::{Deserialize, Serialize};
use vecdb::{CheckedSub, Printable, StoredCompressed};
use zerocopy_derive::{FromBytes, Immutable, IntoBytes, KnownLayout};
@@ -25,6 +26,7 @@ use super::{Date, DateIndex, MonthIndex};
IntoBytes,
KnownLayout,
StoredCompressed,
Allocative,
)]
pub struct YearIndex(u16);

View File

@@ -1266,7 +1266,7 @@
}
td {
text-transform: lowercase;
text-transform: none;
}
a {

File diff suppressed because it is too large Load Diff

View File

@@ -124,9 +124,10 @@
* @param {Env} args.env
* @param {Colors} args.colors
* @param {VecIdToIndexes} args.vecIdToIndexes
* @param {Pools} args.pools
* @returns {PartialOptionsTree}
*/
function createPartialOptions({ env, colors, vecIdToIndexes }) {
function createPartialOptions({ env, colors, vecIdToIndexes, pools }) {
/**
* @template {string} S
* @typedef {Extract<VecId, `${S}${string}`>} StartsWith
@@ -3343,6 +3344,24 @@ function createPartialOptions({ env, colors, vecIdToIndexes }) {
lineStyle: 4,
},
}),
createBaseSeries({
key: "1w_block_count",
name: "1w sum",
color: colors.red,
defaultActive: false,
}),
createBaseSeries({
key: "1m_block_count",
name: "1m sum",
color: colors.pink,
defaultActive: false,
}),
createBaseSeries({
key: "1y_block_count",
name: "1y sum",
color: colors.purple,
defaultActive: false,
}),
],
},
{
@@ -3521,87 +3540,116 @@ function createPartialOptions({ env, colors, vecIdToIndexes }) {
],
},
{
name: "Coinbase",
title: "Coinbase",
bottom: [
...createBaseAverageSumCumulativeMinMaxPercentilesSeries({
key: "coinbase",
name: "Rewards",
tree: [
{
name: "Coinbase",
}),
...createBaseAverageSumCumulativeMinMaxPercentilesSeries({
key: "coinbase_in_btc",
name: "Coinbase",
}),
...createBaseAverageSumCumulativeMinMaxPercentilesSeries({
key: "coinbase_in_usd",
name: "Coinbase",
}),
],
},
{
name: "Subsidy",
title: "Subsidy",
bottom: [
...createBaseAverageSumCumulativeMinMaxPercentilesSeries({
key: "subsidy",
title: "Coinbase",
bottom: [
...createBaseAverageSumCumulativeMinMaxPercentilesSeries(
{
key: "coinbase",
name: "Coinbase",
},
),
...createBaseAverageSumCumulativeMinMaxPercentilesSeries(
{
key: "coinbase_in_btc",
name: "Coinbase",
},
),
...createBaseAverageSumCumulativeMinMaxPercentilesSeries(
{
key: "coinbase_in_usd",
name: "Coinbase",
},
),
],
},
{
name: "Subsidy",
}),
createBaseSeries({
key: "subsidy_usd_1y_sma",
name: "1y sma",
}),
...createBaseAverageSumCumulativeMinMaxPercentilesSeries({
key: "subsidy_in_btc",
name: "Subsidy",
}),
...createBaseAverageSumCumulativeMinMaxPercentilesSeries({
key: "subsidy_in_usd",
name: "Subsidy",
}),
],
},
{
name: "Fee",
title: "Transaction Fee",
bottom: [
...createAverageSumCumulativeMinMaxPercentilesSeries("fee"),
...createAverageSumCumulativeMinMaxPercentilesSeries(
"fee_in_btc",
),
...createAverageSumCumulativeMinMaxPercentilesSeries(
"fee_in_usd",
),
],
},
{
name: "Dominance",
title: "Reward Dominance",
bottom: [
createBaseSeries({
key: "fee_dominance",
title: "Subsidy",
bottom: [
...createBaseAverageSumCumulativeMinMaxPercentilesSeries(
{
key: "subsidy",
name: "Subsidy",
},
),
createBaseSeries({
key: "subsidy_usd_1y_sma",
name: "1y sma",
}),
...createBaseAverageSumCumulativeMinMaxPercentilesSeries(
{
key: "subsidy_in_btc",
name: "Subsidy",
},
),
...createBaseAverageSumCumulativeMinMaxPercentilesSeries(
{
key: "subsidy_in_usd",
name: "Subsidy",
},
),
],
},
{
name: "Fee",
color: colors.amber,
}),
createBaseSeries({
key: "subsidy_dominance",
name: "Subsidy",
color: colors.red,
}),
],
},
{
name: "Unclaimed Rewards",
title: "Unclaimed Rewards",
bottom: [
...createSumCumulativeSeries({
concat: "unclaimed_rewards",
}),
...createSumCumulativeSeries({
concat: "unclaimed_rewards_in_btc",
}),
...createSumCumulativeSeries({
concat: "unclaimed_rewards_in_usd",
}),
title: "Transaction Fee",
bottom: [
...createAverageSumCumulativeMinMaxPercentilesSeries(
"fee",
),
...createAverageSumCumulativeMinMaxPercentilesSeries(
"fee_in_btc",
),
...createAverageSumCumulativeMinMaxPercentilesSeries(
"fee_in_usd",
),
],
},
{
name: "Dominance",
title: "Reward Dominance",
bottom: [
createBaseSeries({
key: "fee_dominance",
name: "Fee",
color: colors.amber,
}),
createBaseSeries({
key: "subsidy_dominance",
name: "Subsidy",
color: colors.red,
}),
],
},
{
name: "Unclaimed",
title: "Unclaimed Rewards",
bottom: [
...createSumCumulativeSeries({
concat: "unclaimed_rewards",
}),
...createSumCumulativeSeries({
concat: "unclaimed_rewards_in_btc",
}),
...createSumCumulativeSeries({
concat: "unclaimed_rewards_in_usd",
}),
],
},
{
name: "Puell multiple",
title: "Puell multiple",
bottom: [
createBaseSeries({
key: "puell_multiple",
name: "Multiple",
}),
],
},
],
},
{
@@ -3614,16 +3662,6 @@ function createPartialOptions({ env, colors, vecIdToIndexes }) {
}),
],
},
{
name: "Puell multiple",
title: "Puell multiple",
bottom: [
createBaseSeries({
key: "puell_multiple",
name: "Multiple",
}),
],
},
{
name: "Difficulty",
title: "Difficulty",
@@ -3695,6 +3733,165 @@ function createPartialOptions({ env, colors, vecIdToIndexes }) {
}),
],
},
{
name: "Pools",
tree: Object.entries(pools).map(([_key, name]) => {
const key = /** @type {Pool} */ (_key);
return {
name,
tree: [
// indexes_to_dominance: ComputedVecsFromDateIndex<StoredF32>,
// indexes_to_1d_dominance: ComputedVecsFromDateIndex<StoredF32>,
// indexes_to_1w_dominance: ComputedVecsFromDateIndex<StoredF32>,
// indexes_to_1m_dominance: ComputedVecsFromDateIndex<StoredF32>,
// indexes_to_1y_dominance: ComputedVecsFromDateIndex<StoredF32>,
// indexes_to_days_since_block: ComputedVecsFromDateIndex<StoredU16>,
{
name: "Dominance",
title: `Dominance of ${name}`,
bottom: [
createBaseSeries({
key: `${key}_1d_dominance`,
name: "1d",
color: colors.rose,
defaultActive: false,
}),
createBaseSeries({
key: `${key}_1w_dominance`,
name: "1w",
color: colors.red,
defaultActive: false,
}),
createBaseSeries({
key: `${key}_1m_dominance`,
name: "1m",
}),
createBaseSeries({
key: `${key}_1y_dominance`,
name: "1y",
color: colors.lime,
defaultActive: false,
}),
createBaseSeries({
key: `${key}_dominance`,
name: "all time",
color: colors.teal,
defaultActive: false,
}),
],
},
{
name: "Blocks mined",
title: `Blocks mined by ${name}`,
bottom: [
createBaseSeries({
key: `${key}_blocks_mined`,
name: "Sum",
}),
createBaseSeries({
key: `${key}_blocks_mined_cumulative`,
name: "Cumulative",
color: colors.blue,
}),
createBaseSeries({
key: `${key}_1w_blocks_mined`,
name: "1w Sum",
color: colors.red,
defaultActive: false,
}),
createBaseSeries({
key: `${key}_1m_blocks_mined`,
name: "1m Sum",
color: colors.pink,
defaultActive: false,
}),
createBaseSeries({
key: `${key}_1y_blocks_mined`,
name: "1y Sum",
color: colors.purple,
defaultActive: false,
}),
],
},
{
name: "Rewards",
tree: [
{
name: "coinbase",
title: `coinbase collected by ${name}`,
bottom: [
...createSumCumulativeSeries({
concat: `${key}_coinbase`,
common: "coinbase",
// cumulativeColor: colors.
}),
...createSumCumulativeSeries({
concat: `${key}_coinbase_in_btc`,
common: "coinbase",
// cumulativeColor: colors.
}),
...createSumCumulativeSeries({
concat: `${key}_coinbase_in_usd`,
common: "coinbase",
}),
],
},
{
name: "subsidy",
title: `subsidy collected by ${name}`,
bottom: [
...createSumCumulativeSeries({
concat: `${key}_subsidy`,
common: "subsidy",
// cumulativeColor: colors.
}),
...createSumCumulativeSeries({
concat: `${key}_subsidy_in_btc`,
common: "subsidy",
// cumulativeColor: colors.
}),
...createSumCumulativeSeries({
concat: `${key}_subsidy_in_usd`,
common: "subsidy",
}),
],
},
{
name: "fees",
title: `fees collected by ${name}`,
bottom: [
...createSumCumulativeSeries({
concat: `${key}_fee`,
common: "fee",
// cumulativeColor: colors.
}),
...createSumCumulativeSeries({
concat: `${key}_fee_in_btc`,
common: "fee",
// cumulativeColor: colors.
}),
...createSumCumulativeSeries({
concat: `${key}_fee_in_usd`,
common: "fee",
}),
],
},
],
},
{
name: "Days since block",
title: `Days since ${name} mined a block`,
bottom: [
createBaseSeries({
key: `${key}_days_since_block`,
name: "Raw",
}),
],
},
],
};
}),
},
],
},
{
@@ -4184,6 +4381,7 @@ function createPartialOptions({ env, colors, vecIdToIndexes }) {
* @param {Env} args.env
* @param {Utilities} args.utils
* @param {VecIdToIndexes} args.vecIdToIndexes
* @param {Pools} args.pools
* @param {Signal<string | null>} args.qrcode
*/
export function initOptions({
@@ -4193,6 +4391,7 @@ export function initOptions({
utils,
qrcode,
vecIdToIndexes,
pools,
}) {
const LS_SELECTED_KEY = `selected_path`;
@@ -4208,7 +4407,12 @@ export function initOptions({
/** @type {Signal<Option>} */
const selected = signals.createSignal(/** @type {any} */ (undefined));
const partialOptions = createPartialOptions({ env, colors, vecIdToIndexes });
const partialOptions = createPartialOptions({
env,
colors,
vecIdToIndexes,
pools,
});
/** @type {Option[]} */
const list = [];

View File

@@ -515,12 +515,15 @@ function createIndexToVecIds(vecIdToIndexes) {
/**
* @param {Object} args
* @param {number | OHLCTuple} args.value
* @param {number | string | Object | Array<any>} args.value
* @param {Unit} args.unit
*/
function serializeValue({ value, unit }) {
if (typeof value !== "number") {
return JSON.stringify(value);
const t = typeof value;
if (typeof value === "string") {
return value;
} else if (t !== "number") {
return JSON.stringify(value).replaceAll('"', "").slice(1, -1);
} else if (value !== 18446744073709552000) {
if (unit === "USD" || unit === "Difficulty" || unit === "sat/vB") {
return value.toLocaleString("en-us", {