diff --git a/.gitignore b/.gitignore index 2529d6b02..5af3b6170 100644 --- a/.gitignore +++ b/.gitignore @@ -14,6 +14,7 @@ bridge/ _* !__*.py /*.md +/api.json # Logs *.log* diff --git a/Cargo.lock b/Cargo.lock index 730e96d2b..e659ecf59 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -211,7 +211,7 @@ checksum = "9035ad2d096bed7955a320ee7e2230574d28fd3c3a0f186cbea1ff3c7eed5dbb" dependencies = [ "proc-macro2", "quote", - "syn 2.0.111", + "syn 2.0.112", ] [[package]] @@ -339,9 +339,9 @@ checksum = "32637268377fc7b10a8c6d51de3e7fba1ce5dd371a96e342b34e6078db558e7f" [[package]] name = "better_io" -version = "0.1.0" +version = "0.2.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "92fde17f91e7ba10b2a07f8dff29530b77144894bc6ae850fbc66e1276af0d28" +checksum = "ef0a3155e943e341e557863e69a708999c94ede624e37865c8e2a91b94efa78f" [[package]] name = "bincode" @@ -378,7 +378,7 @@ dependencies = [ "regex", "rustc-hash", "shlex", - "syn 2.0.111", + "syn 2.0.112", ] [[package]] @@ -525,13 +525,13 @@ name = "brk" version = "0.1.0-alpha.1" dependencies = [ "brk_bencher", - "brk_binder", + "brk_bindgen", "brk_bundler", "brk_client", + "brk_cohort", "brk_computer", "brk_error", "brk_fetcher", - "brk_grouper", "brk_indexer", "brk_iterator", "brk_logger", @@ -576,7 +576,21 @@ dependencies = [ name = "brk_binder" version = "0.1.0-alpha.1" dependencies = [ - "brk_grouper", + "brk_cohort", + "brk_query", + "brk_types", + "oas3", + "schemars", + "serde", + "serde_json", + "vecdb", +] + +[[package]] +name = "brk_bindgen" +version = "0.1.0-alpha.1" +dependencies = [ + "brk_cohort", "brk_query", "brk_types", "oas3", @@ -591,6 +605,7 @@ name = "brk_bundler" version = "0.1.0-alpha.1" dependencies = [ "brk_rolldown", + "env_logger", "log", "notify", "sugar_path", @@ -602,7 +617,7 @@ name = "brk_cli" version = "0.1.0-alpha.1" dependencies = [ "brk_alloc", - "brk_binder", + "brk_bindgen", "brk_bundler", "brk_computer", "brk_error", @@ -630,12 +645,24 @@ dependencies = [ name 
= "brk_client" version = "0.1.0-alpha.1" dependencies = [ - "brk_grouper", + "brk_cohort", "brk_types", "minreq", "serde", ] +[[package]] +name = "brk_cohort" +version = "0.1.0-alpha.1" +dependencies = [ + "brk_error", + "brk_traversable", + "brk_types", + "rayon", + "serde", + "vecdb", +] + [[package]] name = "brk_computer" version = "0.1.0-alpha.1" @@ -643,9 +670,9 @@ dependencies = [ "bitcoin", "brk_alloc", "brk_bencher", + "brk_cohort", "brk_error", "brk_fetcher", - "brk_grouper", "brk_indexer", "brk_iterator", "brk_logger", @@ -693,18 +720,6 @@ dependencies = [ "serde_json", ] -[[package]] -name = "brk_grouper" -version = "0.1.0-alpha.1" -dependencies = [ - "brk_error", - "brk_traversable", - "brk_types", - "rayon", - "serde", - "vecdb", -] - [[package]] name = "brk_indexer" version = "0.1.0-alpha.1" @@ -712,8 +727,8 @@ dependencies = [ "bitcoin", "brk_alloc", "brk_bencher", + "brk_cohort", "brk_error", - "brk_grouper", "brk_iterator", "brk_logger", "brk_reader", @@ -857,14 +872,14 @@ dependencies = [ "proc-macro2", "quote", "serde_json", - "syn 2.0.111", + "syn 2.0.112", ] [[package]] name = "brk_rolldown" -version = "0.6.0" +version = "0.7.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b663fc71e6bfb55bb85706acbb12ad20c69f2034dc63fab16661ad83e96a4acc" +checksum = "e4a03eb8d04b8b9d23aa81eaf2f13328396ee32501132b70c5ad299a03c098a3" dependencies = [ "anyhow", "append-only-vec", @@ -917,9 +932,9 @@ dependencies = [ [[package]] name = "brk_rolldown_common" -version = "0.6.0" +version = "0.7.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "18b8ec7db68a6e506204f1c3a52a3430236d679ce333adc16a022f728ec38e5e" +checksum = "5bcba1505be9175212c59de099e29b06caff51a0391ade1fe2f778ef7fe62ac5" dependencies = [ "anyhow", "arcstr", @@ -949,9 +964,9 @@ dependencies = [ [[package]] name = "brk_rolldown_dev_common" -version = "0.6.0" +version = "0.7.0" source = "registry+https://github.com/rust-lang/crates.io-index" 
-checksum = "e30475c1f8e9e717cac29880d4ed625add518cc1b271e8f95e4099aeac1578cf" +checksum = "af076c673b21965be6817828da3d0abb036740f936cbd2a4745320f41d50c7b9" dependencies = [ "brk_rolldown_common", "brk_rolldown_error", @@ -960,9 +975,9 @@ dependencies = [ [[package]] name = "brk_rolldown_devtools" -version = "0.6.0" +version = "0.7.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6fdf5ff0cdad5b1097d75059c28a70c150be3cd2bee12bb2dbd23499f5b07630" +checksum = "1fe858727eb5c5fa658b3278f0b77c4a346777b9ab7f80281227544ddea240a0" dependencies = [ "blake3", "brk_rolldown_devtools_action", @@ -976,9 +991,9 @@ dependencies = [ [[package]] name = "brk_rolldown_devtools_action" -version = "0.6.0" +version = "0.7.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "86b1791f7534249a71106fdbe3865dab344fad298e9cba6b2d2ba43931ffef62" +checksum = "58ce01745ba57a89ff281495195f2a1dd1a9f1827ea97b8cdb0f456dd70aad7e" dependencies = [ "serde", "ts-rs", @@ -986,9 +1001,9 @@ dependencies = [ [[package]] name = "brk_rolldown_ecmascript" -version = "0.6.0" +version = "0.7.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "12d26e2532952544c39008f433b027bc5f8f9edfbb5ab5e311f4a7005c2aeb5a" +checksum = "d800edb568de11ed78992999714715967ff10546680074200e0e55cc5c3c1c96" dependencies = [ "arcstr", "brk_rolldown_error", @@ -999,9 +1014,9 @@ dependencies = [ [[package]] name = "brk_rolldown_ecmascript_utils" -version = "0.6.0" +version = "0.7.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "79358b81155f0afa079d53b796370fbb56c3bb730c3790af6542509aec3479aa" +checksum = "7e9929eb305c9e89c4c7fa67609b646ce1dac106bef8174e2efec49cf5e4860c" dependencies = [ "brk_rolldown_common", "brk_rolldown_utils", @@ -1011,14 +1026,13 @@ dependencies = [ [[package]] name = "brk_rolldown_error" -version = "0.6.0" +version = "0.7.0" source = "registry+https://github.com/rust-lang/crates.io-index" 
-checksum = "01bf061dca4d33bb0268736b95165ff7cb837341d1ed60d596e54d9949bb7099" +checksum = "edf088c2f5c281a9414cdaca81b43d0aa3d4f58e495dde3b4bf9b32b2692962b" dependencies = [ "anyhow", "arcstr", "bitflags 2.10.0", - "brk_rolldown_utils", "derive_more", "heck", "oxc", @@ -1031,9 +1045,9 @@ dependencies = [ [[package]] name = "brk_rolldown_fs" -version = "0.6.0" +version = "0.7.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d6e34c23974f0099042093821fd49de7fae6ed8d7177ad4e8418469c45b46677" +checksum = "84028eb06b4c568820acf53a38ec655ea53d0068d3d9fc182c95a8fe6dade8de" dependencies = [ "oxc_resolver", "vfs", @@ -1041,9 +1055,9 @@ dependencies = [ [[package]] name = "brk_rolldown_plugin" -version = "0.6.0" +version = "0.7.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "49171536730187d7154281b7830ef1874f1deb6ff0c166b5571ee9aa891851e8" +checksum = "da9cca9f803f2b59cd995f95a274e4cb2444390773077bd43c09ce0975a07456" dependencies = [ "anyhow", "arcstr", @@ -1072,9 +1086,9 @@ dependencies = [ [[package]] name = "brk_rolldown_plugin_chunk_import_map" -version = "0.6.0" +version = "0.7.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "40d1670473ab8b403186ff59b3acaf5fc6585888cd96281181435629ea737639" +checksum = "42f2a46fe5e18e0a32117ad1bea1a965b7a612d4ff02cdddefa45974fca2d918" dependencies = [ "arcstr", "brk_rolldown_common", @@ -1087,9 +1101,9 @@ dependencies = [ [[package]] name = "brk_rolldown_plugin_data_uri" -version = "0.6.0" +version = "0.7.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "812aa9ecfca91b39854e1456cdcb9ea29ef0792effe655c119987b964a89695e" +checksum = "7ac7a2b36c4447385800a3ad79cc8abc490d6321850f4976a641bceda12fa91a" dependencies = [ "arcstr", "base64-simd", @@ -1102,9 +1116,9 @@ dependencies = [ [[package]] name = "brk_rolldown_plugin_hmr" -version = "0.6.0" +version = "0.7.0" source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "b5121b446e0d7134ca8591cd202193015e342bcc5245a5a848ce352c4758e825" +checksum = "1f5f24a619e9a43ecf4716eafb10d7e92b5a5b36ef0a8ec844e7e4acae66700c" dependencies = [ "arcstr", "brk_rolldown_common", @@ -1114,9 +1128,9 @@ dependencies = [ [[package]] name = "brk_rolldown_plugin_lazy_compilation" -version = "0.6.0" +version = "0.7.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "11302e58eb56f543e3fdfb5fd555050f73162f11c72adde4238be7717310280f" +checksum = "99cc47862efec15f1ff9a999059eceac70864170af745ce6709e12d20178e953" dependencies = [ "arcstr", "brk_rolldown_common", @@ -1126,9 +1140,9 @@ dependencies = [ [[package]] name = "brk_rolldown_plugin_oxc_runtime" -version = "0.6.0" +version = "0.7.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8f742caad9c8e99e8ef9e7c444b080838317a945230b6befac271ab97337e844" +checksum = "c7bc9949621e6578900c55bee1d21d02846dd5c3d9779e014726cc2e07f3a5e8" dependencies = [ "arcstr", "brk_rolldown_plugin", @@ -1138,9 +1152,9 @@ dependencies = [ [[package]] name = "brk_rolldown_resolver" -version = "0.6.0" +version = "0.7.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6c1a629f24ebb97efc1b3b534595edef9c8c0cf5d64de4562f62cb044ffd3b96" +checksum = "7ca8c556deb2a8ede39b31e354983d050831d61e70fcc1b306a9523c4d5714c8" dependencies = [ "anyhow", "arcstr", @@ -1155,9 +1169,9 @@ dependencies = [ [[package]] name = "brk_rolldown_sourcemap" -version = "0.6.0" +version = "0.7.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b4da81607ddcff806e067ae5c1afadfeda0d3fe69edd6325e9f11404179f7f18" +checksum = "6d31ca16e9e3b35c18912426cac514389a9f90926cf48c0a30493ca563142fda" dependencies = [ "brk_rolldown_utils", "memchr", @@ -1168,18 +1182,18 @@ dependencies = [ [[package]] name = "brk_rolldown_std_utils" -version = "0.6.0" +version = "0.7.0" source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "c6b587f4eb33cec46b60e6678cb184d51150ef397462ab420e6f5fe7d677a124" +checksum = "a5ecd853b69e91240e4274d7fe4e8ccdcda6b873ff0587cc349a0824eff2f5c3" dependencies = [ "regex", ] [[package]] name = "brk_rolldown_tracing" -version = "0.6.0" +version = "0.7.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9aa971b9b87999cd83a958068b1dee1c404ae1528ec1786ef991b9f84e7c281f" +checksum = "b193c3c65b2c3d42b33b07e2bfd9beaa57e7173acd0be3160f1287b73ed40f90" dependencies = [ "tracing", "tracing-chrome", @@ -1188,15 +1202,16 @@ dependencies = [ [[package]] name = "brk_rolldown_utils" -version = "0.6.0" +version = "0.7.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6b24794f5e7e65d4c126ae6cc6e18286f4363c2df318a2a06256cdf8d660f789" +checksum = "78ea0931cd583c7f4390d471e0b2d2ec2ed41281ab84d075c58d47baca3a03de" dependencies = [ "anyhow", "arcstr", "async-scoped", "base-encode", "base64-simd", + "brk_rolldown_error", "brk_rolldown_std_utils", "cow-utils", "dashmap", @@ -1243,7 +1258,7 @@ version = "0.1.0-alpha.1" dependencies = [ "aide", "axum", - "brk_binder", + "brk_bindgen", "brk_computer", "brk_error", "brk_fetcher", @@ -1281,9 +1296,9 @@ dependencies = [ [[package]] name = "brk_string_wizard" -version = "0.6.0" +version = "0.7.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f7b2328e2cf4d22ff7c7a620ed887cd7e2fc0c81546fd03448d8e261ff00d905" +checksum = "7b426c5c8e9dc5d9117930d8dada29790a3be0a6e1ab329a915253272a666998" dependencies = [ "memchr", "oxc_index", @@ -1310,7 +1325,7 @@ version = "0.1.0-alpha.1" dependencies = [ "proc-macro2", "quote", - "syn 2.0.111", + "syn 2.0.112", ] [[package]] @@ -1357,9 +1372,9 @@ dependencies = [ [[package]] name = "bumpalo" -version = "3.19.0" +version = "3.19.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"46c5e41b57b8bba42a04676d81cb89e9ee8e859a1a66f80a5a72e1cb76b34d43" +checksum = "5dd9dc738b7a8311c7ade152424974d8115f2cdad61e8dab8dac9f2362298510" dependencies = [ "allocator-api2", ] @@ -1503,7 +1518,7 @@ dependencies = [ "heck", "proc-macro2", "quote", - "syn 2.0.111", + "syn 2.0.112", ] [[package]] @@ -1830,7 +1845,7 @@ dependencies = [ "proc-macro2", "quote", "strsim", - "syn 2.0.111", + "syn 2.0.112", ] [[package]] @@ -1841,7 +1856,7 @@ checksum = "d38308df82d1080de0afee5d069fa14b0326a88c14f15c5ccda35b4a6c414c81" dependencies = [ "darling_core", "quote", - "syn 2.0.111", + "syn 2.0.112", ] [[package]] @@ -1875,7 +1890,7 @@ checksum = "1e567bd82dcff979e4b03460c307b3cdc9e96fde3d73bed1496d2bc75d9dd62a" dependencies = [ "proc-macro2", "quote", - "syn 2.0.111", + "syn 2.0.112", ] [[package]] @@ -1908,7 +1923,7 @@ dependencies = [ "proc-macro2", "quote", "rustc_version", - "syn 2.0.111", + "syn 2.0.112", "unicode-xid", ] @@ -1963,7 +1978,7 @@ checksum = "97369cbbc041bc366949bc74d34658d6cda5621039731c6310521892a3a20ae0" dependencies = [ "proc-macro2", "quote", - "syn 2.0.111", + "syn 2.0.112", ] [[package]] @@ -2026,7 +2041,7 @@ dependencies = [ "once_cell", "proc-macro2", "quote", - "syn 2.0.111", + "syn 2.0.112", ] [[package]] @@ -2139,7 +2154,8 @@ checksum = "1d674e81391d1e1ab681a28d99df07927c6d4aa5b027d7da16ba32d1d21ecd99" [[package]] name = "fjall" -version = "3.0.0-rc.6" +version = "3.0.0-rc.9" +source = "git+https://github.com/fjall-rs/fjall#ef220cd8502ff1f41bf00f7a01162040148b57fb" dependencies = [ "byteorder-lite", "byteview", @@ -2248,7 +2264,7 @@ checksum = "1a5c6c585bc94aaf2c7b51dd4c2ba22680844aba4c687be581871a6f518c5742" dependencies = [ "proc-macro2", "quote", - "syn 2.0.111", + "syn 2.0.112", ] [[package]] @@ -2342,7 +2358,7 @@ checksum = "162ee34ebcb7c64a8abebc059ce0fee27c2262618d7b60ed8faf72fef13c3650" dependencies = [ "proc-macro2", "quote", - "syn 2.0.111", + "syn 2.0.112", ] [[package]] @@ -2834,7 +2850,7 @@ checksum = 
"b787bebb543f8969132630c51fd0afab173a86c6abae56ff3b9e5e3e3f9f6e58" dependencies = [ "proc-macro2", "quote", - "syn 2.0.111", + "syn 2.0.112", ] [[package]] @@ -3018,7 +3034,9 @@ checksum = "5e5032e24019045c762d3c0f28f5b6b8bbf38563a65908389bf7978758920897" [[package]] name = "lsm-tree" -version = "3.0.0-rc.8" +version = "3.0.0-rc.9" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "27b24b4f8669054ca480aeedf97c19157f94736a1bf87ab688aaea66e8f2d54d" dependencies = [ "byteorder-lite", "byteview", @@ -3266,7 +3284,7 @@ dependencies = [ "proc-macro-crate", "proc-macro2", "quote", - "syn 2.0.111", + "syn 2.0.112", ] [[package]] @@ -3361,9 +3379,9 @@ checksum = "9c6901729fa79e91a0913333229e9ca5dc725089d1c363b2f4b4760709dc4a52" [[package]] name = "oxc" -version = "0.103.0" +version = "0.106.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0cbd3721f31187ec4f01caee2f772209987710f512ac8aab3a1fbcef9669220a" +checksum = "bad18615591b88dfe678605a9654701a43a69548e0488ed14aac03588e8892a3" dependencies = [ "oxc_allocator", "oxc_ast", @@ -3422,14 +3440,14 @@ checksum = "003b4612827f6501183873fb0735da92157e3c7daa71c40921c7d2758fec2229" dependencies = [ "proc-macro2", "quote", - "syn 2.0.111", + "syn 2.0.112", ] [[package]] name = "oxc_allocator" -version = "0.103.0" +version = "0.106.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "70809b2ee0d82d38e0a145ae4c46c8384f1342ebb7be8503c1cde4d7ff9c9b8b" +checksum = "07f4ba3148223230c546c1064c2795ece78b647ca75c9e98d42418dd4d5e4cd7" dependencies = [ "allocator-api2", "bumpalo", @@ -3442,9 +3460,9 @@ dependencies = [ [[package]] name = "oxc_ast" -version = "0.103.0" +version = "0.106.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "70e993b5f9f6ec07b5a52375564fcb60a13b9fc60b4260380c022e7eb09381d2" +checksum = "33a08c611e6a481bc573c4050708f418da9ae8f09c4fac5c295c86cca6bbd1ed" dependencies = [ "bitflags 2.10.0", 
"oxc_allocator", @@ -3459,21 +3477,21 @@ dependencies = [ [[package]] name = "oxc_ast_macros" -version = "0.103.0" +version = "0.106.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "efae81973774d77eed1195a965d439c2f6812fa69393c68543bdb8fb7c0807de" +checksum = "3788ddf2f5da12f0eca87c849bc33016b4bf11eea2b92980bb751e0b6a83b51a" dependencies = [ "phf", "proc-macro2", "quote", - "syn 2.0.111", + "syn 2.0.112", ] [[package]] name = "oxc_ast_visit" -version = "0.103.0" +version = "0.106.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "220baf0e5b21585787a08683db46898f5b58aaf18cd2f6e2d3d777ef4931fb22" +checksum = "1c58fd9b2c7697eb1dea5d30d4ae575de810c27a414396542321e292feac0c22" dependencies = [ "oxc_allocator", "oxc_ast", @@ -3483,9 +3501,9 @@ dependencies = [ [[package]] name = "oxc_cfg" -version = "0.103.0" +version = "0.106.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "90dfb7c008174ab87bc12f2c43f99ef4a4ea0ca2bc61985658464febea68aed5" +checksum = "9bf08aa784189e4bc09bcda52c70132c830e029dd2287d932b4e4189fcfb26b6" dependencies = [ "bitflags 2.10.0", "itertools 0.14.0", @@ -3497,9 +3515,9 @@ dependencies = [ [[package]] name = "oxc_codegen" -version = "0.103.0" +version = "0.106.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "05c0e632ae6cd9d99543006d4dacb9f35ef43c247828918273156a438740578a" +checksum = "31ce1592b043fe06b69d934902fb156ddb719716f4f79c505adbaf078700d4f2" dependencies = [ "bitflags 2.10.0", "cow-utils", @@ -3518,9 +3536,9 @@ dependencies = [ [[package]] name = "oxc_compat" -version = "0.103.0" +version = "0.106.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c065e71c19a1cbf820a952336120baa7edf6c0185e9935e86dc216e78d411718" +checksum = "1bd0ffbed285a4f1d8dab0fdac33e2fc0cddb9e68e2221838c9d6fd57a45182c" dependencies = [ "cow-utils", "oxc-browserslist", @@ -3531,18 +3549,18 @@ dependencies = [ 
[[package]] name = "oxc_data_structures" -version = "0.103.0" +version = "0.106.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "47e8d129933ab203ea42d061f4ea5cf24aea7981bbccb3393bd86840a0414e4f" +checksum = "8c6253c51f3836c35d932153398aa7582706f8e39876eae0d7163311f419afc1" dependencies = [ "ropey", ] [[package]] name = "oxc_diagnostics" -version = "0.103.0" +version = "0.106.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e8508cdba30d0df947d31577c394b6afacfb7fca376c347b74246c0b552bf1c1" +checksum = "fbdefb78ab7e05e0ed1301f914905292542633fb6129da67ce82a9d3c87921c4" dependencies = [ "cow-utils", "oxc-miette", @@ -3551,9 +3569,9 @@ dependencies = [ [[package]] name = "oxc_ecmascript" -version = "0.103.0" +version = "0.106.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "bb338ab6d3354919f0125a70f618d72f961777a724aa97eef963b9ccfbf90121" +checksum = "f845e02047887b1e4af5da1201b6d10f097f722e00cb5f7082bc847aa40f15ec" dependencies = [ "cow-utils", "num-bigint", @@ -3566,9 +3584,9 @@ dependencies = [ [[package]] name = "oxc_estree" -version = "0.103.0" +version = "0.106.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1745278e805cf12e56e789dd7202639ba8e662b18dab390719ea295eddcb7ea2" +checksum = "bd69fedb2ea8754a153e979e90fe31efed28789ead73d6d6fd69eb9025c729af" dependencies = [ "dragonbox_ecma", "itoa", @@ -3588,9 +3606,9 @@ dependencies = [ [[package]] name = "oxc_isolated_declarations" -version = "0.103.0" +version = "0.106.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0a0e8f15425926ad00beec1a2464b90d96ed37c7ff818a59089aa1a0288dfaec" +checksum = "5b949e0360acb7bcd07bfa47eb661f117ad008fdf7e1dfe6a0b8de249050c768" dependencies = [ "bitflags 2.10.0", "oxc_allocator", @@ -3605,9 +3623,9 @@ dependencies = [ [[package]] name = "oxc_mangler" -version = "0.103.0" +version = "0.106.0" source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "3e1771f55ff388d5b29d3bda7d8415ec0c9c900a15959960192efaa9250ba6e4" +checksum = "ebab202547eb10b469babe7c764e023721a06cde95c1280b2798a91b9664c244" dependencies = [ "itertools 0.14.0", "oxc_allocator", @@ -3622,9 +3640,9 @@ dependencies = [ [[package]] name = "oxc_minifier" -version = "0.103.0" +version = "0.106.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "271b0d78245d1c0288dc5cb85ff4b18e4abcd1cbc57631e5def8f8d4440e9e3a" +checksum = "01b0811db451b9196ee128a20e8cbc34494b5b6c190598696bee496a0b1031a6" dependencies = [ "cow-utils", "oxc_allocator", @@ -3647,9 +3665,9 @@ dependencies = [ [[package]] name = "oxc_parser" -version = "0.103.0" +version = "0.106.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a43bfbde680f7e55fb879f56901a6f65d8450fc3f16b7c924e4f4b3f2629929a" +checksum = "8ec8d0fd27fffd5742181d1ca76450e25ae51a69dffbbe2076231173b262ab31" dependencies = [ "bitflags 2.10.0", "cow-utils", @@ -3670,9 +3688,9 @@ dependencies = [ [[package]] name = "oxc_regular_expression" -version = "0.103.0" +version = "0.106.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9076c980a2795d009b7509677dfbf765945b4aeec44bf7f796a4084a6e02e2ce" +checksum = "b4af1f85405275d20352a69e7caaa44a28d03ba91ecc951ec79a1bf3016062d7" dependencies = [ "bitflags 2.10.0", "oxc_allocator", @@ -3714,9 +3732,9 @@ dependencies = [ [[package]] name = "oxc_semantic" -version = "0.103.0" +version = "0.106.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5e7532ec0fc54f0ee864ed144634bb1ca5f093646d60a0db53568acccc8470ff" +checksum = "ac84d63f0e43359f38af2478a7d20cc0aecb780de46cada3f14d8ead6c89bf8c" dependencies = [ "itertools 0.14.0", "oxc_allocator", @@ -3749,9 +3767,9 @@ dependencies = [ [[package]] name = "oxc_span" -version = "0.103.0" +version = "0.106.0" source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "e79a2ab3c8f38813e24ac5d853e66c352254ed8ccd6fc773a9c666266644e6ea" +checksum = "32556c52175b0c616e44efa6c37f532c6a4c3a213761a10ed1b9beb3d3136a78" dependencies = [ "compact_str", "oxc-miette", @@ -3763,9 +3781,9 @@ dependencies = [ [[package]] name = "oxc_syntax" -version = "0.103.0" +version = "0.106.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "659c494d5979c1aa6f1c4c2f8baf98ec7a97b4dff31d580393494302bffc9278" +checksum = "6c5b2154749e8d2bf8ac77bfdf9f7e9912c4cb9b63ddfcc63841838a3be48d1a" dependencies = [ "bitflags 2.10.0", "cow-utils", @@ -3784,9 +3802,9 @@ dependencies = [ [[package]] name = "oxc_transformer" -version = "0.103.0" +version = "0.106.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e0dfa6ec657feadd689b4a400cc0a7f46d9c656d88b5bc7590070ffee1f37368" +checksum = "aba5c1dca33ef51651f955fd90b1c005d8ea8ea570b220f7616ccdab50179b53" dependencies = [ "base64 0.22.1", "compact_str", @@ -3813,9 +3831,9 @@ dependencies = [ [[package]] name = "oxc_transformer_plugins" -version = "0.103.0" +version = "0.106.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d2710a94a9829b9b22f7eb13675c63cb3227298fad675903ed7d56e6ab387305" +checksum = "b1f768070e71bad660f4344a949ed88a43cc87c1131a08da0f445f9b640f268e" dependencies = [ "cow-utils", "itoa", @@ -3835,9 +3853,9 @@ dependencies = [ [[package]] name = "oxc_traverse" -version = "0.103.0" +version = "0.106.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c77bbade9aae708a8bafcfe7e0d17a7d53a36f91af9a3aeac7e9ef7c18d89f0c" +checksum = "936417f4b572d4ca1ce4ac1aaf66f5f6d76d7302d486ed5c92af14d18f9e9155" dependencies = [ "itoa", "oxc_allocator", @@ -3917,9 +3935,9 @@ dependencies = [ [[package]] name = "pco" -version = "0.4.7" +version = "0.4.9" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"daea1197f2969fab4d5c6620eade5d46c98a8e9b04ad2bc3725fc5dfc4eb8a49" +checksum = "42382de9fb564e2d10cb4d5ca97cc06d928f0f9667bbef456b57e60827b6548b" dependencies = [ "better_io", "dtype_dispatch", @@ -3976,7 +3994,7 @@ dependencies = [ "phf_shared", "proc-macro2", "quote", - "syn 2.0.111", + "syn 2.0.112", ] [[package]] @@ -4005,7 +4023,7 @@ checksum = "6e918e4ff8c4549eb882f14b3a4bc8c8bc93de829416eacf579f1207a8fbf861" dependencies = [ "proc-macro2", "quote", - "syn 2.0.111", + "syn 2.0.112", ] [[package]] @@ -4286,7 +4304,7 @@ dependencies = [ [[package]] name = "rawdb" -version = "0.5.0" +version = "0.5.2" dependencies = [ "libc", "log", @@ -4363,7 +4381,7 @@ checksum = "b7186006dcb21920990093f30e3dea63b7d6e977bf1256be20c3563a5db070da" dependencies = [ "proc-macro2", "quote", - "syn 2.0.111", + "syn 2.0.112", ] [[package]] @@ -4574,7 +4592,7 @@ dependencies = [ "proc-macro2", "quote", "serde_derive_internals", - "syn 2.0.111", + "syn 2.0.112", ] [[package]] @@ -4626,9 +4644,9 @@ dependencies = [ [[package]] name = "self_cell" -version = "1.2.1" +version = "1.2.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "16c2f82143577edb4921b71ede051dac62ca3c16084e918bf7b40c96ae10eb33" +checksum = "b12e76d157a900eb52e81bc6e9f3069344290341720e9178cde2407113ac8d89" [[package]] name = "semver" @@ -4679,7 +4697,7 @@ checksum = "d540f220d3187173da220f885ab66608367b6574e925011a9353e4badda91d79" dependencies = [ "proc-macro2", "quote", - "syn 2.0.111", + "syn 2.0.112", ] [[package]] @@ -4690,7 +4708,7 @@ checksum = "18d26a20a969b9e3fdf2fc2d9f21eda6c40e2de84c9408bb5d3b05d499aae711" dependencies = [ "proc-macro2", "quote", - "syn 2.0.111", + "syn 2.0.112", ] [[package]] @@ -4911,7 +4929,7 @@ dependencies = [ "heck", "proc-macro2", "quote", - "syn 2.0.111", + "syn 2.0.112", ] [[package]] @@ -4936,9 +4954,9 @@ dependencies = [ [[package]] name = "syn" -version = "2.0.111" +version = "2.0.112" source = "registry+https://github.com/rust-lang/crates.io-index" 
-checksum = "390cc9a294ab71bdb1aa2e99d13be9c753cd2d7bd6560c77118597410c4d2e87" +checksum = "21f182278bf2d2bcb3c88b1b08a37df029d71ce3d3ae26168e3c653b213b99d4" dependencies = [ "proc-macro2", "quote", @@ -4959,7 +4977,7 @@ checksum = "728a70f3dbaf5bab7f0c4b1ac8d7ae5ea60a4b5549c8a5914361c99147a709d2" dependencies = [ "proc-macro2", "quote", - "syn 2.0.111", + "syn 2.0.112", ] [[package]] @@ -5021,7 +5039,7 @@ checksum = "4fee6c4efc90059e10f81e6d42c60a18f76588c3d74cb83a0b242a2b6c7504c1" dependencies = [ "proc-macro2", "quote", - "syn 2.0.111", + "syn 2.0.112", ] [[package]] @@ -5032,7 +5050,7 @@ checksum = "3ff15c8ecd7de3849db632e14d18d2571fa09dfc5ed93479bc4485c7a517c913" dependencies = [ "proc-macro2", "quote", - "syn 2.0.111", + "syn 2.0.112", ] [[package]] @@ -5096,7 +5114,7 @@ checksum = "af407857209536a95c8e56f8231ef2c2e2aff839b22e07a1ffcbc617e9db9fa5" dependencies = [ "proc-macro2", "quote", - "syn 2.0.111", + "syn 2.0.112", ] [[package]] @@ -5242,7 +5260,7 @@ checksum = "7490cfa5ec963746568740651ac6781f701c9c5ea257c58e057f3ba8cf69e8da" dependencies = [ "proc-macro2", "quote", - "syn 2.0.111", + "syn 2.0.112", ] [[package]] @@ -5332,7 +5350,7 @@ checksum = "ee6ff59666c9cbaec3533964505d39154dc4e0a56151fdea30a09ed0301f62e2" dependencies = [ "proc-macro2", "quote", - "syn 2.0.111", + "syn 2.0.112", "termcolor", ] @@ -5478,7 +5496,7 @@ checksum = "8f54a172d0620933a27a4360d3db3e2ae0dd6cceae9730751a036bbf182c4b23" [[package]] name = "vecdb" -version = "0.5.0" +version = "0.5.2" dependencies = [ "ctrlc", "log", @@ -5497,10 +5515,10 @@ dependencies = [ [[package]] name = "vecdb_derive" -version = "0.5.0" +version = "0.5.2" dependencies = [ "quote", - "syn 2.0.111", + "syn 2.0.112", ] [[package]] @@ -5587,7 +5605,7 @@ dependencies = [ "bumpalo", "proc-macro2", "quote", - "syn 2.0.111", + "syn 2.0.112", "wasm-bindgen-shared", ] @@ -5706,7 +5724,7 @@ checksum = "053e2e040ab57b9dc951b72c264860db7eb3b0200ba345b4e4c3b14f67855ddf" dependencies = [ "proc-macro2", "quote", - "syn 
2.0.111", + "syn 2.0.112", ] [[package]] @@ -5717,7 +5735,7 @@ checksum = "3f316c4a2570ba26bbec722032c4099d8c8bc095efccdc15688708623367e358" dependencies = [ "proc-macro2", "quote", - "syn 2.0.111", + "syn 2.0.112", ] [[package]] @@ -5991,7 +6009,7 @@ checksum = "b659052874eb698efe5b9e8cf382204678a0086ebf46982b79d6ca3182927e5d" dependencies = [ "proc-macro2", "quote", - "syn 2.0.111", + "syn 2.0.112", "synstructure", ] @@ -6012,7 +6030,7 @@ checksum = "d8a8d209fdf45cf5138cbb5a506f6b52522a25afccc534d1475dad8e31105c6a" dependencies = [ "proc-macro2", "quote", - "syn 2.0.111", + "syn 2.0.112", ] [[package]] @@ -6032,7 +6050,7 @@ checksum = "d71e5d6e06ab090c67b5e44993ec16b72dcbaabc526db883a360057678b48502" dependencies = [ "proc-macro2", "quote", - "syn 2.0.111", + "syn 2.0.112", "synstructure", ] @@ -6066,7 +6084,7 @@ checksum = "eadce39539ca5cb3985590102671f2567e659fca9666581ad3411d59207951f3" dependencies = [ "proc-macro2", "quote", - "syn 2.0.111", + "syn 2.0.112", ] [[package]] @@ -6091,9 +6109,9 @@ checksum = "40990edd51aae2c2b6907af74ffb635029d5788228222c4bb811e9351c0caad3" [[package]] name = "zmij" -version = "1.0.0" +version = "1.0.7" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e6d6085d62852e35540689d1f97ad663e3971fc19cf5eceab364d62c646ea167" +checksum = "de9211a9f64b825911bdf0240f58b7a8dac217fe260fc61f080a07f61372fbd5" [[package]] name = "zopfli" diff --git a/Cargo.toml b/Cargo.toml index 2261562c3..cc21cd8ec 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -42,14 +42,14 @@ bitcoin = { version = "0.32.8", features = ["serde"] } bitcoincore-rpc = "0.19.0" brk_alloc = { version = "0.1.0-alpha.1", path = "crates/brk_alloc" } brk_bencher = { version = "0.1.0-alpha.1", path = "crates/brk_bencher" } -brk_binder = { version = "0.1.0-alpha.1", path = "crates/brk_binder" } +brk_bindgen = { version = "0.1.0-alpha.1", path = "crates/brk_bindgen" } brk_bundler = { version = "0.1.0-alpha.1", path = "crates/brk_bundler" } brk_cli = { version = 
"0.1.0-alpha.1", path = "crates/brk_cli" } brk_client = { version = "0.1.0-alpha.1", path = "crates/brk_client" } +brk_cohort = { version = "0.1.0-alpha.1", path = "crates/brk_cohort" } brk_computer = { version = "0.1.0-alpha.1", path = "crates/brk_computer" } brk_error = { version = "0.1.0-alpha.1", path = "crates/brk_error" } brk_fetcher = { version = "0.1.0-alpha.1", path = "crates/brk_fetcher" } -brk_grouper = { version = "0.1.0-alpha.1", path = "crates/brk_grouper" } brk_indexer = { version = "0.1.0-alpha.1", path = "crates/brk_indexer" } brk_query = { version = "0.1.0-alpha.1", path = "crates/brk_query", features = ["tokio"] } brk_iterator = { version = "0.1.0-alpha.1", path = "crates/brk_iterator" } @@ -66,8 +66,9 @@ brk_traversable_derive = { version = "0.1.0-alpha.1", path = "crates/brk_travers byteview = "0.9.1" color-eyre = "0.6.5" derive_deref = "1.1.1" +env_logger = "0.11.8" # fjall = "3.0.0-rc.6" -fjall = { path = "../fjall" } +fjall = { git = "https://github.com/fjall-rs/fjall" } jiff = "0.2.17" log = "0.4.29" minreq = { version = "2.14.1", features = ["https", "serde_json"] } @@ -81,7 +82,7 @@ serde_derive = "1.0.228" serde_json = { version = "1.0.148", features = ["float_roundtrip"] } smallvec = "1.15.1" tokio = { version = "1.48.0", features = ["rt-multi-thread"] } -# vecdb = { version = "0.5.0", features = ["derive", "serde_json", "pco", "schemars"] } +# vecdb = { version = "0.5.2", features = ["derive", "serde_json", "pco", "schemars"] } vecdb = { path = "../anydb/crates/vecdb", features = ["derive", "serde_json", "pco", "schemars"] } # vecdb = { git = "https://github.com/anydb-rs/anydb", features = ["derive", "serde_json", "pco"] } diff --git a/crates/brk/Cargo.toml b/crates/brk/Cargo.toml index b00d20b70..a109425e9 100644 --- a/crates/brk/Cargo.toml +++ b/crates/brk/Cargo.toml @@ -32,7 +32,7 @@ full = [ "types", ] bencher = ["brk_bencher"] -binder = ["brk_binder"] +binder = ["brk_bindgen"] bundler = ["brk_bundler"] client = ["brk_client"] 
computer = ["brk_computer"] @@ -54,7 +54,7 @@ types = ["brk_types"] [dependencies] brk_bencher = { workspace = true, optional = true } -brk_binder = { workspace = true, optional = true } +brk_bindgen = { workspace = true, optional = true } brk_bundler = { workspace = true, optional = true } brk_client = { workspace = true, optional = true } brk_computer = { workspace = true, optional = true } diff --git a/crates/brk/src/lib.rs b/crates/brk/src/lib.rs index e22331b90..5fde17bff 100644 --- a/crates/brk/src/lib.rs +++ b/crates/brk/src/lib.rs @@ -6,7 +6,7 @@ pub use brk_bencher as bencher; #[cfg(feature = "binder")] #[doc(inline)] -pub use brk_binder as binder; +pub use brk_bindgen as binder; #[cfg(feature = "bundler")] #[doc(inline)] diff --git a/crates/brk_binder/src/javascript.rs b/crates/brk_binder/src/javascript.rs deleted file mode 100644 index 9030010b9..000000000 --- a/crates/brk_binder/src/javascript.rs +++ /dev/null @@ -1,1087 +0,0 @@ -use std::{collections::HashSet, fmt::Write as FmtWrite, fs, io, path::Path}; - -use serde_json::json; - -use brk_cohort::{ - AGE_RANGE_NAMES, AMOUNT_RANGE_NAMES, EPOCH_NAMES, GE_AMOUNT_NAMES, LT_AMOUNT_NAMES, - MAX_AGE_NAMES, MIN_AGE_NAMES, SPENDABLE_TYPE_NAMES, TERM_NAMES, YEAR_NAMES, -}; -use brk_types::{Index, PoolSlug, TreeNode, pools}; -use serde_json::Value; - -use crate::{ - ClientMetadata, Endpoint, FieldNamePosition, IndexSetPattern, PatternField, StructuralPattern, - TypeSchemas, VERSION, extract_inner_type, get_fields_with_child_info, get_first_leaf_name, - get_node_fields, get_pattern_instance_base, to_camel_case, to_pascal_case, -}; - -/// Generate JavaScript + JSDoc client from metadata and OpenAPI endpoints. -/// -/// `output_path` is the full path to the output file (e.g., "modules/brk-client/index.js"). 
-pub fn generate_javascript_client( - metadata: &ClientMetadata, - endpoints: &[Endpoint], - schemas: &TypeSchemas, - output_path: &Path, -) -> io::Result<()> { - let mut output = String::new(); - - writeln!(output, "// Auto-generated BRK JavaScript client").unwrap(); - writeln!(output, "// Do not edit manually\n").unwrap(); - - generate_type_definitions(&mut output, schemas); - generate_base_client(&mut output); - generate_index_accessors(&mut output, &metadata.index_set_patterns); - generate_structural_patterns(&mut output, &metadata.structural_patterns, metadata); - generate_tree_typedefs(&mut output, &metadata.catalog, metadata); - generate_main_client(&mut output, &metadata.catalog, metadata, endpoints); - - fs::write(output_path, output)?; - - // Update package.json version if it exists in the same directory - if let Some(parent) = output_path.parent() { - let package_json_path = parent.join("package.json"); - if package_json_path.exists() { - update_package_json_version(&package_json_path)?; - } - } - - Ok(()) -} - -/// Update the version field in package.json to match the current VERSION. 
-fn update_package_json_version(package_json_path: &Path) -> io::Result<()> { - let content = fs::read_to_string(package_json_path)?; - let mut package: serde_json::Value = serde_json::from_str(&content) - .map_err(|e| io::Error::new(io::ErrorKind::InvalidData, e))?; - - if let Some(obj) = package.as_object_mut() { - obj.insert("version".to_string(), json!(VERSION)); - } - - let updated = serde_json::to_string_pretty(&package) - .map_err(|e| io::Error::new(io::ErrorKind::InvalidData, e))?; - - fs::write(package_json_path, updated + "\n")?; - - Ok(()) -} - -/// Convert only top-level object keys to camelCase (for consistency with tree field names) -/// Nested values (including id fields) are left unchanged for URL paths -fn camel_case_top_level_keys(value: Value) -> Value { - match value { - Value::Object(map) => { - let new_map: serde_json::Map = map - .into_iter() - .map(|(k, v)| (to_camel_case(&k), v)) - .collect(); - Value::Object(new_map) - } - other => other, - } -} - -fn generate_static_constants(output: &mut String) { - use serde::Serialize; - - fn instance_const(output: &mut String, name: &str, value: &T) { - let json = serde_json::to_string_pretty(value).unwrap(); - let indented = json - .lines() - .enumerate() - .map(|(i, line)| { - if i == 0 { - line.to_string() - } else { - format!(" {}", line) - } - }) - .collect::>() - .join("\n"); - writeln!( - output, - " {} = /** @type {{const}} */ ({});\n", - name, indented - ) - .unwrap(); - } - - fn instance_const_raw(output: &mut String, name: &str, value: &str) { - writeln!(output, " {} = {};\n", name, value).unwrap(); - } - - // VERSION - instance_const_raw(output, "VERSION", &format!("\"v{}\"", VERSION)); - - // INDEXES - let indexes = Index::all(); - let indexes_json: Vec<&'static str> = indexes.iter().map(|i| i.serialize_long()).collect(); - instance_const(output, "INDEXES", &indexes_json); - - // POOL_ID_TO_POOL_NAME - let pools = pools(); - let mut sorted_pools: Vec<_> = pools.iter().collect(); - 
sorted_pools.sort_by(|a, b| a.name.to_lowercase().cmp(&b.name.to_lowercase())); - let pool_map: std::collections::BTreeMap = - sorted_pools.iter().map(|p| (p.slug(), p.name)).collect(); - instance_const(output, "POOL_ID_TO_POOL_NAME", &pool_map); - - // Cohort names - top-level keys converted to camelCase to match tree field names - fn instance_const_camel(output: &mut String, name: &str, value: &T) { - let json_value: Value = serde_json::to_value(value).unwrap(); - let camel_value = camel_case_top_level_keys(json_value); - let json = serde_json::to_string_pretty(&camel_value).unwrap(); - let indented = json - .lines() - .enumerate() - .map(|(i, line)| { - if i == 0 { - line.to_string() - } else { - format!(" {}", line) - } - }) - .collect::>() - .join("\n"); - writeln!( - output, - " {} = /** @type {{const}} */ ({});\n", - name, indented - ) - .unwrap(); - } - - instance_const_camel(output, "TERM_NAMES", &TERM_NAMES); - instance_const_camel(output, "EPOCH_NAMES", &EPOCH_NAMES); - instance_const_camel(output, "YEAR_NAMES", &YEAR_NAMES); - instance_const_camel(output, "SPENDABLE_TYPE_NAMES", &SPENDABLE_TYPE_NAMES); - instance_const_camel(output, "AGE_RANGE_NAMES", &AGE_RANGE_NAMES); - instance_const_camel(output, "MAX_AGE_NAMES", &MAX_AGE_NAMES); - instance_const_camel(output, "MIN_AGE_NAMES", &MIN_AGE_NAMES); - instance_const_camel(output, "AMOUNT_RANGE_NAMES", &AMOUNT_RANGE_NAMES); - instance_const_camel(output, "GE_AMOUNT_NAMES", &GE_AMOUNT_NAMES); - instance_const_camel(output, "LT_AMOUNT_NAMES", <_AMOUNT_NAMES); -} - -fn generate_type_definitions(output: &mut String, schemas: &TypeSchemas) { - if schemas.is_empty() { - return; - } - - writeln!(output, "// Type definitions\n").unwrap(); - - for (name, schema) in schemas { - let js_type = schema_to_js_type_ctx(schema, Some(name)); - - if is_primitive_alias(schema) { - writeln!(output, "/** @typedef {{{}}} {} */", js_type, name).unwrap(); - } else if let Some(props) = schema.get("properties").and_then(|p| 
p.as_object()) { - writeln!(output, "/**").unwrap(); - writeln!(output, " * @typedef {{Object}} {}", name).unwrap(); - for (prop_name, prop_schema) in props { - let prop_type = schema_to_js_type_ctx(prop_schema, Some(name)); - let required = schema - .get("required") - .and_then(|r| r.as_array()) - .map(|arr| arr.iter().any(|v| v.as_str() == Some(prop_name))) - .unwrap_or(false); - let optional = if required { "" } else { "=" }; - let safe_name = to_camel_case(prop_name); - writeln!( - output, - " * @property {{{}{}}} {}", - prop_type, optional, safe_name - ) - .unwrap(); - } - writeln!(output, " */").unwrap(); - } else { - writeln!(output, "/** @typedef {{{}}} {} */", js_type, name).unwrap(); - } - } - writeln!(output).unwrap(); -} - -fn is_primitive_alias(schema: &Value) -> bool { - schema.get("properties").is_none() - && schema.get("items").is_none() - && schema.get("anyOf").is_none() - && schema.get("oneOf").is_none() - && schema.get("enum").is_none() -} - -fn json_type_to_js(ty: &str, schema: &Value, current_type: Option<&str>) -> String { - match ty { - "integer" | "number" => "number".to_string(), - "boolean" => "boolean".to_string(), - "string" => "string".to_string(), - "null" => "null".to_string(), - "array" => { - let item_type = schema - .get("items") - .map(|s| schema_to_js_type_ctx(s, current_type)) - .unwrap_or_else(|| "*".to_string()); - format!("{}[]", item_type) - } - "object" => { - if let Some(add_props) = schema.get("additionalProperties") { - let value_type = schema_to_js_type_ctx(add_props, current_type); - return format!("{{ [key: string]: {} }}", value_type); - } - "Object".to_string() - } - _ => "*".to_string(), - } -} - -fn schema_to_js_type_ctx(schema: &Value, current_type: Option<&str>) -> String { - if let Some(all_of) = schema.get("allOf").and_then(|v| v.as_array()) { - for item in all_of { - let resolved = schema_to_js_type_ctx(item, current_type); - if resolved != "*" { - return resolved; - } - } - } - - if let Some(ref_path) = 
schema.get("$ref").and_then(|r| r.as_str()) { - return ref_path.rsplit('/').next().unwrap_or("*").to_string(); - } - - if let Some(enum_values) = schema.get("enum").and_then(|e| e.as_array()) { - let literals: Vec = enum_values - .iter() - .filter_map(|v| v.as_str()) - .map(|s| format!("\"{}\"", s)) - .collect(); - if !literals.is_empty() { - return format!("({})", literals.join("|")); - } - } - - if let Some(ty) = schema.get("type") { - if let Some(type_array) = ty.as_array() { - let types: Vec = type_array - .iter() - .filter_map(|t| t.as_str()) - .filter(|t| *t != "null") - .map(|t| json_type_to_js(t, schema, current_type)) - .collect(); - let has_null = type_array.iter().any(|t| t.as_str() == Some("null")); - - if types.len() == 1 { - let base_type = &types[0]; - return if has_null { - format!("?{}", base_type) - } else { - base_type.clone() - }; - } else if !types.is_empty() { - let union = format!("({})", types.join("|")); - return if has_null { - format!("?{}", union) - } else { - union - }; - } - } - - if let Some(ty_str) = ty.as_str() { - return json_type_to_js(ty_str, schema, current_type); - } - } - - if let Some(variants) = schema - .get("anyOf") - .or_else(|| schema.get("oneOf")) - .and_then(|v| v.as_array()) - { - let types: Vec = variants - .iter() - .map(|v| schema_to_js_type_ctx(v, current_type)) - .collect(); - let filtered: Vec<_> = types.iter().filter(|t| *t != "*").collect(); - if !filtered.is_empty() { - return format!( - "({})", - filtered - .iter() - .map(|s| s.as_str()) - .collect::>() - .join("|") - ); - } - return format!("({})", types.join("|")); - } - - if let Some(format) = schema.get("format").and_then(|f| f.as_str()) { - return match format { - "int32" | "int64" => "number".to_string(), - "float" | "double" => "number".to_string(), - "date" | "date-time" => "string".to_string(), - _ => "*".to_string(), - }; - } - - "*".to_string() -} - -fn generate_base_client(output: &mut String) { - writeln!( - output, - r#"/** - * @typedef 
{{Object}} BrkClientOptions - * @property {{string}} baseUrl - Base URL for the API - * @property {{number}} [timeout] - Request timeout in milliseconds - */ - -const _isBrowser = typeof window !== 'undefined' && 'caches' in window; -const _runIdle = (/** @type {{VoidFunction}} */ fn) => (globalThis.requestIdleCallback ?? setTimeout)(fn); - -/** @type {{Promise}} */ -const _cachePromise = _isBrowser - ? caches.open('__BRK_CLIENT__').catch(() => null) - : Promise.resolve(null); - -/** - * Custom error class for BRK client errors - */ -class BrkError extends Error {{ - /** - * @param {{string}} message - * @param {{number}} [status] - */ - constructor(message, status) {{ - super(message); - this.name = 'BrkError'; - this.status = status; - }} -}} - -/** - * A metric node that can fetch data for different indexes. - * @template T - */ -class MetricNode {{ - /** - * @param {{BrkClientBase}} client - * @param {{string}} path - */ - constructor(client, path) {{ - this._client = client; - this._path = path; - }} - - /** - * Get the path for this metric. - * @returns {{string}} - */ - get path() {{ - return this._path; - }} - - /** - * Fetch all data points for this metric. - * @param {{(value: T[]) => void}} [onUpdate] - Called when data is available (may be called twice: cache then fresh) - * @returns {{Promise}} - */ - get(onUpdate) {{ - return this._client.get(this._path, onUpdate); - }} - - /** - * Fetch data points within a range. - * @param {{string | number}} [from] - * @param {{string | number}} [to] - * @param {{(value: T[]) => void}} [onUpdate] - Called when data is available (may be called twice: cache then fresh) - * @returns {{Promise}} - */ - getRange(from, to, onUpdate) {{ - const params = new URLSearchParams(); - if (from !== undefined) params.set('from', String(from)); - if (to !== undefined) params.set('to', String(to)); - const query = params.toString(); - return this._client.get(query ? 
`${{this._path}}?${{query}}` : this._path, onUpdate); - }} -}} - -/** - * Base HTTP client for making requests with caching support - */ -class BrkClientBase {{ - /** - * @param {{BrkClientOptions|string}} options - */ - constructor(options) {{ - const isString = typeof options === 'string'; - this.baseUrl = isString ? options : options.baseUrl; - this.timeout = isString ? 5000 : (options.timeout ?? 5000); - }} - - /** - * Make a GET request with stale-while-revalidate caching - * @template T - * @param {{string}} path - * @param {{(value: T) => void}} [onUpdate] - Called when data is available - * @returns {{Promise}} - */ - async get(path, onUpdate) {{ - const url = `${{this.baseUrl}}${{path}}`; - const cache = await _cachePromise; - const cachedRes = await cache?.match(url); - const cachedJson = cachedRes ? await cachedRes.json() : null; - - if (cachedJson) onUpdate?.(cachedJson); - if (!globalThis.navigator?.onLine) {{ - if (cachedJson) return cachedJson; - throw new BrkError('Offline and no cached data available'); - }} - - try {{ - const res = await fetch(url, {{ signal: AbortSignal.timeout(this.timeout) }}); - if (!res.ok) throw new BrkError(`HTTP ${{res.status}}`, res.status); - if (cachedRes?.headers.get('ETag') === res.headers.get('ETag')) return cachedJson; - - const cloned = res.clone(); - const json = await res.json(); - onUpdate?.(json); - if (cache) _runIdle(() => cache.put(url, cloned)); - return json; - }} catch (e) {{ - if (cachedJson) return cachedJson; - throw e; - }} - }} -}} - -"# - ) - .unwrap(); -} - -fn generate_index_accessors(output: &mut String, patterns: &[IndexSetPattern]) { - if patterns.is_empty() { - return; - } - - writeln!(output, "// Index accessor factory functions\n").unwrap(); - - // Generate the ByIndexes type for each pattern - for pattern in patterns { - let by_type_name = format!("{}By", pattern.name); - - // Inner 'by' object type - writeln!(output, "/**").unwrap(); - writeln!(output, " * @template T").unwrap(); - 
writeln!(output, " * @typedef {{Object}} {}", by_type_name).unwrap(); - for index in &pattern.indexes { - let index_name = index.serialize_long(); - writeln!(output, " * @property {{MetricNode}} {}", index_name).unwrap(); - } - writeln!(output, " */\n").unwrap(); - - // Outer type with 'by' property and indexes method - writeln!(output, "/**").unwrap(); - writeln!(output, " * @template T").unwrap(); - writeln!(output, " * @typedef {{Object}} {}", pattern.name).unwrap(); - writeln!(output, " * @property {{{}}} by", by_type_name).unwrap(); - writeln!(output, " * @property {{() => Index[]}} indexes").unwrap(); - writeln!(output, " */\n").unwrap(); - - // Generate factory function - writeln!(output, "/**").unwrap(); - writeln!(output, " * Create a {} accessor", pattern.name).unwrap(); - writeln!(output, " * @template T").unwrap(); - writeln!(output, " * @param {{BrkClientBase}} client").unwrap(); - writeln!(output, " * @param {{string}} basePath").unwrap(); - writeln!(output, " * @returns {{{}}}", pattern.name).unwrap(); - writeln!(output, " */").unwrap(); - writeln!( - output, - "function create{}(client, basePath) {{", - pattern.name - ) - .unwrap(); - writeln!(output, " return {{").unwrap(); - writeln!(output, " by: {{").unwrap(); - - for (i, index) in pattern.indexes.iter().enumerate() { - let index_name = index.serialize_long(); - let comma = if i < pattern.indexes.len() - 1 { - "," - } else { - "" - }; - writeln!( - output, - " {}: new MetricNode(client, `${{basePath}}/{}`){}", - index_name, index_name, comma - ) - .unwrap(); - } - - writeln!(output, " }},").unwrap(); - writeln!(output, " indexes() {{").unwrap(); - writeln!(output, " return /** @type {{Index[]}} */ (Object.keys(this.by));").unwrap(); - writeln!(output, " }}").unwrap(); - writeln!(output, " }};").unwrap(); - writeln!(output, "}}\n").unwrap(); - } -} - -fn generate_structural_patterns( - output: &mut String, - patterns: &[StructuralPattern], - metadata: &ClientMetadata, -) { - if 
patterns.is_empty() { - return; - } - - writeln!(output, "// Reusable structural pattern factories\n").unwrap(); - - for pattern in patterns { - let is_parameterizable = pattern.is_parameterizable(); - - writeln!(output, "/**").unwrap(); - if pattern.is_generic { - writeln!(output, " * @template T").unwrap(); - } - writeln!(output, " * @typedef {{Object}} {}", pattern.name).unwrap(); - for field in &pattern.fields { - let js_type = field_to_js_type_generic(field, metadata, pattern.is_generic); - writeln!( - output, - " * @property {{{}}} {}", - js_type, - to_camel_case(&field.name) - ) - .unwrap(); - } - writeln!(output, " */\n").unwrap(); - - // Generate factory function - writeln!(output, "/**").unwrap(); - writeln!(output, " * Create a {} pattern node", pattern.name).unwrap(); - if pattern.is_generic { - writeln!(output, " * @template T").unwrap(); - } - writeln!(output, " * @param {{BrkClientBase}} client").unwrap(); - if is_parameterizable { - writeln!(output, " * @param {{string}} acc - Accumulated metric name").unwrap(); - } else { - writeln!(output, " * @param {{string}} basePath").unwrap(); - } - let return_type = if pattern.is_generic { - format!("{}", pattern.name) - } else { - pattern.name.clone() - }; - writeln!(output, " * @returns {{{}}}", return_type).unwrap(); - writeln!(output, " */").unwrap(); - - let param_name = if is_parameterizable { - "acc" - } else { - "basePath" - }; - writeln!( - output, - "function create{}(client, {}) {{", - pattern.name, param_name - ) - .unwrap(); - writeln!(output, " return {{").unwrap(); - - for (i, field) in pattern.fields.iter().enumerate() { - let comma = if i < pattern.fields.len() - 1 { - "," - } else { - "" - }; - - if is_parameterizable { - generate_parameterized_field(output, field, pattern, metadata, comma); - } else { - generate_tree_path_field(output, field, metadata, comma); - } - } - - writeln!(output, " }};").unwrap(); - writeln!(output, "}}\n").unwrap(); - } -} - -fn generate_parameterized_field( - 
output: &mut String, - field: &PatternField, - pattern: &StructuralPattern, - metadata: &ClientMetadata, - comma: &str, -) { - let field_name_js = to_camel_case(&field.name); - - if metadata.is_pattern_type(&field.rust_type) { - let child_acc = if let Some(pos) = pattern.get_field_position(&field.name) { - match pos { - FieldNamePosition::Append(suffix) => format!("`${{acc}}{}`", suffix), - FieldNamePosition::Prepend(prefix) => format!("`{}{}`", prefix, "${acc}"), - FieldNamePosition::Identity => "acc".to_string(), - FieldNamePosition::SetBase(base) => format!("'{}'", base), - } - } else { - format!("`${{acc}}_{}`", field.name) - }; - - writeln!( - output, - " {}: create{}(client, {}){}", - field_name_js, field.rust_type, child_acc, comma - ) - .unwrap(); - return; - } - - let metric_expr = if let Some(pos) = pattern.get_field_position(&field.name) { - match pos { - FieldNamePosition::Append(suffix) => format!("`${{acc}}{suffix}`"), - FieldNamePosition::Prepend(prefix) => format!("`{prefix}${{acc}}`"), - FieldNamePosition::Identity => "acc".to_string(), - FieldNamePosition::SetBase(base) => format!("'{base}'"), - } - } else { - format!("`${{acc}}_{}`", field.name) - }; - - if metadata.field_uses_accessor(field) { - let accessor = metadata.find_index_set_pattern(&field.indexes).unwrap(); - writeln!( - output, - " {}: create{}(client, {}){}", - field_name_js, accessor.name, metric_expr, comma - ) - .unwrap(); - } else { - writeln!( - output, - " {}: new MetricNode(client, {}){}", - field_name_js, metric_expr, comma - ) - .unwrap(); - } -} - -fn generate_tree_path_field( - output: &mut String, - field: &PatternField, - metadata: &ClientMetadata, - comma: &str, -) { - let field_name_js = to_camel_case(&field.name); - - if metadata.is_pattern_type(&field.rust_type) { - writeln!( - output, - " {}: create{}(client, `${{basePath}}_{}`){}", - field_name_js, field.rust_type, field.name, comma - ) - .unwrap(); - } else if metadata.field_uses_accessor(field) { - let accessor = 
metadata.find_index_set_pattern(&field.indexes).unwrap(); - writeln!( - output, - " {}: create{}(client, `${{basePath}}_{}`){}", - field_name_js, accessor.name, field.name, comma - ) - .unwrap(); - } else { - writeln!( - output, - " {}: new MetricNode(client, `${{basePath}}_{}`){}", - field_name_js, field.name, comma - ) - .unwrap(); - } -} - -fn field_to_js_type_generic( - field: &PatternField, - metadata: &ClientMetadata, - is_generic: bool, -) -> String { - field_to_js_type_with_generic_value(field, metadata, is_generic, None) -} - -fn field_to_js_type_with_generic_value( - field: &PatternField, - metadata: &ClientMetadata, - is_generic: bool, - generic_value_type: Option<&str>, -) -> String { - let value_type = if is_generic && field.rust_type == "T" { - "T".to_string() - } else { - extract_inner_type(&field.rust_type) - }; - - if metadata.is_pattern_type(&field.rust_type) { - if metadata.is_pattern_generic(&field.rust_type) { - // Use type_param from field, then generic_value_type, then T if parent is generic - let type_param = field - .type_param - .as_deref() - .or(generic_value_type) - .unwrap_or(if is_generic { "T" } else { "unknown" }); - return format!("{}<{}>", field.rust_type, type_param); - } - field.rust_type.clone() - } else if field.is_branch() { - // Non-pattern branch struct - field.rust_type.clone() - } else if let Some(accessor) = metadata.find_index_set_pattern(&field.indexes) { - format!("{}<{}>", accessor.name, value_type) - } else { - format!("MetricNode<{}>", value_type) - } -} - -fn generate_tree_typedefs(output: &mut String, catalog: &TreeNode, metadata: &ClientMetadata) { - writeln!(output, "// Catalog tree typedefs\n").unwrap(); - - let pattern_lookup = metadata.pattern_lookup(); - let mut generated = HashSet::new(); - generate_tree_typedef( - output, - "CatalogTree", - catalog, - &pattern_lookup, - metadata, - &mut generated, - ); -} - -fn generate_tree_typedef( - output: &mut String, - name: &str, - node: &TreeNode, - pattern_lookup: 
&std::collections::HashMap, String>, - metadata: &ClientMetadata, - generated: &mut HashSet, -) { - let TreeNode::Branch(children) = node else { - return; - }; - - let fields_with_child_info = get_fields_with_child_info(children, name, pattern_lookup); - let fields: Vec = fields_with_child_info - .iter() - .map(|(f, _)| f.clone()) - .collect(); - - if pattern_lookup.contains_key(&fields) - && pattern_lookup.get(&fields) != Some(&name.to_string()) - { - return; - } - - if generated.contains(name) { - return; - } - generated.insert(name.to_string()); - - writeln!(output, "/**").unwrap(); - writeln!(output, " * @typedef {{Object}} {}", name).unwrap(); - - for (field, child_fields) in &fields_with_child_info { - let generic_value_type = child_fields - .as_ref() - .and_then(|cf| metadata.get_type_param(cf)) - .map(String::as_str); - let js_type = - field_to_js_type_with_generic_value(field, metadata, false, generic_value_type); - writeln!( - output, - " * @property {{{}}} {}", - js_type, - to_camel_case(&field.name) - ) - .unwrap(); - } - - writeln!(output, " */\n").unwrap(); - - for (child_name, child_node) in children { - if let TreeNode::Branch(grandchildren) = child_node { - let child_fields = get_node_fields(grandchildren, pattern_lookup); - if !pattern_lookup.contains_key(&child_fields) { - let child_type_name = format!("{}_{}", name, to_pascal_case(child_name)); - generate_tree_typedef( - output, - &child_type_name, - child_node, - pattern_lookup, - metadata, - generated, - ); - } - } - } -} - -fn generate_main_client( - output: &mut String, - catalog: &TreeNode, - metadata: &ClientMetadata, - endpoints: &[Endpoint], -) { - let pattern_lookup = metadata.pattern_lookup(); - - writeln!(output, "/**").unwrap(); - writeln!( - output, - " * Main BRK client with catalog tree and API methods" - ) - .unwrap(); - writeln!(output, " * @extends BrkClientBase").unwrap(); - writeln!(output, " */").unwrap(); - writeln!(output, "class BrkClient extends BrkClientBase 
{{").unwrap(); - - // Generate static properties for constants - generate_static_constants(output); - - writeln!(output, " /**").unwrap(); - writeln!(output, " * @param {{BrkClientOptions|string}} options").unwrap(); - writeln!(output, " */").unwrap(); - writeln!(output, " constructor(options) {{").unwrap(); - writeln!(output, " super(options);").unwrap(); - writeln!(output, " /** @type {{CatalogTree}} */").unwrap(); - writeln!(output, " this.tree = this._buildTree('');").unwrap(); - writeln!(output, " }}\n").unwrap(); - - writeln!(output, " /**").unwrap(); - writeln!(output, " * @private").unwrap(); - writeln!(output, " * @param {{string}} basePath").unwrap(); - writeln!(output, " * @returns {{CatalogTree}}").unwrap(); - writeln!(output, " */").unwrap(); - writeln!(output, " _buildTree(basePath) {{").unwrap(); - writeln!(output, " return {{").unwrap(); - generate_tree_initializer(output, catalog, "", 3, &pattern_lookup, metadata); - writeln!(output, " }};").unwrap(); - writeln!(output, " }}\n").unwrap(); - - generate_api_methods(output, endpoints); - - writeln!(output, "}}\n").unwrap(); - - writeln!( - output, - "export {{ BrkClient, BrkClientBase, BrkError, MetricNode }};" - ) - .unwrap(); -} - -fn generate_tree_initializer( - output: &mut String, - node: &TreeNode, - accumulated_name: &str, - indent: usize, - pattern_lookup: &std::collections::HashMap, String>, - metadata: &ClientMetadata, -) { - let indent_str = " ".repeat(indent); - - if let TreeNode::Branch(children) = node { - for (i, (child_name, child_node)) in children.iter().enumerate() { - let field_name = to_camel_case(child_name); - let comma = if i < children.len() - 1 { "," } else { "" }; - - match child_node { - TreeNode::Leaf(leaf) => { - let metric_path = format!("/{}", leaf.name()); - if let Some(accessor) = metadata.find_index_set_pattern(leaf.indexes()) { - writeln!( - output, - "{}{}: create{}(this, '{}'){}", - indent_str, field_name, accessor.name, metric_path, comma - ) - .unwrap(); - } 
else { - writeln!( - output, - "{}{}: new MetricNode(this, '{}'){}", - indent_str, field_name, metric_path, comma - ) - .unwrap(); - } - } - TreeNode::Branch(grandchildren) => { - let child_fields = get_node_fields(grandchildren, pattern_lookup); - if let Some(pattern_name) = pattern_lookup.get(&child_fields) { - let pattern = metadata - .structural_patterns - .iter() - .find(|p| &p.name == pattern_name); - let is_parameterizable = - pattern.map(|p| p.is_parameterizable()).unwrap_or(false); - - let arg = if is_parameterizable { - get_pattern_instance_base(child_node, child_name) - } else if accumulated_name.is_empty() { - format!("/{}", child_name) - } else { - format!("{}/{}", accumulated_name, child_name) - }; - - writeln!( - output, - "{}{}: create{}(this, '{}'){}", - indent_str, field_name, pattern_name, arg, comma - ) - .unwrap(); - } else { - let child_acc = - infer_child_accumulated_name(child_node, accumulated_name, child_name); - writeln!(output, "{}{}: {{", indent_str, field_name).unwrap(); - generate_tree_initializer( - output, - child_node, - &child_acc, - indent + 1, - pattern_lookup, - metadata, - ); - writeln!(output, "{}}}{}", indent_str, comma).unwrap(); - } - } - } - } - } -} - -fn infer_child_accumulated_name(node: &TreeNode, parent_acc: &str, field_name: &str) -> String { - if let Some(leaf_name) = get_first_leaf_name(node) - && let Some(pos) = leaf_name.find(field_name) - { - if pos == 0 { - return field_name.to_string(); - } else if leaf_name.chars().nth(pos - 1) == Some('_') { - if parent_acc.is_empty() { - return field_name.to_string(); - } - return format!("{}_{}", parent_acc, field_name); - } - } - - if parent_acc.is_empty() { - field_name.to_string() - } else { - format!("{}_{}", parent_acc, field_name) - } -} - -fn generate_api_methods(output: &mut String, endpoints: &[Endpoint]) { - for endpoint in endpoints { - if !endpoint.should_generate() { - continue; - } - - let method_name = endpoint_to_method_name(endpoint); - let return_type = 
endpoint.response_type.as_deref().unwrap_or("*"); - - writeln!(output, " /**").unwrap(); - if let Some(summary) = &endpoint.summary { - writeln!(output, " * {}", summary).unwrap(); - } - if let Some(desc) = &endpoint.description - && endpoint.summary.as_ref() != Some(desc) - { - writeln!(output, " * @description {}", desc).unwrap(); - } - - for param in &endpoint.path_params { - let desc = param.description.as_deref().unwrap_or(""); - writeln!( - output, - " * @param {{{}}} {} {}", - param.param_type, param.name, desc - ) - .unwrap(); - } - for param in &endpoint.query_params { - let optional = if param.required { "" } else { "=" }; - let desc = param.description.as_deref().unwrap_or(""); - writeln!( - output, - " * @param {{{}{}}} [{}] {}", - param.param_type, optional, param.name, desc - ) - .unwrap(); - } - - writeln!(output, " * @returns {{Promise<{}>}}", return_type).unwrap(); - writeln!(output, " */").unwrap(); - - let params = build_method_params(endpoint); - writeln!(output, " async {}({}) {{", method_name, params).unwrap(); - - let path = build_path_template(&endpoint.path, &endpoint.path_params); - - if endpoint.query_params.is_empty() { - writeln!(output, " return this.get(`{}`);", path).unwrap(); - } else { - writeln!(output, " const params = new URLSearchParams();").unwrap(); - for param in &endpoint.query_params { - if param.required { - writeln!( - output, - " params.set('{}', String({}));", - param.name, param.name - ) - .unwrap(); - } else { - writeln!( - output, - " if ({} !== undefined) params.set('{}', String({}));", - param.name, param.name, param.name - ) - .unwrap(); - } - } - writeln!(output, " const query = params.toString();").unwrap(); - writeln!( - output, - " return this.get(`{}${{query ? '?' 
+ query : ''}}`);", - path - ) - .unwrap(); - } - - writeln!(output, " }}\n").unwrap(); - } -} - -fn endpoint_to_method_name(endpoint: &Endpoint) -> String { - to_camel_case(&endpoint.operation_name()) -} - -fn build_method_params(endpoint: &Endpoint) -> String { - let mut params = Vec::new(); - for param in &endpoint.path_params { - params.push(param.name.clone()); - } - for param in &endpoint.query_params { - params.push(param.name.clone()); - } - params.join(", ") -} - -fn build_path_template(path: &str, path_params: &[super::Parameter]) -> String { - let mut result = path.to_string(); - for param in path_params { - let placeholder = format!("{{{}}}", param.name); - let interpolation = format!("${{{}}}", param.name); - result = result.replace(&placeholder, &interpolation); - } - result -} diff --git a/crates/brk_binder/src/python.rs b/crates/brk_binder/src/python.rs deleted file mode 100644 index 2f6f6af4c..000000000 --- a/crates/brk_binder/src/python.rs +++ /dev/null @@ -1,999 +0,0 @@ -use std::{collections::HashSet, fmt::Write as FmtWrite, fs, io, path::Path}; - -use brk_cohort::{ - AGE_RANGE_NAMES, AMOUNT_RANGE_NAMES, EPOCH_NAMES, GE_AMOUNT_NAMES, LT_AMOUNT_NAMES, - MAX_AGE_NAMES, MIN_AGE_NAMES, SPENDABLE_TYPE_NAMES, TERM_NAMES, YEAR_NAMES, -}; -use brk_types::{Index, TreeNode, pools}; -use serde::Serialize; -use serde_json::Value; - -use crate::{ - ClientMetadata, Endpoint, FieldNamePosition, IndexSetPattern, PatternField, StructuralPattern, - TypeSchemas, VERSION, extract_inner_type, get_fields_with_child_info, get_node_fields, - get_pattern_instance_base, to_pascal_case, to_snake_case, -}; - -/// Generate Python client from metadata and OpenAPI endpoints. -/// -/// `output_path` is the full path to the output file (e.g., "packages/brk_client/__init__.py"). 
-pub fn generate_python_client( - metadata: &ClientMetadata, - endpoints: &[Endpoint], - schemas: &TypeSchemas, - output_path: &Path, -) -> io::Result<()> { - let mut output = String::new(); - - writeln!(output, "# Auto-generated BRK Python client").unwrap(); - writeln!(output, "# Do not edit manually\n").unwrap(); - writeln!(output, "from __future__ import annotations").unwrap(); - writeln!( - output, - "from typing import TypeVar, Generic, Any, Optional, List, Literal, TypedDict, Final, Union" - ) - .unwrap(); - writeln!(output, "import httpx\n").unwrap(); - writeln!(output, "T = TypeVar('T')\n").unwrap(); - - generate_type_definitions(&mut output, schemas); - generate_base_client(&mut output); - generate_metric_node(&mut output); - generate_index_accessors(&mut output, &metadata.index_set_patterns); - generate_structural_patterns(&mut output, &metadata.structural_patterns, metadata); - generate_tree_classes(&mut output, &metadata.catalog, metadata); - generate_main_client(&mut output, endpoints); - - fs::write(output_path, output)?; - - Ok(()) -} - -fn generate_class_constants(output: &mut String) { - fn class_const(output: &mut String, name: &str, value: &T) { - let json = serde_json::to_string_pretty(value).unwrap(); - // Indent all lines for class body - let indented = json - .lines() - .enumerate() - .map(|(i, line)| { - if i == 0 { - format!(" {} = {}", name, line) - } else { - format!(" {}", line) - } - }) - .collect::>() - .join("\n"); - writeln!(output, "{}\n", indented).unwrap(); - } - - // VERSION - writeln!(output, " VERSION = \"v{}\"\n", VERSION).unwrap(); - - // INDEXES - let indexes = Index::all(); - let indexes_list: Vec<&str> = indexes.iter().map(|i| i.serialize_long()).collect(); - class_const(output, "INDEXES", &indexes_list); - - // POOL_ID_TO_POOL_NAME - let pools = pools(); - let mut sorted_pools: Vec<_> = pools.iter().collect(); - sorted_pools.sort_by(|a, b| a.name.to_lowercase().cmp(&b.name.to_lowercase())); - let pool_map: 
std::collections::BTreeMap = sorted_pools - .iter() - .map(|p| (p.slug().to_string(), p.name)) - .collect(); - class_const(output, "POOL_ID_TO_POOL_NAME", &pool_map); - - // Cohort names - class_const(output, "TERM_NAMES", &TERM_NAMES); - class_const(output, "EPOCH_NAMES", &EPOCH_NAMES); - class_const(output, "YEAR_NAMES", &YEAR_NAMES); - class_const(output, "SPENDABLE_TYPE_NAMES", &SPENDABLE_TYPE_NAMES); - class_const(output, "AGE_RANGE_NAMES", &AGE_RANGE_NAMES); - class_const(output, "MAX_AGE_NAMES", &MAX_AGE_NAMES); - class_const(output, "MIN_AGE_NAMES", &MIN_AGE_NAMES); - class_const(output, "AMOUNT_RANGE_NAMES", &AMOUNT_RANGE_NAMES); - class_const(output, "GE_AMOUNT_NAMES", &GE_AMOUNT_NAMES); - class_const(output, "LT_AMOUNT_NAMES", <_AMOUNT_NAMES); -} - -fn generate_type_definitions(output: &mut String, schemas: &TypeSchemas) { - if schemas.is_empty() { - return; - } - - writeln!(output, "# Type definitions\n").unwrap(); - - let sorted_names = topological_sort_schemas(schemas); - - for name in sorted_names { - let Some(schema) = schemas.get(&name) else { - continue; - }; - if let Some(props) = schema.get("properties").and_then(|p| p.as_object()) { - writeln!(output, "class {}(TypedDict):", name).unwrap(); - for (prop_name, prop_schema) in props { - let prop_type = schema_to_python_type_ctx(prop_schema, Some(&name)); - let safe_name = escape_python_keyword(prop_name); - writeln!(output, " {}: {}", safe_name, prop_type).unwrap(); - } - writeln!(output).unwrap(); - // } else if is_enum_schema(schema) { - // let py_type = schema_to_python_type_ctx(schema, Some(&name)); - // writeln!(output, "{} = {}", name, py_type).unwrap(); - } else { - let py_type = schema_to_python_type_ctx(schema, Some(&name)); - writeln!(output, "{} = {}", name, py_type).unwrap(); - } - } - writeln!(output).unwrap(); -} - -/// Topologically sort schema names so dependencies come before dependents (avoids forward references). 
-/// Types that reference other types (via $ref) must be defined after their dependencies. -fn topological_sort_schemas(schemas: &TypeSchemas) -> Vec { - use std::collections::{HashMap, HashSet}; - - // Build dependency graph - let mut deps: HashMap> = HashMap::new(); - for (name, schema) in schemas { - let mut type_deps = HashSet::new(); - collect_schema_refs(schema, &mut type_deps); - // Only keep deps that are in our schemas - type_deps.retain(|d| schemas.contains_key(d)); - deps.insert(name.clone(), type_deps); - } - - // Kahn's algorithm for topological sort - let mut in_degree: HashMap = HashMap::new(); - for name in schemas.keys() { - in_degree.insert(name.clone(), 0); - } - for type_deps in deps.values() { - for dep in type_deps { - *in_degree.entry(dep.clone()).or_insert(0) += 1; - } - } - - // Start with types that have no dependents (are not referenced by others) - let mut queue: Vec = in_degree - .iter() - .filter(|(_, count)| **count == 0) - .map(|(name, _)| name.clone()) - .collect(); - queue.sort(); // Deterministic order - - let mut result = Vec::new(); - while let Some(name) = queue.pop() { - result.push(name.clone()); - if let Some(type_deps) = deps.get(&name) { - for dep in type_deps { - if let Some(count) = in_degree.get_mut(dep) { - *count = count.saturating_sub(1); - if *count == 0 { - queue.push(dep.clone()); - queue.sort(); // Keep sorted for determinism - } - } - } - } - } - - // Reverse so dependencies come first - result.reverse(); - - // Add any types that weren't processed (e.g., due to circular refs or other edge cases) - let result_set: HashSet<_> = result.iter().cloned().collect(); - let mut missing: Vec<_> = schemas - .keys() - .filter(|k| !result_set.contains(*k)) - .cloned() - .collect(); - missing.sort(); - result.extend(missing); - - result -} - -/// Collect all type references ($ref) from a schema -fn collect_schema_refs(schema: &Value, refs: &mut std::collections::HashSet) { - match schema { - Value::Object(map) => { - if let 
Some(ref_path) = map.get("$ref").and_then(|r| r.as_str()) - && let Some(type_name) = ref_path.rsplit('/').next() - { - refs.insert(type_name.to_string()); - } - for value in map.values() { - collect_schema_refs(value, refs); - } - } - Value::Array(arr) => { - for item in arr { - collect_schema_refs(item, refs); - } - } - _ => {} - } -} - -/// Convert a single JSON type string to Python type -fn json_type_to_python(ty: &str, schema: &Value, current_type: Option<&str>) -> String { - match ty { - "integer" => "int".to_string(), - "number" => "float".to_string(), - "boolean" => "bool".to_string(), - "string" => "str".to_string(), - "null" => "None".to_string(), - "array" => { - let item_type = schema - .get("items") - .map(|s| schema_to_python_type_ctx(s, current_type)) - .unwrap_or_else(|| "Any".to_string()); - format!("List[{}]", item_type) - } - "object" => { - if let Some(add_props) = schema.get("additionalProperties") { - let value_type = schema_to_python_type_ctx(add_props, current_type); - return format!("dict[str, {}]", value_type); - } - "dict".to_string() - } - _ => "Any".to_string(), - } -} - -/// Convert JSON Schema to Python type with context for detecting self-references -fn schema_to_python_type_ctx(schema: &Value, current_type: Option<&str>) -> String { - if let Some(all_of) = schema.get("allOf").and_then(|v| v.as_array()) { - for item in all_of { - let resolved = schema_to_python_type_ctx(item, current_type); - if resolved != "Any" { - return resolved; - } - } - } - - // Handle $ref - if let Some(ref_path) = schema.get("$ref").and_then(|r| r.as_str()) { - let type_name = ref_path.rsplit('/').next().unwrap_or("Any"); - // Quote self-references to handle recursive types - if current_type == Some(type_name) { - return format!("\"{}\"", type_name); - } - return type_name.to_string(); - } - - // Handle enum (array of string values) - if let Some(enum_values) = schema.get("enum").and_then(|e| e.as_array()) { - let literals: Vec = enum_values - .iter() - 
.filter_map(|v| v.as_str()) - .map(|s| format!("\"{}\"", s)) - .collect(); - if !literals.is_empty() { - return format!("Literal[{}]", literals.join(", ")); - } - } - - if let Some(ty) = schema.get("type") { - if let Some(type_array) = ty.as_array() { - let types: Vec = type_array - .iter() - .filter_map(|t| t.as_str()) - .filter(|t| *t != "null") // Filter out null for cleaner Optional handling - .map(|t| json_type_to_python(t, schema, current_type)) - .collect(); - let has_null = type_array.iter().any(|t| t.as_str() == Some("null")); - - if types.len() == 1 { - let base_type = &types[0]; - return if has_null { - format!("Optional[{}]", base_type) - } else { - base_type.clone() - }; - } else if !types.is_empty() { - let union = format!("Union[{}]", types.join(", ")); - return if has_null { - format!("Optional[{}]", union) - } else { - union - }; - } - } - - if let Some(ty_str) = ty.as_str() { - return json_type_to_python(ty_str, schema, current_type); - } - } - - if let Some(variants) = schema - .get("anyOf") - .or_else(|| schema.get("oneOf")) - .and_then(|v| v.as_array()) - { - let types: Vec = variants - .iter() - .map(|v| schema_to_python_type_ctx(v, current_type)) - .collect(); - let filtered: Vec<_> = types.iter().filter(|t| *t != "Any").collect(); - if !filtered.is_empty() { - return format!( - "Union[{}]", - filtered - .iter() - .map(|s| s.as_str()) - .collect::>() - .join(", ") - ); - } - return format!("Union[{}]", types.join(", ")); - } - - // Check for format hint without type (common in OpenAPI) - if let Some(format) = schema.get("format").and_then(|f| f.as_str()) { - return match format { - "int32" | "int64" => "int".to_string(), - "float" | "double" => "float".to_string(), - "date" | "date-time" => "str".to_string(), - _ => "Any".to_string(), - }; - } - - "Any".to_string() -} - -/// Make a name safe for Python: escape keywords and prefix digit-starting names -fn escape_python_keyword(name: &str) -> String { - const PYTHON_KEYWORDS: &[&str] = &[ - 
"False", "None", "True", "and", "as", "assert", "async", "await", "break", "class", - "continue", "def", "del", "elif", "else", "except", "finally", "for", "from", "global", - "if", "import", "in", "is", "lambda", "nonlocal", "not", "or", "pass", "raise", "return", - "try", "while", "with", "yield", - ]; - // Names starting with digit need underscore prefix - let name = if name - .chars() - .next() - .map(|c| c.is_ascii_digit()) - .unwrap_or(false) - { - format!("_{}", name) - } else { - name.to_string() - }; - // Reserved keywords get underscore suffix - if PYTHON_KEYWORDS.contains(&name.as_str()) { - format!("{}_", name) - } else { - name - } -} - -/// Generate the base BrkClient class with HTTP functionality -fn generate_base_client(output: &mut String) { - writeln!( - output, - r#"class BrkError(Exception): - """Custom error class for BRK client errors.""" - - def __init__(self, message: str, status: Optional[int] = None): - super().__init__(message) - self.status = status - - -class BrkClientBase: - """Base HTTP client for making requests.""" - - def __init__(self, base_url: str, timeout: float = 30.0): - self.base_url = base_url - self.timeout = timeout - self._client = httpx.Client(timeout=timeout) - - def get(self, path: str) -> Any: - """Make a GET request.""" - try: - response = self._client.get(f"{{self.base_url}}{{path}}") - response.raise_for_status() - return response.json() - except httpx.HTTPStatusError as e: - raise BrkError(f"HTTP error: {{e.response.status_code}}", e.response.status_code) - except httpx.RequestError as e: - raise BrkError(str(e)) - - def close(self): - """Close the HTTP client.""" - self._client.close() - - def __enter__(self): - return self - - def __exit__(self, exc_type, exc_val, exc_tb): - self.close() - -"# - ) - .unwrap(); -} - -/// Generate the MetricNode class -fn generate_metric_node(output: &mut String) { - writeln!( - output, - r#"class MetricNode(Generic[T]): - """A metric node that can fetch data for different 
indexes.""" - - def __init__(self, client: BrkClientBase, path: str): - self._client = client - self._path = path - - def get(self) -> List[T]: - """Fetch all data points for this metric.""" - return self._client.get(self._path) - - def get_range(self, from_val: Optional[str] = None, to_val: Optional[str] = None) -> List[T]: - """Fetch data points within a range.""" - params = [] - if from_val is not None: - params.append(f"from={{from_val}}") - if to_val is not None: - params.append(f"to={{to_val}}") - query = "&".join(params) - return self._client.get(f"{{self._path}}?{{query}}" if query else self._path) - -"# - ) - .unwrap(); -} - -/// Generate index accessor classes -fn generate_index_accessors(output: &mut String, patterns: &[IndexSetPattern]) { - if patterns.is_empty() { - return; - } - - writeln!(output, "# Index accessor classes\n").unwrap(); - - for pattern in patterns { - writeln!(output, "class {}(Generic[T]):", pattern.name).unwrap(); - writeln!( - output, - " \"\"\"Index accessor for metrics with {} indexes.\"\"\"", - pattern.indexes.len() - ) - .unwrap(); - writeln!(output, " ").unwrap(); - writeln!( - output, - " def __init__(self, client: BrkClientBase, base_path: str):" - ) - .unwrap(); - - for index in &pattern.indexes { - let field_name = index_to_snake_case(index); - let path_segment = index.serialize_long(); - writeln!( - output, - " self.{}: MetricNode[T] = MetricNode(client, f'{{base_path}}/{}')", - field_name, path_segment - ) - .unwrap(); - } - - writeln!(output).unwrap(); - } -} - -/// Convert an Index to a snake_case field name (e.g., DateIndex -> by_date_index) -fn index_to_snake_case(index: &Index) -> String { - format!("by_{}", to_snake_case(index.serialize_long())) -} - -/// Generate structural pattern classes -fn generate_structural_patterns( - output: &mut String, - patterns: &[StructuralPattern], - metadata: &ClientMetadata, -) { - if patterns.is_empty() { - return; - } - - writeln!(output, "# Reusable structural pattern 
classes\n").unwrap(); - - for pattern in patterns { - let is_parameterizable = pattern.is_parameterizable(); - - // For generic patterns, inherit from Generic[T] - if pattern.is_generic { - writeln!(output, "class {}(Generic[T]):", pattern.name).unwrap(); - } else { - writeln!(output, "class {}:", pattern.name).unwrap(); - } - writeln!( - output, - " \"\"\"Pattern struct for repeated tree structure.\"\"\"" - ) - .unwrap(); - writeln!(output, " ").unwrap(); - - if is_parameterizable { - writeln!( - output, - " def __init__(self, client: BrkClientBase, acc: str):" - ) - .unwrap(); - writeln!( - output, - " \"\"\"Create pattern node with accumulated metric name.\"\"\"" - ) - .unwrap(); - } else { - writeln!( - output, - " def __init__(self, client: BrkClientBase, base_path: str):" - ) - .unwrap(); - } - - for field in &pattern.fields { - if is_parameterizable { - generate_parameterized_python_field(output, field, pattern, metadata); - } else { - generate_tree_path_python_field(output, field, metadata); - } - } - - writeln!(output).unwrap(); - } -} - -/// Generate a field using parameterized (prepend/append) metric name construction -fn generate_parameterized_python_field( - output: &mut String, - field: &PatternField, - pattern: &StructuralPattern, - metadata: &ClientMetadata, -) { - let field_name = to_snake_case(&field.name); - let py_type = field_to_python_type_generic(field, metadata, pattern.is_generic); - - // For branch fields, pass the accumulated name to nested pattern - if metadata.is_pattern_type(&field.rust_type) { - let child_acc = if let Some(pos) = pattern.get_field_position(&field.name) { - match pos { - FieldNamePosition::Append(suffix) => format!("f'{{acc}}{}'", suffix), - FieldNamePosition::Prepend(prefix) => format!("f'{}{{acc}}'", prefix), - FieldNamePosition::Identity => "acc".to_string(), - FieldNamePosition::SetBase(base) => format!("'{}'", base), - } - } else { - format!("f'{{acc}}_{}'", field.name) - }; - - writeln!( - output, - " self.{}: {} 
= {}(client, {})", - field_name, py_type, field.rust_type, child_acc - ) - .unwrap(); - return; - } - - // For leaf fields, construct the metric path based on position - let metric_expr = if let Some(pos) = pattern.get_field_position(&field.name) { - match pos { - FieldNamePosition::Append(suffix) => format!("f'{{acc}}{}'", suffix), - FieldNamePosition::Prepend(prefix) => format!("f'{}{{acc}}'", prefix), - FieldNamePosition::Identity => "acc".to_string(), - FieldNamePosition::SetBase(base) => format!("'{}'", base), - } - } else { - format!("f'{{acc}}_{}'", field.name) - }; - - if metadata.field_uses_accessor(field) { - let accessor = metadata.find_index_set_pattern(&field.indexes).unwrap(); - writeln!( - output, - " self.{}: {} = {}(client, {})", - field_name, py_type, accessor.name, metric_expr - ) - .unwrap(); - } else { - // Direct MetricNode without indexes - pass metric name - writeln!( - output, - " self.{}: {} = MetricNode(client, {})", - field_name, py_type, metric_expr - ) - .unwrap(); - } -} - -/// Generate a field using tree path construction (fallback for non-parameterizable patterns) -fn generate_tree_path_python_field( - output: &mut String, - field: &PatternField, - metadata: &ClientMetadata, -) { - let field_name = to_snake_case(&field.name); - let py_type = field_to_python_type(field, metadata); - - if metadata.is_pattern_type(&field.rust_type) { - writeln!( - output, - " self.{}: {} = {}(client, f'{{base_path}}_{}')", - field_name, py_type, field.rust_type, field.name - ) - .unwrap(); - } else if metadata.field_uses_accessor(field) { - let accessor = metadata.find_index_set_pattern(&field.indexes).unwrap(); - writeln!( - output, - " self.{}: {} = {}(client, f'{{base_path}}_{}')", - field_name, py_type, accessor.name, field.name - ) - .unwrap(); - } else { - writeln!( - output, - " self.{}: {} = MetricNode(client, f'{{base_path}}_{}')", - field_name, py_type, field.name - ) - .unwrap(); - } -} - -/// Convert pattern field to Python type annotation 
-fn field_to_python_type(field: &PatternField, metadata: &ClientMetadata) -> String { - field_to_python_type_generic(field, metadata, false) -} - -/// Convert pattern field to Python type annotation, with optional generic support -fn field_to_python_type_generic( - field: &PatternField, - metadata: &ClientMetadata, - is_generic: bool, -) -> String { - field_to_python_type_with_generic_value(field, metadata, is_generic, None) -} - -/// Convert pattern field to Python type annotation. -/// - `is_generic`: If true and field.rust_type is "T", use T in the output -/// - `generic_value_type`: For branch fields that reference a generic pattern, this is the concrete type to substitute -fn field_to_python_type_with_generic_value( - field: &PatternField, - metadata: &ClientMetadata, - is_generic: bool, - generic_value_type: Option<&str>, -) -> String { - // For generic patterns, use T instead of concrete value type - // Also extract inner type from wrappers like Close -> Dollars - let value_type = if is_generic && field.rust_type == "T" { - "T".to_string() - } else { - extract_inner_type(&field.rust_type) - }; - - if metadata.is_pattern_type(&field.rust_type) { - if metadata.is_pattern_generic(&field.rust_type) { - // Use type_param from field, then generic_value_type, then T if parent is generic - let type_param = field - .type_param - .as_deref() - .or(generic_value_type) - .unwrap_or(if is_generic { "T" } else { "Any" }); - return format!("{}[{}]", field.rust_type, type_param); - } - field.rust_type.clone() - } else if field.is_branch() { - // Non-pattern branch struct - field.rust_type.clone() - } else if let Some(accessor) = metadata.find_index_set_pattern(&field.indexes) { - // Leaf with accessor - use value_type as the generic - format!("{}[{}]", accessor.name, value_type) - } else { - // Leaf - use value_type as the generic - format!("MetricNode[{}]", value_type) - } -} - -/// Generate tree classes -fn generate_tree_classes(output: &mut String, catalog: &TreeNode, 
metadata: &ClientMetadata) { - writeln!(output, "# Catalog tree classes\n").unwrap(); - - let pattern_lookup = metadata.pattern_lookup(); - let mut generated = HashSet::new(); - generate_tree_class( - output, - "CatalogTree", - catalog, - &pattern_lookup, - metadata, - &mut generated, - ); -} - -/// Recursively generate tree classes -fn generate_tree_class( - output: &mut String, - name: &str, - node: &TreeNode, - pattern_lookup: &std::collections::HashMap, String>, - metadata: &ClientMetadata, - generated: &mut HashSet, -) { - let TreeNode::Branch(children) = node else { - return; - }; - - let fields_with_child_info = get_fields_with_child_info(children, name, pattern_lookup); - let fields: Vec = fields_with_child_info - .iter() - .map(|(f, _)| f.clone()) - .collect(); - - // Skip if this matches a pattern (already generated) - if pattern_lookup.contains_key(&fields) - && pattern_lookup.get(&fields) != Some(&name.to_string()) - { - return; - } - - if generated.contains(name) { - return; - } - generated.insert(name.to_string()); - - writeln!(output, "class {}:", name).unwrap(); - writeln!(output, " \"\"\"Catalog tree node.\"\"\"").unwrap(); - writeln!(output, " ").unwrap(); - writeln!( - output, - " def __init__(self, client: BrkClientBase, base_path: str = ''):" - ) - .unwrap(); - - for ((field, child_fields_opt), (child_name, child_node)) in - fields_with_child_info.iter().zip(children.iter()) - { - // Look up type parameter for generic patterns - let generic_value_type = child_fields_opt - .as_ref() - .and_then(|cf| metadata.get_type_param(cf)) - .map(String::as_str); - let py_type = - field_to_python_type_with_generic_value(field, metadata, false, generic_value_type); - let field_name_py = to_snake_case(&field.name); - - if metadata.is_pattern_type(&field.rust_type) { - let pattern = metadata.find_pattern(&field.rust_type); - let is_parameterizable = pattern.is_some_and(|p| p.is_parameterizable()); - - if is_parameterizable { - let metric_base = 
get_pattern_instance_base(child_node, child_name); - writeln!( - output, - " self.{}: {} = {}(client, '{}')", - field_name_py, py_type, field.rust_type, metric_base - ) - .unwrap(); - } else { - writeln!( - output, - " self.{}: {} = {}(client, f'{{base_path}}_{}')", - field_name_py, py_type, field.rust_type, field.name - ) - .unwrap(); - } - } else if metadata.field_uses_accessor(field) { - let accessor = metadata.find_index_set_pattern(&field.indexes).unwrap(); - writeln!( - output, - " self.{}: {} = {}(client, f'{{base_path}}_{}')", - field_name_py, py_type, accessor.name, field.name - ) - .unwrap(); - } else if field.is_branch() { - // Non-pattern branch - instantiate the nested class - writeln!( - output, - " self.{}: {} = {}(client, f'{{base_path}}_{}')", - field_name_py, py_type, field.rust_type, field.name - ) - .unwrap(); - } else { - // Leaf metric - direct MetricNode with full API path - writeln!( - output, - " self.{}: {} = MetricNode(client, f'{{base_path}}_{}')", - field_name_py, py_type, field.name - ) - .unwrap(); - } - } - - writeln!(output).unwrap(); - - // Generate child classes - for (child_name, child_node) in children { - if let TreeNode::Branch(grandchildren) = child_node { - let child_fields = get_node_fields(grandchildren, pattern_lookup); - if !pattern_lookup.contains_key(&child_fields) { - let child_class_name = format!("{}_{}", name, to_pascal_case(child_name)); - generate_tree_class( - output, - &child_class_name, - child_node, - pattern_lookup, - metadata, - generated, - ); - } - } - } -} - -/// Generate the main client class -fn generate_main_client(output: &mut String, endpoints: &[Endpoint]) { - writeln!(output, "class BrkClient(BrkClientBase):").unwrap(); - writeln!( - output, - " \"\"\"Main BRK client with catalog tree and API methods.\"\"\"" - ) - .unwrap(); - writeln!(output).unwrap(); - - // Generate class-level constants - generate_class_constants(output); - - writeln!( - output, - " def __init__(self, base_url: str = 
'http://localhost:3000', timeout: float = 30.0):" - ) - .unwrap(); - writeln!(output, " super().__init__(base_url, timeout)").unwrap(); - writeln!(output, " self.tree = CatalogTree(self)").unwrap(); - writeln!(output).unwrap(); - - // Generate API methods - generate_api_methods(output, endpoints); -} - -/// Generate API methods from OpenAPI endpoints -fn generate_api_methods(output: &mut String, endpoints: &[Endpoint]) { - for endpoint in endpoints { - if !endpoint.should_generate() { - continue; - } - - let method_name = endpoint_to_method_name(endpoint); - let return_type = endpoint - .response_type - .as_deref() - .map(js_type_to_python) - .unwrap_or_else(|| "Any".to_string()); - - // Build method signature - let params = build_method_params(endpoint); - writeln!( - output, - " def {}(self{}) -> {}:", - method_name, params, return_type - ) - .unwrap(); - - // Docstring - match (&endpoint.summary, &endpoint.description) { - (Some(summary), Some(desc)) if summary != desc => { - writeln!(output, " \"\"\"{}.", summary.trim_end_matches('.')).unwrap(); - writeln!(output).unwrap(); - writeln!(output, " {}\"\"\"", desc).unwrap(); - } - (Some(summary), _) => { - writeln!(output, " \"\"\"{}\"\"\"", summary).unwrap(); - } - (None, Some(desc)) => { - writeln!(output, " \"\"\"{}\"\"\"", desc).unwrap(); - } - (None, None) => {} - } - - // Build path - let path = build_path_template(&endpoint.path, &endpoint.path_params); - - if endpoint.query_params.is_empty() { - if endpoint.path_params.is_empty() { - writeln!(output, " return self.get('{}')", path).unwrap(); - } else { - writeln!(output, " return self.get(f'{}')", path).unwrap(); - } - } else { - writeln!(output, " params = []").unwrap(); - for param in &endpoint.query_params { - // Use safe name for Python variable, original name for API query parameter - let safe_name = escape_python_keyword(¶m.name); - if param.required { - writeln!( - output, - " params.append(f'{}={{{}}}')", - param.name, safe_name - ) - .unwrap(); - } 
else { - writeln!( - output, - " if {} is not None: params.append(f'{}={{{}}}')", - safe_name, param.name, safe_name - ) - .unwrap(); - } - } - writeln!(output, " query = '&'.join(params)").unwrap(); - writeln!( - output, - " return self.get(f'{}{{\"?\" + query if query else \"\"}}')", - path - ) - .unwrap(); - } - - writeln!(output).unwrap(); - } -} - -fn endpoint_to_method_name(endpoint: &Endpoint) -> String { - to_snake_case(&endpoint.operation_name()) -} - -/// Convert JS-style type to Python type (e.g., "Txid[]" -> "List[Txid]", "number" -> "int") -fn js_type_to_python(js_type: &str) -> String { - if let Some(inner) = js_type.strip_suffix("[]") { - format!("List[{}]", js_type_to_python(inner)) - } else { - match js_type { - "number" => "int".to_string(), - "boolean" => "bool".to_string(), - "string" => "str".to_string(), - "null" => "None".to_string(), - "Object" | "object" => "dict".to_string(), - "*" => "Any".to_string(), - _ => js_type.to_string(), - } - } -} - -fn build_method_params(endpoint: &Endpoint) -> String { - let mut params = Vec::new(); - for param in &endpoint.path_params { - let safe_name = escape_python_keyword(¶m.name); - let py_type = js_type_to_python(¶m.param_type); - params.push(format!(", {}: {}", safe_name, py_type)); - } - for param in &endpoint.query_params { - let safe_name = escape_python_keyword(¶m.name); - let py_type = js_type_to_python(¶m.param_type); - if param.required { - params.push(format!(", {}: {}", safe_name, py_type)); - } else { - params.push(format!(", {}: Optional[{}] = None", safe_name, py_type)); - } - } - params.join("") -} - -fn build_path_template(path: &str, path_params: &[super::Parameter]) -> String { - let mut result = path.to_string(); - for param in path_params { - let placeholder = format!("{{{}}}", param.name); - // Use escaped name for Python variable interpolation in f-string - let safe_name = escape_python_keyword(¶m.name); - let interpolation = format!("{{{}}}", safe_name); - result = 
result.replace(&placeholder, &interpolation); - } - result -} diff --git a/crates/brk_binder/src/rust.rs b/crates/brk_binder/src/rust.rs deleted file mode 100644 index 4a700f3ac..000000000 --- a/crates/brk_binder/src/rust.rs +++ /dev/null @@ -1,733 +0,0 @@ -use std::{collections::HashSet, fmt::Write as FmtWrite, fs, io, path::Path}; - -use brk_types::{Index, TreeNode}; - -use crate::{ - ClientMetadata, Endpoint, FieldNamePosition, IndexSetPattern, PatternField, StructuralPattern, - extract_inner_type, get_fields_with_child_info, get_node_fields, get_pattern_instance_base, - to_pascal_case, to_snake_case, -}; - -/// Generate Rust client from metadata and OpenAPI endpoints. -/// -/// `output_path` is the full path to the output file (e.g., "crates/brk_client/src/lib.rs"). -pub fn generate_rust_client( - metadata: &ClientMetadata, - endpoints: &[Endpoint], - output_path: &Path, -) -> io::Result<()> { - let mut output = String::new(); - - writeln!(output, "// Auto-generated BRK Rust client").unwrap(); - writeln!(output, "// Do not edit manually\n").unwrap(); - writeln!(output, "#![allow(non_camel_case_types)]").unwrap(); - writeln!(output, "#![allow(dead_code)]").unwrap(); - writeln!(output, "#![allow(unused_variables)]").unwrap(); - writeln!(output, "#![allow(clippy::useless_format)]").unwrap(); - writeln!(output, "#![allow(clippy::unnecessary_to_owned)]\n").unwrap(); - - generate_imports(&mut output); - generate_base_client(&mut output); - generate_metric_node(&mut output); - generate_index_accessors(&mut output, &metadata.index_set_patterns); - generate_pattern_structs(&mut output, &metadata.structural_patterns, metadata); - generate_tree(&mut output, &metadata.catalog, metadata); - generate_main_client(&mut output, endpoints); - - fs::write(output_path, output)?; - - Ok(()) -} - -fn generate_imports(output: &mut String) { - writeln!( - output, - r#"use std::sync::Arc; -use serde::de::DeserializeOwned; -pub use brk_cohort::*; -pub use brk_types::*; - -"# - ) - 
.unwrap(); -} - -fn generate_base_client(output: &mut String) { - writeln!( - output, - r#"/// Error type for BRK client operations. -#[derive(Debug)] -pub struct BrkError {{ - pub message: String, -}} - -impl std::fmt::Display for BrkError {{ - fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {{ - write!(f, "{{}}", self.message) - }} -}} - -impl std::error::Error for BrkError {{}} - -/// Result type for BRK client operations. -pub type Result = std::result::Result; - -/// Options for configuring the BRK client. -#[derive(Debug, Clone)] -pub struct BrkClientOptions {{ - pub base_url: String, - pub timeout_secs: u64, -}} - -impl Default for BrkClientOptions {{ - fn default() -> Self {{ - Self {{ - base_url: "http://localhost:3000".to_string(), - timeout_secs: 30, - }} - }} -}} - -/// Base HTTP client for making requests. -#[derive(Debug, Clone)] -pub struct BrkClientBase {{ - base_url: String, - timeout_secs: u64, -}} - -impl BrkClientBase {{ - /// Create a new client with the given base URL. - pub fn new(base_url: impl Into) -> Self {{ - Self {{ - base_url: base_url.into(), - timeout_secs: 30, - }} - }} - - /// Create a new client with options. - pub fn with_options(options: BrkClientOptions) -> Self {{ - Self {{ - base_url: options.base_url, - timeout_secs: options.timeout_secs, - }} - }} - - /// Make a GET request. - pub fn get(&self, path: &str) -> Result {{ - let url = format!("{{}}{{}}", self.base_url, path); - let response = minreq::get(&url) - .with_timeout(self.timeout_secs) - .send() - .map_err(|e| BrkError {{ message: e.to_string() }})?; - - if response.status_code >= 400 {{ - return Err(BrkError {{ - message: format!("HTTP {{}}", response.status_code), - }}); - }} - - response - .json() - .map_err(|e| BrkError {{ message: e.to_string() }}) - }} -}} - -"# - ) - .unwrap(); -} - -fn generate_metric_node(output: &mut String) { - writeln!( - output, - r#"/// A metric node that can fetch data for different indexes. 
-pub struct MetricNode {{ - client: Arc, - path: String, - _marker: std::marker::PhantomData, -}} - -impl MetricNode {{ - pub fn new(client: Arc, path: String) -> Self {{ - Self {{ - client, - path, - _marker: std::marker::PhantomData, - }} - }} - - /// Fetch all data points for this metric. - pub fn get(&self) -> Result> {{ - self.client.get(&self.path) - }} - - /// Fetch data points within a range. - pub fn get_range(&self, from: Option<&str>, to: Option<&str>) -> Result> {{ - let mut params = Vec::new(); - if let Some(f) = from {{ params.push(format!("from={{}}", f)); }} - if let Some(t) = to {{ params.push(format!("to={{}}", t)); }} - let path = if params.is_empty() {{ - self.path.clone() - }} else {{ - format!("{{}}?{{}}", self.path, params.join("&")) - }}; - self.client.get(&path) - }} -}} - -"# - ) - .unwrap(); -} - -fn generate_index_accessors(output: &mut String, patterns: &[IndexSetPattern]) { - if patterns.is_empty() { - return; - } - - writeln!(output, "// Index accessor structs\n").unwrap(); - - for pattern in patterns { - writeln!( - output, - "/// Index accessor for metrics with {} indexes.", - pattern.indexes.len() - ) - .unwrap(); - writeln!(output, "pub struct {} {{", pattern.name).unwrap(); - - for index in &pattern.indexes { - let field_name = index_to_field_name(index); - writeln!(output, " pub {}: MetricNode,", field_name).unwrap(); - } - - writeln!(output, "}}\n").unwrap(); - - // Generate impl block with constructor - writeln!(output, "impl {} {{", pattern.name).unwrap(); - writeln!( - output, - " pub fn new(client: Arc, base_path: &str) -> Self {{" - ) - .unwrap(); - writeln!(output, " Self {{").unwrap(); - - for index in &pattern.indexes { - let field_name = index_to_field_name(index); - let path_segment = index.serialize_long(); - writeln!( - output, - " {}: MetricNode::new(client.clone(), format!(\"{{base_path}}/{}\")),", - field_name, path_segment - ) - .unwrap(); - } - - writeln!(output, " }}").unwrap(); - writeln!(output, " 
}}").unwrap(); - writeln!(output, "}}\n").unwrap(); - } -} - -fn index_to_field_name(index: &Index) -> String { - format!("by_{}", to_snake_case(index.serialize_long())) -} - -fn generate_pattern_structs( - output: &mut String, - patterns: &[StructuralPattern], - metadata: &ClientMetadata, -) { - if patterns.is_empty() { - return; - } - - writeln!(output, "// Reusable pattern structs\n").unwrap(); - - for pattern in patterns { - let is_parameterizable = pattern.is_parameterizable(); - let generic_params = if pattern.is_generic { "" } else { "" }; - - writeln!(output, "/// Pattern struct for repeated tree structure.").unwrap(); - writeln!(output, "pub struct {}{} {{", pattern.name, generic_params).unwrap(); - - for field in &pattern.fields { - let field_name = to_snake_case(&field.name); - let type_annotation = - field_to_type_annotation_generic(field, metadata, pattern.is_generic); - writeln!(output, " pub {}: {},", field_name, type_annotation).unwrap(); - } - - writeln!(output, "}}\n").unwrap(); - - // Generate impl block with constructor - let impl_generic = if pattern.is_generic { - "" - } else { - "" - }; - writeln!( - output, - "impl{} {}{} {{", - impl_generic, pattern.name, generic_params - ) - .unwrap(); - - if is_parameterizable { - writeln!( - output, - " /// Create a new pattern node with accumulated metric name." 
- ) - .unwrap(); - writeln!( - output, - " pub fn new(client: Arc, acc: &str) -> Self {{" - ) - .unwrap(); - } else { - writeln!( - output, - " pub fn new(client: Arc, base_path: &str) -> Self {{" - ) - .unwrap(); - } - writeln!(output, " Self {{").unwrap(); - - for field in &pattern.fields { - if is_parameterizable { - generate_parameterized_rust_field(output, field, pattern, metadata); - } else { - generate_tree_path_rust_field(output, field, metadata); - } - } - - writeln!(output, " }}").unwrap(); - writeln!(output, " }}").unwrap(); - writeln!(output, "}}\n").unwrap(); - } -} - -fn generate_parameterized_rust_field( - output: &mut String, - field: &PatternField, - pattern: &StructuralPattern, - metadata: &ClientMetadata, -) { - let field_name = to_snake_case(&field.name); - - if metadata.is_pattern_type(&field.rust_type) { - let child_acc = if let Some(pos) = pattern.get_field_position(&field.name) { - match pos { - FieldNamePosition::Append(suffix) => format!("&format!(\"{{acc}}{}\")", suffix), - FieldNamePosition::Prepend(prefix) => format!("&format!(\"{}{{acc}}\")", prefix), - FieldNamePosition::Identity => "acc".to_string(), - FieldNamePosition::SetBase(base) => format!("\"{}\"", base), - } - } else { - format!("&format!(\"{{acc}}_{}\")", field.name) - }; - - writeln!( - output, - " {}: {}::new(client.clone(), {}),", - field_name, field.rust_type, child_acc - ) - .unwrap(); - return; - } - - let metric_expr = if let Some(pos) = pattern.get_field_position(&field.name) { - match pos { - FieldNamePosition::Append(suffix) => format!("format!(\"{{acc}}{}\")", suffix), - FieldNamePosition::Prepend(prefix) => format!("format!(\"{}{{acc}}\")", prefix), - FieldNamePosition::Identity => "acc.to_string()".to_string(), - FieldNamePosition::SetBase(base) => format!("\"{}\".to_string()", base), - } - } else { - format!("format!(\"{{acc}}_{}\")", field.name) - }; - - if metadata.field_uses_accessor(field) { - let accessor = 
metadata.find_index_set_pattern(&field.indexes).unwrap(); - writeln!( - output, - " {}: {}::new(client.clone(), &{}),", - field_name, accessor.name, metric_expr - ) - .unwrap(); - } else { - writeln!( - output, - " {}: MetricNode::new(client.clone(), {}),", - field_name, metric_expr - ) - .unwrap(); - } -} - -fn generate_tree_path_rust_field( - output: &mut String, - field: &PatternField, - metadata: &ClientMetadata, -) { - let field_name = to_snake_case(&field.name); - - if metadata.is_pattern_type(&field.rust_type) { - writeln!( - output, - " {}: {}::new(client.clone(), &format!(\"{{base_path}}_{}\")),", - field_name, field.rust_type, field.name - ) - .unwrap(); - } else if metadata.field_uses_accessor(field) { - let accessor = metadata.find_index_set_pattern(&field.indexes).unwrap(); - writeln!( - output, - " {}: {}::new(client.clone(), &format!(\"{{base_path}}_{}\")),", - field_name, accessor.name, field.name - ) - .unwrap(); - } else { - writeln!( - output, - " {}: MetricNode::new(client.clone(), format!(\"{{base_path}}_{}\")),", - field_name, field.name - ) - .unwrap(); - } -} - -fn field_to_type_annotation_generic( - field: &PatternField, - metadata: &ClientMetadata, - is_generic: bool, -) -> String { - field_to_type_annotation_with_generic(field, metadata, is_generic, None) -} - -fn field_to_type_annotation_with_generic( - field: &PatternField, - metadata: &ClientMetadata, - is_generic: bool, - generic_value_type: Option<&str>, -) -> String { - let value_type = if is_generic && field.rust_type == "T" { - "T".to_string() - } else { - extract_inner_type(&field.rust_type) - }; - - if metadata.is_pattern_type(&field.rust_type) { - if metadata.is_pattern_generic(&field.rust_type) { - // Use type_param from field, then generic_value_type, then T if parent is generic - let type_param = field - .type_param - .as_deref() - .or(generic_value_type) - .unwrap_or(if is_generic { "T" } else { "_" }); - return format!("{}<{}>", field.rust_type, type_param); - } - 
field.rust_type.clone() - } else if field.is_branch() { - // Non-pattern branch struct - field.rust_type.clone() - } else if let Some(accessor) = metadata.find_index_set_pattern(&field.indexes) { - format!("{}<{}>", accessor.name, value_type) - } else { - format!("MetricNode<{}>", value_type) - } -} - -fn generate_tree(output: &mut String, catalog: &TreeNode, metadata: &ClientMetadata) { - writeln!(output, "// Catalog tree\n").unwrap(); - - let pattern_lookup = metadata.pattern_lookup(); - let mut generated = HashSet::new(); - generate_tree_node( - output, - "CatalogTree", - catalog, - &pattern_lookup, - metadata, - &mut generated, - ); -} - -fn generate_tree_node( - output: &mut String, - name: &str, - node: &TreeNode, - pattern_lookup: &std::collections::HashMap, String>, - metadata: &ClientMetadata, - generated: &mut HashSet, -) { - let TreeNode::Branch(children) = node else { - return; - }; - - let fields_with_child_info = get_fields_with_child_info(children, name, pattern_lookup); - let fields: Vec = fields_with_child_info - .iter() - .map(|(f, _)| f.clone()) - .collect(); - - if let Some(pattern_name) = pattern_lookup.get(&fields) - && pattern_name != name - { - return; - } - - if generated.contains(name) { - return; - } - generated.insert(name.to_string()); - - writeln!(output, "/// Catalog tree node.").unwrap(); - writeln!(output, "pub struct {} {{", name).unwrap(); - - for (field, child_fields) in &fields_with_child_info { - let field_name = to_snake_case(&field.name); - // Look up type parameter for generic patterns - let generic_value_type = child_fields - .as_ref() - .and_then(|cf| metadata.get_type_param(cf)) - .map(String::as_str); - let type_annotation = - field_to_type_annotation_with_generic(field, metadata, false, generic_value_type); - writeln!(output, " pub {}: {},", field_name, type_annotation).unwrap(); - } - - writeln!(output, "}}\n").unwrap(); - - writeln!(output, "impl {} {{", name).unwrap(); - writeln!( - output, - " pub fn new(client: 
Arc, base_path: &str) -> Self {{" - ) - .unwrap(); - writeln!(output, " Self {{").unwrap(); - - for (field, (child_name, child_node)) in fields.iter().zip(children.iter()) { - let field_name = to_snake_case(&field.name); - if metadata.is_pattern_type(&field.rust_type) { - let pattern = metadata.find_pattern(&field.rust_type); - let is_parameterizable = pattern.is_some_and(|p| p.is_parameterizable()); - - if is_parameterizable { - let metric_base = get_pattern_instance_base(child_node, child_name); - writeln!( - output, - " {}: {}::new(client.clone(), \"{}\"),", - field_name, field.rust_type, metric_base - ) - .unwrap(); - } else { - writeln!( - output, - " {}: {}::new(client.clone(), &format!(\"{{base_path}}_{}\")),", - field_name, field.rust_type, field.name - ) - .unwrap(); - } - } else if metadata.field_uses_accessor(field) { - let accessor = metadata.find_index_set_pattern(&field.indexes).unwrap(); - writeln!( - output, - " {}: {}::new(client.clone(), &format!(\"{{base_path}}_{}\")),", - field_name, accessor.name, field.name - ) - .unwrap(); - } else if field.is_branch() { - // Non-pattern branch - instantiate the nested struct - writeln!( - output, - " {}: {}::new(client.clone(), &format!(\"{{base_path}}_{}\")),", - field_name, field.rust_type, field.name - ) - .unwrap(); - } else { - // Leaf - use MetricNode with base_path - writeln!( - output, - " {}: MetricNode::new(client.clone(), format!(\"{{base_path}}_{}\")),", - field_name, field.name - ) - .unwrap(); - } - } - - writeln!(output, " }}").unwrap(); - writeln!(output, " }}").unwrap(); - writeln!(output, "}}\n").unwrap(); - - for (child_name, child_node) in children { - if let TreeNode::Branch(grandchildren) = child_node { - let child_fields = get_node_fields(grandchildren, pattern_lookup); - if !pattern_lookup.contains_key(&child_fields) { - let child_struct_name = format!("{}_{}", name, to_pascal_case(child_name)); - generate_tree_node( - output, - &child_struct_name, - child_node, - pattern_lookup, - 
metadata, - generated, - ); - } - } - } -} - -fn generate_main_client(output: &mut String, endpoints: &[Endpoint]) { - writeln!( - output, - r#"/// Main BRK client with catalog tree and API methods. -pub struct BrkClient {{ - base: Arc, - tree: CatalogTree, -}} - -impl BrkClient {{ - /// Client version. - pub const VERSION: &'static str = "v{VERSION}"; - - /// Create a new client with the given base URL. - pub fn new(base_url: impl Into) -> Self {{ - let base = Arc::new(BrkClientBase::new(base_url)); - let tree = CatalogTree::new(base.clone(), ""); - Self {{ base, tree }} - }} - - /// Create a new client with options. - pub fn with_options(options: BrkClientOptions) -> Self {{ - let base = Arc::new(BrkClientBase::with_options(options)); - let tree = CatalogTree::new(base.clone(), ""); - Self {{ base, tree }} - }} - - /// Get the catalog tree for navigating metrics. - pub fn tree(&self) -> &CatalogTree {{ - &self.tree - }} -"#, - VERSION = crate::VERSION - ) - .unwrap(); - - generate_api_methods(output, endpoints); - - writeln!(output, "}}").unwrap(); -} - -fn generate_api_methods(output: &mut String, endpoints: &[Endpoint]) { - for endpoint in endpoints { - if !endpoint.should_generate() { - continue; - } - - let method_name = endpoint_to_method_name(endpoint); - let return_type = endpoint - .response_type - .as_deref() - .map(js_type_to_rust) - .unwrap_or_else(|| "serde_json::Value".to_string()); - - writeln!( - output, - " /// {}", - endpoint.summary.as_deref().unwrap_or(&method_name) - ) - .unwrap(); - if let Some(desc) = &endpoint.description - && endpoint.summary.as_ref() != Some(desc) - { - writeln!(output, " ///").unwrap(); - writeln!(output, " /// {}", desc).unwrap(); - } - - let params = build_method_params(endpoint); - writeln!( - output, - " pub fn {}(&self{}) -> Result<{}> {{", - method_name, params, return_type - ) - .unwrap(); - - let path = build_path_template(&endpoint.path, &endpoint.path_params); - - if endpoint.query_params.is_empty() { - 
writeln!(output, " self.base.get(&format!(\"{}\"))", path).unwrap(); - } else { - writeln!(output, " let mut query = Vec::new();").unwrap(); - for param in &endpoint.query_params { - if param.required { - writeln!( - output, - " query.push(format!(\"{}={{}}\", {}));", - param.name, param.name - ) - .unwrap(); - } else { - writeln!( - output, - " if let Some(v) = {} {{ query.push(format!(\"{}={{}}\", v)); }}", - param.name, param.name - ) - .unwrap(); - } - } - writeln!(output, " let query_str = if query.is_empty() {{ String::new() }} else {{ format!(\"?{{}}\", query.join(\"&\")) }};").unwrap(); - writeln!( - output, - " self.base.get(&format!(\"{}{{}}\", query_str))", - path - ) - .unwrap(); - } - - writeln!(output, " }}\n").unwrap(); - } -} - -fn endpoint_to_method_name(endpoint: &Endpoint) -> String { - to_snake_case(&endpoint.operation_name()) -} - -fn build_method_params(endpoint: &Endpoint) -> String { - let mut params = Vec::new(); - for param in &endpoint.path_params { - params.push(format!(", {}: &str", param.name)); - } - for param in &endpoint.query_params { - if param.required { - params.push(format!(", {}: &str", param.name)); - } else { - params.push(format!(", {}: Option<&str>", param.name)); - } - } - params.join("") -} - -fn build_path_template(path: &str, path_params: &[super::Parameter]) -> String { - let mut result = path.to_string(); - for param in path_params { - let placeholder = format!("{{{}}}", param.name); - let interpolation = format!("{{{}}}", param.name); - result = result.replace(&placeholder, &interpolation); - } - result -} - -fn js_type_to_rust(js_type: &str) -> String { - if let Some(inner) = js_type.strip_suffix("[]") { - format!("Vec<{}>", js_type_to_rust(inner)) - } else { - match js_type { - "string" => "String".to_string(), - "number" => "f64".to_string(), - "boolean" => "bool".to_string(), - "*" => "serde_json::Value".to_string(), - other => other.to_string(), - } - } -} diff --git a/crates/brk_binder/src/types/mod.rs 
b/crates/brk_binder/src/types/mod.rs deleted file mode 100644 index 1a0ddc2f7..000000000 --- a/crates/brk_binder/src/types/mod.rs +++ /dev/null @@ -1,193 +0,0 @@ -mod case; -mod patterns; -mod schema; -mod tree; - -pub use case::*; -pub use schema::*; -pub use tree::*; - -use std::collections::{BTreeSet, HashMap}; - -use brk_query::Vecs; -use brk_types::Index; - -/// How a field modifies the accumulated metric name. -#[derive(Debug, Clone, PartialEq, Eq)] -pub enum FieldNamePosition { - /// Field prepends a prefix: leaf.name() = prefix + accumulated - Prepend(String), - /// Field appends a suffix: leaf.name() = accumulated + suffix - Append(String), - /// Field IS the accumulated name (no modification) - Identity, - /// Field sets a new base name (used at pattern entry points) - SetBase(String), -} - -/// Metadata extracted from brk_query for client generation. -#[derive(Debug)] -pub struct ClientMetadata { - /// The catalog tree structure (with schemas in leaves) - pub catalog: brk_types::TreeNode, - /// Structural patterns - tree node shapes that repeat - pub structural_patterns: Vec, - /// All indexes used across the catalog - pub used_indexes: BTreeSet, - /// Index set patterns - sets of indexes that appear together on metrics - pub index_set_patterns: Vec, - /// Maps concrete field signatures to pattern names - concrete_to_pattern: HashMap, String>, - /// Maps concrete field signatures to their type parameter (for generic patterns) - concrete_to_type_param: HashMap, String>, -} - -impl ClientMetadata { - /// Extract metadata from brk_query::Vecs. 
- pub fn from_vecs(vecs: &Vecs) -> Self { - let catalog = vecs.catalog().clone(); - let (structural_patterns, concrete_to_pattern, concrete_to_type_param) = - patterns::detect_structural_patterns(&catalog); - let (used_indexes, index_set_patterns) = tree::detect_index_patterns(&catalog); - - ClientMetadata { - catalog, - structural_patterns, - used_indexes, - index_set_patterns, - concrete_to_pattern, - concrete_to_type_param, - } - } - - /// Find an index set pattern that matches the given indexes. - pub fn find_index_set_pattern(&self, indexes: &BTreeSet) -> Option<&IndexSetPattern> { - self.index_set_patterns - .iter() - .find(|p| &p.indexes == indexes) - } - - /// Check if a type is a structural pattern name. - pub fn is_pattern_type(&self, type_name: &str) -> bool { - self.structural_patterns.iter().any(|p| p.name == type_name) - } - - /// Find a pattern by name. - pub fn find_pattern(&self, name: &str) -> Option<&StructuralPattern> { - self.structural_patterns.iter().find(|p| p.name == name) - } - - /// Check if a pattern is generic. - pub fn is_pattern_generic(&self, name: &str) -> bool { - self.find_pattern(name).is_some_and(|p| p.is_generic) - } - - /// Get the type parameter for a generic pattern given its concrete fields. - pub fn get_type_param(&self, fields: &[PatternField]) -> Option<&String> { - self.concrete_to_type_param.get(fields) - } - - /// Build a lookup map from field signatures to pattern names. - pub fn pattern_lookup(&self) -> HashMap, String> { - let mut lookup = self.concrete_to_pattern.clone(); - for p in &self.structural_patterns { - lookup.insert(p.fields.clone(), p.name.clone()); - } - lookup - } - - /// Check if a field should use a shared index accessor. - pub fn field_uses_accessor(&self, field: &PatternField) -> bool { - self.find_index_set_pattern(&field.indexes).is_some() - } -} - -/// A pattern of indexes that appear together on multiple metrics. 
-#[derive(Debug, Clone)] -pub struct IndexSetPattern { - /// Pattern name (e.g., "DateHeightIndexes") - pub name: String, - /// The set of indexes - pub indexes: BTreeSet, -} - -/// A structural pattern - a branch structure that appears multiple times. -#[derive(Debug, Clone)] -pub struct StructuralPattern { - /// Pattern name - pub name: String, - /// Ordered list of child fields - pub fields: Vec, - /// How each field modifies the accumulated name - pub field_positions: HashMap, - /// If true, all leaf fields use a type parameter T - pub is_generic: bool, -} - -impl StructuralPattern { - /// Returns true if this pattern contains any leaf fields. - pub fn contains_leaves(&self) -> bool { - self.fields.iter().any(|f| f.is_leaf()) - } - - /// Returns true if all leaf fields have consistent name transformations. - pub fn is_parameterizable(&self) -> bool { - !self.field_positions.is_empty() - && self - .fields - .iter() - .all(|f| f.is_branch() || self.field_positions.contains_key(&f.name)) - } - - /// Get the field position for a given field name. - pub fn get_field_position(&self, field_name: &str) -> Option<&FieldNamePosition> { - self.field_positions.get(field_name) - } -} - -/// A field in a structural pattern. -#[derive(Debug, Clone, PartialOrd, Ord)] -pub struct PatternField { - /// Field name - pub name: String, - /// Rust type for leaves or pattern name for branches - pub rust_type: String, - /// JSON type from schema - pub json_type: String, - /// For leaves: the set of supported indexes. Empty for branches. - pub indexes: BTreeSet, - /// For branches referencing generic patterns: the concrete type parameter - pub type_param: Option, -} - -impl PatternField { - /// Returns true if this is a leaf field (has indexes). - pub fn is_leaf(&self) -> bool { - !self.indexes.is_empty() - } - - /// Returns true if this is a branch field (no indexes). 
- pub fn is_branch(&self) -> bool { - self.indexes.is_empty() - } -} - -impl std::hash::Hash for PatternField { - fn hash(&self, state: &mut H) { - self.name.hash(state); - self.rust_type.hash(state); - self.json_type.hash(state); - // Note: child_fields not included in hash for pattern matching purposes - } -} - -impl PartialEq for PatternField { - fn eq(&self, other: &Self) -> bool { - self.name == other.name - && self.rust_type == other.rust_type - && self.json_type == other.json_type - // Note: child_fields not included in equality for pattern matching purposes - } -} - -impl Eq for PatternField {} diff --git a/crates/brk_binder/src/types/patterns.rs b/crates/brk_binder/src/types/patterns.rs deleted file mode 100644 index 45f522de8..000000000 --- a/crates/brk_binder/src/types/patterns.rs +++ /dev/null @@ -1,435 +0,0 @@ -use std::collections::{BTreeSet, HashMap}; - -use brk_types::TreeNode; - -use super::{ - FieldNamePosition, PatternField, StructuralPattern, case::to_pascal_case, - schema::schema_to_json_type, - tree::{get_first_leaf_name, get_node_fields}, -}; - -/// Detect structural patterns in the tree using a bottom-up approach. -/// Returns (patterns, concrete_to_pattern, concrete_to_type_param). 
-pub fn detect_structural_patterns( - tree: &TreeNode, -) -> ( - Vec, - HashMap, String>, - HashMap, String>, -) { - let mut signature_to_pattern: HashMap, String> = HashMap::new(); - let mut signature_counts: HashMap, usize> = HashMap::new(); - let mut normalized_to_name: HashMap, String> = HashMap::new(); - let mut name_counts: HashMap = HashMap::new(); - let mut signature_to_child_fields: HashMap, Vec>> = - HashMap::new(); - - resolve_branch_patterns( - tree, - "root", - &mut signature_to_pattern, - &mut signature_counts, - &mut normalized_to_name, - &mut name_counts, - &mut signature_to_child_fields, - ); - - let (generic_patterns, generic_mappings, type_mappings) = - detect_generic_patterns(&signature_to_pattern); - - let mut patterns: Vec = signature_to_pattern - .iter() - .filter(|(sig, _)| { - signature_counts.get(*sig).copied().unwrap_or(0) >= 2 - && !generic_mappings.contains_key(*sig) - }) - .map(|(fields, name)| { - let child_fields_list = signature_to_child_fields.get(fields); - let fields_with_type_params = fields - .iter() - .enumerate() - .map(|(i, f)| { - let type_param = child_fields_list - .and_then(|list| list.get(i)) - .and_then(|cf| type_mappings.get(cf).cloned()); - PatternField { - type_param, - ..f.clone() - } - }) - .collect(); - StructuralPattern { - name: name.clone(), - fields: fields_with_type_params, - field_positions: HashMap::new(), - is_generic: false, - } - }) - .collect(); - - patterns.extend(generic_patterns); - - let mut pattern_lookup: HashMap, String> = HashMap::new(); - for (sig, name) in &signature_to_pattern { - if signature_counts.get(sig).copied().unwrap_or(0) >= 2 { - pattern_lookup.insert(sig.clone(), name.clone()); - } - } - pattern_lookup.extend(generic_mappings.clone()); - - let concrete_to_pattern = pattern_lookup.clone(); - - analyze_pattern_field_positions(tree, &mut patterns, &pattern_lookup); - - patterns.sort_by(|a, b| b.fields.len().cmp(&a.fields.len())); - (patterns, concrete_to_pattern, type_mappings) -} - 
-/// Detect generic patterns by grouping signatures by their normalized form. -/// Returns (patterns, concrete_to_pattern, concrete_to_type_param). -fn detect_generic_patterns( - signature_to_pattern: &HashMap, String>, -) -> ( - Vec, - HashMap, String>, - HashMap, String>, -) { - // Group by normalized form, tracking the extracted type for each concrete signature - let mut normalized_groups: HashMap< - Vec, - Vec<(Vec, String, String)>, - > = HashMap::new(); - - for (fields, name) in signature_to_pattern { - if let Some((normalized, extracted_type)) = normalize_fields_for_generic(fields) { - normalized_groups - .entry(normalized) - .or_default() - .push((fields.clone(), name.clone(), extracted_type)); - } - } - - let mut patterns = Vec::new(); - let mut pattern_mappings: HashMap, String> = HashMap::new(); - let mut type_mappings: HashMap, String> = HashMap::new(); - - for (normalized_fields, group) in normalized_groups { - if group.len() >= 2 { - let generic_name = group[0].1.clone(); - for (concrete_fields, _, extracted_type) in &group { - pattern_mappings.insert(concrete_fields.clone(), generic_name.clone()); - type_mappings.insert(concrete_fields.clone(), extracted_type.clone()); - } - patterns.push(StructuralPattern { - name: generic_name, - fields: normalized_fields, - field_positions: HashMap::new(), - is_generic: true, - }); - } - } - - (patterns, pattern_mappings, type_mappings) -} - -/// Normalize fields by replacing concrete value types with "T". -/// Returns (normalized_fields, extracted_type) where extracted_type is the concrete type replaced. 
-fn normalize_fields_for_generic(fields: &[PatternField]) -> Option<(Vec, String)> { - let leaf_types: Vec<&str> = fields - .iter() - .filter(|f| f.is_leaf()) - .map(|f| f.rust_type.as_str()) - .collect(); - - if leaf_types.is_empty() { - return None; - } - - let first_type = leaf_types[0]; - if !leaf_types.iter().all(|t| *t == first_type) { - return None; - } - - let normalized = fields - .iter() - .map(|f| { - if f.is_branch() { - f.clone() - } else { - PatternField { - name: f.name.clone(), - rust_type: "T".to_string(), - json_type: "T".to_string(), - indexes: f.indexes.clone(), - type_param: None, - } - } - }) - .collect(); - - Some((normalized, super::extract_inner_type(first_type))) -} - -/// Recursively resolve branch patterns bottom-up. -/// Returns (pattern_name, fields) for parent's child_fields tracking. -fn resolve_branch_patterns( - node: &TreeNode, - field_name: &str, - signature_to_pattern: &mut HashMap, String>, - signature_counts: &mut HashMap, usize>, - normalized_to_name: &mut HashMap, String>, - name_counts: &mut HashMap, - signature_to_child_fields: &mut HashMap, Vec>>, -) -> Option<(String, Vec)> { - let TreeNode::Branch(children) = node else { - return None; - }; - - let mut fields: Vec = Vec::new(); - let mut child_fields_vec: Vec> = Vec::new(); - - for (child_name, child_node) in children { - let (rust_type, json_type, indexes, child_fields) = match child_node { - TreeNode::Leaf(leaf) => ( - leaf.value_type().to_string(), - schema_to_json_type(&leaf.schema), - leaf.indexes().clone(), - Vec::new(), - ), - TreeNode::Branch(_) => { - let (pattern_name, child_pattern_fields) = resolve_branch_patterns( - child_node, - child_name, - signature_to_pattern, - signature_counts, - normalized_to_name, - name_counts, - signature_to_child_fields, - ) - .unwrap_or_else(|| ("Unknown".to_string(), Vec::new())); - ( - pattern_name.clone(), - pattern_name, - BTreeSet::new(), - child_pattern_fields, - ) - } - }; - fields.push(PatternField { - name: 
child_name.clone(), - rust_type, - json_type, - indexes, - type_param: None, - }); - child_fields_vec.push(child_fields); - } - - fields.sort_by(|a, b| a.name.cmp(&b.name)); - *signature_counts.entry(fields.clone()).or_insert(0) += 1; - - // Store child fields for type param resolution later - signature_to_child_fields - .entry(fields.clone()) - .or_insert(child_fields_vec); - - let pattern_name = if let Some(existing) = signature_to_pattern.get(&fields) { - existing.clone() - } else { - let normalized = normalize_fields_for_naming(&fields); - let name = normalized_to_name - .entry(normalized) - .or_insert_with(|| generate_pattern_name(field_name, name_counts)) - .clone(); - signature_to_pattern.insert(fields.clone(), name.clone()); - name - }; - - Some((pattern_name, fields)) -} - -/// Normalize fields for naming (same structure = same name). -fn normalize_fields_for_naming(fields: &[PatternField]) -> Vec { - fields - .iter() - .map(|f| { - if f.is_branch() { - f.clone() - } else { - PatternField { - name: f.name.clone(), - rust_type: "_".to_string(), - json_type: "_".to_string(), - indexes: f.indexes.clone(), - type_param: None, - } - } - }) - .collect() -} - -/// Generate a unique pattern name. 
-fn generate_pattern_name(field_name: &str, name_counts: &mut HashMap) -> String { - let pascal = to_pascal_case(field_name); - let sanitized = if pascal.chars().next().is_some_and(|c| c.is_ascii_digit()) { - format!("_{}", pascal) - } else { - pascal - }; - - let base_name = format!("{}Pattern", sanitized); - let count = name_counts.entry(base_name.clone()).or_insert(0); - *count += 1; - - if *count == 1 { - base_name - } else { - format!("{}{}", base_name, count) - } -} - -// Field position analysis - -fn analyze_pattern_field_positions( - tree: &TreeNode, - patterns: &mut [StructuralPattern], - pattern_lookup: &HashMap, String>, -) { - let mut instances: HashMap> = HashMap::new(); - collect_pattern_instances(tree, "", &mut instances, pattern_lookup); - - for pattern in patterns.iter_mut() { - if let Some(pattern_instances) = instances.get(&pattern.name) { - pattern.field_positions = analyze_field_positions_from_instances(pattern_instances); - } - } -} - -fn collect_pattern_instances( - node: &TreeNode, - accumulated_name: &str, - instances: &mut HashMap>, - pattern_lookup: &HashMap, String>, -) { - let TreeNode::Branch(children) = node else { - return; - }; - - let fields = get_node_fields(children, pattern_lookup); - if let Some(pattern_name) = pattern_lookup.get(&fields) { - for (field_name, child_node) in children { - if let TreeNode::Leaf(leaf) = child_node { - instances.entry(pattern_name.clone()).or_default().push(( - accumulated_name.to_string(), - field_name.clone(), - leaf.name().to_string(), - )); - } - } - } - - for (field_name, child_node) in children { - let child_accumulated = match child_node { - TreeNode::Leaf(leaf) => leaf.name().to_string(), - TreeNode::Branch(_) => { - if let Some(desc_leaf_name) = get_first_leaf_name(child_node) { - infer_accumulated_name(accumulated_name, field_name, &desc_leaf_name) - } else if accumulated_name.is_empty() { - field_name.clone() - } else { - format!("{}_{}", accumulated_name, field_name) - } - } - }; - 
collect_pattern_instances(child_node, &child_accumulated, instances, pattern_lookup); - } -} - -fn infer_accumulated_name(parent_acc: &str, field_name: &str, descendant_leaf: &str) -> String { - if let Some(pos) = descendant_leaf.find(field_name) { - if pos == 0 { - return field_name.to_string(); - } - if pos > 0 && descendant_leaf.chars().nth(pos - 1) == Some('_') { - return if parent_acc.is_empty() { - field_name.to_string() - } else { - format!("{}_{}", parent_acc, field_name) - }; - } - } - - if parent_acc.is_empty() { - field_name.to_string() - } else { - format!("{}_{}", parent_acc, field_name) - } -} - -fn analyze_field_positions_from_instances( - instances: &[(String, String, String)], -) -> HashMap { - let mut field_instances: HashMap> = HashMap::new(); - for (acc, field, leaf) in instances { - field_instances - .entry(field.clone()) - .or_default() - .push((acc.clone(), leaf.clone())); - } - - let mut positions = HashMap::new(); - for (field_name, field_data) in field_instances { - if let Some(position) = detect_field_position(&field_data) { - positions.insert(field_name, position); - } - } - positions -} - -fn detect_field_position(data: &[(String, String)]) -> Option { - if data.is_empty() { - return None; - } - - let (first_acc, first_leaf) = &data[0]; - - // Identity - if first_acc == first_leaf { - return Some(FieldNamePosition::Identity); - } - - // Append - if let Some(suffix) = first_leaf.strip_prefix(first_acc.as_str()) { - let suffix = suffix.to_string(); - if data.iter().all(|(acc, leaf)| { - if acc.is_empty() { - leaf == suffix.trim_start_matches('_') - } else { - leaf.strip_prefix(acc.as_str()) == Some(&suffix) - } - }) { - return Some(FieldNamePosition::Append(suffix)); - } - } - - // Prepend - if let Some(prefix) = first_leaf.strip_suffix(first_acc.as_str()) { - let prefix = prefix.to_string(); - if data.iter().all(|(acc, leaf)| { - if acc.is_empty() { - leaf == prefix.trim_end_matches('_') - } else { - leaf.strip_suffix(acc.as_str()) == 
Some(&prefix) - } - }) { - return Some(FieldNamePosition::Prepend(prefix)); - } - } - - // SetBase - if first_acc.is_empty() { - return Some(FieldNamePosition::SetBase(first_leaf.clone())); - } - - None -} diff --git a/crates/brk_binder/src/types/tree.rs b/crates/brk_binder/src/types/tree.rs deleted file mode 100644 index 8782363a4..000000000 --- a/crates/brk_binder/src/types/tree.rs +++ /dev/null @@ -1,158 +0,0 @@ -use std::collections::{BTreeMap, BTreeSet, HashMap}; - -use brk_types::{Index, TreeNode}; - -use super::{PatternField, case::to_pascal_case, schema::schema_to_json_type}; - -/// Get the first leaf name from a tree node. -pub fn get_first_leaf_name(node: &TreeNode) -> Option { - match node { - TreeNode::Leaf(leaf) => Some(leaf.name().to_string()), - TreeNode::Branch(children) => children.values().find_map(get_first_leaf_name), - } -} - -/// Get the metric base for a pattern instance by analyzing the first leaf descendant. -pub fn get_pattern_instance_base(node: &TreeNode, field_name: &str) -> String { - if let Some(leaf_name) = get_first_leaf_name(node) - && leaf_name.contains(field_name) - { - return field_name.to_string(); - } - field_name.to_string() -} - -/// Get the field signature for a branch node's children. 
-pub fn get_node_fields( - children: &BTreeMap, - pattern_lookup: &HashMap, String>, -) -> Vec { - let mut fields: Vec = children - .iter() - .map(|(name, node)| { - let (rust_type, json_type, indexes) = match node { - TreeNode::Leaf(leaf) => ( - leaf.value_type().to_string(), - schema_to_json_type(&leaf.schema), - leaf.indexes().clone(), - ), - TreeNode::Branch(grandchildren) => { - let child_fields = get_node_fields(grandchildren, pattern_lookup); - let pattern_name = pattern_lookup - .get(&child_fields) - .cloned() - .unwrap_or_else(|| "Unknown".to_string()); - (pattern_name.clone(), pattern_name, BTreeSet::new()) - } - }; - PatternField { - name: name.clone(), - rust_type, - json_type, - indexes, - type_param: None, - } - }) - .collect(); - fields.sort_by(|a, b| a.name.cmp(&b.name)); - fields -} - -/// Get fields with child field information for generic pattern lookup. -/// Returns (field, child_fields) pairs where child_fields is Some for branches. -pub fn get_fields_with_child_info( - children: &BTreeMap, - parent_name: &str, - pattern_lookup: &HashMap, String>, -) -> Vec<(PatternField, Option>)> { - children - .iter() - .map(|(name, node)| { - let (rust_type, json_type, indexes, child_fields) = match node { - TreeNode::Leaf(leaf) => ( - leaf.value_type().to_string(), - schema_to_json_type(&leaf.schema), - leaf.indexes().clone(), - None, - ), - TreeNode::Branch(grandchildren) => { - let child_fields = get_node_fields(grandchildren, pattern_lookup); - let pattern_name = pattern_lookup - .get(&child_fields) - .cloned() - .unwrap_or_else(|| format!("{}_{}", parent_name, to_pascal_case(name))); - ( - pattern_name.clone(), - pattern_name, - BTreeSet::new(), - Some(child_fields), - ) - } - }; - ( - PatternField { - name: name.clone(), - rust_type, - json_type, - indexes, - type_param: None, - }, - child_fields, - ) - }) - .collect() -} - -/// Detect index patterns (sets of indexes that appear together on multiple metrics). 
-pub fn detect_index_patterns(tree: &TreeNode) -> (BTreeSet, Vec) { - let mut used_indexes: BTreeSet = BTreeSet::new(); - let mut index_sets: Vec> = Vec::new(); - - collect_indexes_from_tree(tree, &mut used_indexes, &mut index_sets); - - // Count occurrences of each unique index set - let mut index_set_counts: Vec<(BTreeSet, usize)> = Vec::new(); - for index_set in index_sets { - if let Some(entry) = index_set_counts.iter_mut().find(|(s, _)| s == &index_set) { - entry.1 += 1; - } else { - index_set_counts.push((index_set, 1)); - } - } - - // Build patterns for index sets appearing 2+ times - let mut patterns: Vec = index_set_counts - .into_iter() - .filter(|(indexes, count)| *count >= 2 && !indexes.is_empty()) - .enumerate() - .map(|(i, (indexes, _))| super::IndexSetPattern { - name: if i == 0 { - "Indexes".to_string() - } else { - format!("Indexes{}", i + 1) - }, - indexes, - }) - .collect(); - - patterns.sort_by(|a, b| b.indexes.len().cmp(&a.indexes.len())); - (used_indexes, patterns) -} - -fn collect_indexes_from_tree( - node: &TreeNode, - used_indexes: &mut BTreeSet, - index_sets: &mut Vec>, -) { - match node { - TreeNode::Leaf(leaf) => { - used_indexes.extend(leaf.indexes().iter().cloned()); - index_sets.push(leaf.indexes().clone()); - } - TreeNode::Branch(children) => { - for child in children.values() { - collect_indexes_from_tree(child, used_indexes, index_sets); - } - } - } -} diff --git a/crates/brk_binder/.gitignore b/crates/brk_bindgen/.gitignore similarity index 100% rename from crates/brk_binder/.gitignore rename to crates/brk_bindgen/.gitignore diff --git a/crates/brk_binder/Cargo.toml b/crates/brk_bindgen/Cargo.toml similarity index 80% rename from crates/brk_binder/Cargo.toml rename to crates/brk_bindgen/Cargo.toml index 080d7e6d8..59bd00fa1 100644 --- a/crates/brk_binder/Cargo.toml +++ b/crates/brk_bindgen/Cargo.toml @@ -1,6 +1,6 @@ [package] -name = "brk_binder" -description = "A generator of binding files for other languages" +name = 
"brk_bindgen" +description = "A trait-based generator of client bindings for multiple languages" version.workspace = true edition.workspace = true license.workspace = true diff --git a/crates/brk_binder/DESIGN.md b/crates/brk_bindgen/DESIGN.md similarity index 92% rename from crates/brk_binder/DESIGN.md rename to crates/brk_bindgen/DESIGN.md index 87d299eef..6ba0bdfca 100644 --- a/crates/brk_binder/DESIGN.md +++ b/crates/brk_bindgen/DESIGN.md @@ -1,4 +1,4 @@ -# brk_binder Design Document +# brk_bindgen Design Document ## Goal @@ -16,14 +16,14 @@ Generate typed API clients for **Rust, JavaScript, and Python** with: 4. **schemars integration**: JSON schemas embedded in `MetricLeafWithSchema` for type info 5. **Tree navigation**: `client.tree.blocks.difficulty.fetch()` pattern 6. **OpenAPI integration**: All GET endpoints generate typed methods -7. **Server integration**: brk_server calls brk_binder on startup (when clients/ dir exists) +7. **Server integration**: brk_server calls brk_bindgen on startup (when clients/ dir exists) ### Generated Output -When `crates/brk_binder/clients/` directory exists, running the server generates: +When `crates/brk_bindgen/clients/` directory exists, running the server generates: ``` -crates/brk_binder/clients/ +crates/brk_bindgen/clients/ ├── javascript/ │ └── client.js # JS + JSDoc with tree + API methods ├── python/ @@ -186,8 +186,8 @@ pub struct MetricLeafWithSchema { 1. brk_server creates OpenAPI spec via aide 2. On startup, serializes spec to JSON string -3. Passes JSON to `brk_binder::generate_clients()` -4. brk_binder parses with `oas3` crate (supports OpenAPI 3.1) +3. Passes JSON to `brk_bindgen::generate_clients()` +4. brk_bindgen parses with `oas3` crate (supports OpenAPI 3.1) 5. Generates typed methods for all GET endpoints ### Why oas3? @@ -203,7 +203,7 @@ The `oas3` crate supports OpenAPI 3.1.x parsing. 
- [x] vecdb: Add optional `schemars` feature with `AnySchemaVec` trait - [x] brk_types: Enhance `TreeNode::Leaf` to include `MetricLeafWithSchema` - [x] brk_traversable: Update all `to_tree_node()` with schemars integration -- [x] brk_binder: Set up generator module structure +- [x] brk_bindgen: Set up generator module structure ### Phase 1: JavaScript Client ✅ COMPLETE @@ -216,7 +216,7 @@ The `oas3` crate supports OpenAPI 3.1.x parsing. ### Phase 2: OpenAPI Integration ✅ COMPLETE - [x] Add `oas3` crate dependency (OpenAPI 3.1 support) -- [x] brk_server passes OpenAPI JSON to brk_binder on startup +- [x] brk_server passes OpenAPI JSON to brk_bindgen on startup - [x] Parse OpenAPI spec and extract endpoint definitions - [x] Generate typed methods for each GET endpoint @@ -246,7 +246,7 @@ The `oas3` crate supports OpenAPI 3.1.x parsing. ## File Structure ``` -crates/brk_binder/ +crates/brk_bindgen/ ├── src/ │ ├── lib.rs │ ├── js.rs # JS constants generation (existing) @@ -267,7 +267,7 @@ crates/brk_binder/ crates/brk_server/ └── src/ - ├── lib.rs # Calls brk_binder::generate_clients() on startup + ├── lib.rs # Calls brk_bindgen::generate_clients() on startup └── api/ └── openapi.rs # create_openapi() for aide ``` @@ -289,7 +289,7 @@ To generate clients: ```bash # Create the output directory -mkdir -p crates/brk_binder/clients +mkdir -p crates/brk_bindgen/clients # Run the server (generates clients on startup) cargo run -p brk_server diff --git a/crates/brk_binder/README.md b/crates/brk_bindgen/README.md similarity index 95% rename from crates/brk_binder/README.md rename to crates/brk_bindgen/README.md index ca05370fa..23b11c115 100644 --- a/crates/brk_binder/README.md +++ b/crates/brk_bindgen/README.md @@ -1,4 +1,4 @@ -# brk_binder +# brk_bindgen Code generation for BRK client libraries. 
@@ -17,7 +17,7 @@ Generate typed client libraries for Rust, JavaScript/TypeScript, and Python from ## Core API ```rust,ignore -use brk_binder::{generate_clients, ClientOutputPaths}; +use brk_bindgen::{generate_clients, ClientOutputPaths}; let paths = ClientOutputPaths::new() .rust("crates/brk_client/src/lib.rs") diff --git a/crates/brk_binder/build.rs b/crates/brk_bindgen/build.rs similarity index 100% rename from crates/brk_binder/build.rs rename to crates/brk_bindgen/build.rs diff --git a/crates/brk_bindgen/src/analysis/mod.rs b/crates/brk_bindgen/src/analysis/mod.rs new file mode 100644 index 000000000..471d3bde6 --- /dev/null +++ b/crates/brk_bindgen/src/analysis/mod.rs @@ -0,0 +1,14 @@ +//! Analysis module for name deconstruction and pattern detection. +//! +//! This module implements bottom-up analysis of vec names to detect +//! common denominators (prefixes/suffixes) and field positions. + +mod names; +mod patterns; +mod positions; +mod tree; + +pub use names::*; +pub use patterns::*; +pub use positions::*; +pub use tree::*; diff --git a/crates/brk_bindgen/src/analysis/names.rs b/crates/brk_bindgen/src/analysis/names.rs new file mode 100644 index 000000000..54751e86d --- /dev/null +++ b/crates/brk_bindgen/src/analysis/names.rs @@ -0,0 +1,451 @@ +//! Vec name deconstruction and reconstruction logic. +//! +//! This module analyzes vec names bottom-up to detect common denominators +//! (prefixes or suffixes) and field positions for pattern instances. + +use std::collections::HashMap; + +use crate::FieldNamePosition; + +/// Common denominator found across children's effective names. +#[derive(Debug, Clone, PartialEq, Eq)] +pub enum CommonDenominator { + /// Children share this prefix. Fields append their unique suffix. + /// Example: children are ["addrs_0sats", "addrs_1sats"], common = "addrs_" + Prefix(String), + /// Children share this suffix. Fields prepend their unique prefix. 
+ /// Example: children are ["cumulative_supply", "net_supply"], common = "_supply" + Suffix(String), + /// No common part found. Fields use Identity (field = base). + None, +} + +/// Result of analyzing a pattern level. +#[derive(Debug, Clone)] +pub struct PatternAnalysis { + /// The common prefix/suffix found across all children. + pub common: CommonDenominator, + /// What's left after stripping the common part (passed to parent). + pub base: String, + /// How each field modifies the accumulated name. + pub field_positions: HashMap, +} + +/// Analyze a pattern level using child effective names. +/// +/// This is the core algorithm that detects common prefix/suffix and +/// determines field positions for each child. +/// +/// # Arguments +/// * `child_names` - Vec of (field_name, effective_name) pairs +/// where effective_name is either: +/// - For leaves: the leaf's vec name +/// - For branches: the base returned by analyzing that branch +pub fn analyze_pattern_level(child_names: &[(String, String)]) -> PatternAnalysis { + if child_names.is_empty() { + return PatternAnalysis { + common: CommonDenominator::None, + base: String::new(), + field_positions: HashMap::new(), + }; + } + + if child_names.len() == 1 { + let (field_name, effective) = &child_names[0]; + let mut positions = HashMap::new(); + + // Try suffix match: effective ends with "_fieldname" + let suffix_pattern = format!("_{}", field_name); + if let Some(base) = effective.strip_suffix(&suffix_pattern) { + positions.insert( + field_name.clone(), + FieldNamePosition::Append(suffix_pattern), + ); + return PatternAnalysis { + common: CommonDenominator::None, + base: base.to_string(), + field_positions: positions, + }; + } + + // Try prefix match: effective starts with "fieldname_" + let prefix_pattern = format!("{}_", field_name); + if let Some(base) = effective.strip_prefix(&prefix_pattern) { + positions.insert( + field_name.clone(), + FieldNamePosition::Prepend(prefix_pattern), + ); + return 
PatternAnalysis { + common: CommonDenominator::None, + base: base.to_string(), + field_positions: positions, + }; + } + + // Field equals effective OR field doesn't appear → Identity + // Root-level instances where field == effective are handled by + // passing empty `acc` and conditional position expressions + positions.insert(field_name.clone(), FieldNamePosition::Identity); + return PatternAnalysis { + common: CommonDenominator::None, + base: effective.clone(), + field_positions: positions, + }; + } + + let effective_names: Vec<&str> = child_names.iter().map(|(_, n)| n.as_str()).collect(); + + // Try to find common prefix first + if let Some(prefix) = find_common_prefix(&effective_names) + && !prefix.is_empty() + { + let base = prefix.trim_end_matches('_').to_string(); + let mut positions = HashMap::new(); + for (field_name, effective) in child_names { + // If effective equals the base (prefix without underscore), use Identity + if effective == &base { + positions.insert(field_name.clone(), FieldNamePosition::Identity); + } else if let Some(suffix) = effective.strip_prefix(&prefix) { + // Normal case: effective has the full prefix + let suffix_with_underscore = if suffix.starts_with('_') { + suffix.to_string() + } else { + format!("_{}", suffix) + }; + positions.insert( + field_name.clone(), + FieldNamePosition::Append(suffix_with_underscore), + ); + } else { + // Fallback: use Identity if strip_prefix fails unexpectedly + positions.insert(field_name.clone(), FieldNamePosition::Identity); + } + } + return PatternAnalysis { + common: CommonDenominator::Prefix(prefix), + base, + field_positions: positions, + }; + } + + // Try to find common suffix + if let Some(suffix) = find_common_suffix(&effective_names) + && !suffix.is_empty() + { + let mut positions = HashMap::new(); + for (field_name, effective) in child_names { + let prefix = effective + .strip_suffix(&suffix) + .unwrap_or(effective) + .to_string(); + let prefix_with_underscore = if prefix.ends_with('_') { 
+ prefix + } else { + format!("{}_", prefix) + }; + positions.insert( + field_name.clone(), + FieldNamePosition::Prepend(prefix_with_underscore), + ); + } + let base = suffix.trim_start_matches('_').to_string(); + return PatternAnalysis { + common: CommonDenominator::Suffix(suffix), + base, + field_positions: positions, + }; + } + + // No common part - use Identity for all fields + let mut positions = HashMap::new(); + for (field_name, _) in child_names { + positions.insert(field_name.clone(), FieldNamePosition::Identity); + } + + // Use the first name as base (they're all independent) + let base = child_names + .first() + .map(|(_, n)| n.clone()) + .unwrap_or_default(); + + PatternAnalysis { + common: CommonDenominator::None, + base, + field_positions: positions, + } +} + +/// Find the longest common prefix among all strings. +/// The prefix must end at an underscore boundary for semantic coherence. +fn find_common_prefix(names: &[&str]) -> Option { + if names.is_empty() { + return None; + } + + let first = names[0]; + if first.is_empty() { + return None; + } + + // Find character-by-character common prefix + let mut prefix_len = 0; + for (i, ch) in first.chars().enumerate() { + if names.iter().all(|n| n.chars().nth(i) == Some(ch)) { + prefix_len = i + 1; + } else { + break; + } + } + + if prefix_len == 0 { + return None; + } + + let raw_prefix = &first[..prefix_len]; + + // If raw_prefix exactly matches one of the names, it's a complete metric name. + // In this case, return it with trailing underscore to preserve the full name. 
+ if names.contains(&raw_prefix) { + return Some(format!("{}_", raw_prefix)); + } + + // Find the last underscore position to get a clean boundary + // Prefer ending at an underscore for semantic coherence + if let Some(last_underscore) = raw_prefix.rfind('_') + && last_underscore > 0 + { + let clean_prefix = &first[..=last_underscore]; + // Verify this still works for all names + if names.iter().all(|n| n.starts_with(clean_prefix)) { + return Some(clean_prefix.to_string()); + } + } + + // If no underscore boundary works, the full prefix must end at an underscore + if raw_prefix.ends_with('_') { + return Some(raw_prefix.to_string()); + } + + None +} + +/// Find the longest common suffix among all strings. +/// The suffix must start at an underscore boundary for semantic coherence. +fn find_common_suffix(names: &[&str]) -> Option { + if names.is_empty() { + return None; + } + + let first = names[0]; + if first.is_empty() { + return None; + } + + // Find character-by-character common suffix (from the end) + let first_chars: Vec = first.chars().collect(); + let mut suffix_len = 0; + + for i in 0..first_chars.len() { + let idx_from_end = first_chars.len() - 1 - i; + let ch = first_chars[idx_from_end]; + + let all_match = names.iter().all(|n| { + let n_chars: Vec = n.chars().collect(); + if i >= n_chars.len() { + return false; + } + n_chars[n_chars.len() - 1 - i] == ch + }); + + if all_match { + suffix_len = i + 1; + } else { + break; + } + } + + if suffix_len == 0 { + return None; + } + + let raw_suffix = &first[first.len() - suffix_len..]; + + // Find the first underscore position to get a clean boundary + if let Some(first_underscore) = raw_suffix.find('_') + && first_underscore < raw_suffix.len() - 1 + { + let clean_suffix = &raw_suffix[first_underscore..]; + // Verify this still works for all names + if names.iter().all(|n| n.ends_with(clean_suffix)) { + return Some(clean_suffix.to_string()); + } + } + + // If no underscore boundary works, the full suffix must 
start with underscore + if raw_suffix.starts_with('_') { + return Some(raw_suffix.to_string()); + } + + None +} + +#[cfg(test)] +mod tests { + use super::*; + + #[test] + fn test_common_prefix() { + let names = vec!["addrs_0sats", "addrs_1sats", "addrs_2sats"]; + assert_eq!(find_common_prefix(&names), Some("addrs_".to_string())); + } + + #[test] + fn test_common_suffix() { + let names = vec!["cumulative_supply", "net_supply", "total_supply"]; + assert_eq!(find_common_suffix(&names), Some("_supply".to_string())); + } + + #[test] + fn test_no_common() { + let names = vec!["foo", "bar", "baz"]; + assert_eq!(find_common_prefix(&names), None); + assert_eq!(find_common_suffix(&names), None); + } + + #[test] + fn test_analyze_pattern_level_prefix() { + let children = vec![ + ("_0sats".to_string(), "addrs_0sats".to_string()), + ("_1sats".to_string(), "addrs_1sats".to_string()), + ]; + let analysis = analyze_pattern_level(&children); + + assert!(matches!(analysis.common, CommonDenominator::Prefix(_))); + assert_eq!(analysis.base, "addrs"); + assert!(matches!( + analysis.field_positions.get("_0sats"), + Some(FieldNamePosition::Append(_)) + )); + } + + #[test] + fn test_analyze_pattern_level_suffix() { + let children = vec![ + ("cumulative".to_string(), "cumulative_supply".to_string()), + ("net".to_string(), "net_supply".to_string()), + ]; + let analysis = analyze_pattern_level(&children); + + assert!(matches!(analysis.common, CommonDenominator::Suffix(_))); + assert_eq!(analysis.base, "supply"); + assert!(matches!( + analysis.field_positions.get("cumulative"), + Some(FieldNamePosition::Prepend(_)) + )); + } + + #[test] + fn test_single_child_suffix() { + // Field "count" appears as suffix "_count" in "activity_count" + let children = vec![("count".to_string(), "activity_count".to_string())]; + let analysis = analyze_pattern_level(&children); + + assert!(matches!(analysis.common, CommonDenominator::None)); + assert_eq!(analysis.base, "activity"); + assert_eq!( + 
analysis.field_positions.get("count"), + Some(&FieldNamePosition::Append("_count".to_string())) + ); + } + + #[test] + fn test_single_child_prefix() { + // Field "cumulative" appears as prefix "cumulative_" in "cumulative_supply" + let children = vec![("cumulative".to_string(), "cumulative_supply".to_string())]; + let analysis = analyze_pattern_level(&children); + + assert!(matches!(analysis.common, CommonDenominator::None)); + assert_eq!(analysis.base, "supply"); + assert_eq!( + analysis.field_positions.get("cumulative"), + Some(&FieldNamePosition::Prepend("cumulative_".to_string())) + ); + } + + #[test] + fn test_single_child_identity_equal() { + // Field "supply" equals effective "supply" → Identity + // (root-level handling is done via empty acc and conditional expressions) + let children = vec![("supply".to_string(), "supply".to_string())]; + let analysis = analyze_pattern_level(&children); + + assert!(matches!(analysis.common, CommonDenominator::None)); + assert_eq!(analysis.base, "supply"); + assert_eq!( + analysis.field_positions.get("supply"), + Some(&FieldNamePosition::Identity) + ); + } + + #[test] + fn test_single_child_identity_structural() { + // Field "x" doesn't appear in "a_b" - it's structural grouping + let children = vec![("x".to_string(), "a_b".to_string())]; + let analysis = analyze_pattern_level(&children); + + assert!(matches!(analysis.common, CommonDenominator::None)); + assert_eq!(analysis.base, "a_b"); // passes through unchanged + assert_eq!( + analysis.field_positions.get("x"), + Some(&FieldNamePosition::Identity) + ); + } + + #[test] + fn test_common_prefix_exact_match() { + // When one name exactly matches the common prefix, preserve the full name + // This fixes the realized_loss vs realized_count bug + let names = vec!["realized_loss", "realized_loss_cumulative"]; + assert_eq!( + find_common_prefix(&names), + Some("realized_loss_".to_string()) + ); + } + + #[test] + fn test_common_prefix_exact_match_multiple() { + // Multiple 
children with same base name + let names = vec!["realized_loss", "realized_loss", "realized_loss_cumulative"]; + assert_eq!( + find_common_prefix(&names), + Some("realized_loss_".to_string()) + ); + } + + #[test] + fn test_analyze_pattern_level_full_base() { + // When names are like [realized_loss, realized_loss_cumulative], + // base should be "realized_loss" not "realized" + let children = vec![ + ("sum".to_string(), "realized_loss".to_string()), + ( + "cumulative".to_string(), + "realized_loss_cumulative".to_string(), + ), + ]; + let analysis = analyze_pattern_level(&children); + + assert!(matches!(analysis.common, CommonDenominator::Prefix(_))); + assert_eq!(analysis.base, "realized_loss"); + // sum effective equals base, so position is Identity + assert_eq!( + analysis.field_positions.get("sum"), + Some(&FieldNamePosition::Identity) + ); + // cumulative has suffix "_cumulative" after the base + assert_eq!( + analysis.field_positions.get("cumulative"), + Some(&FieldNamePosition::Append("_cumulative".to_string())) + ); + } +} diff --git a/crates/brk_bindgen/src/analysis/patterns.rs b/crates/brk_bindgen/src/analysis/patterns.rs new file mode 100644 index 000000000..8d0aa7734 --- /dev/null +++ b/crates/brk_bindgen/src/analysis/patterns.rs @@ -0,0 +1,290 @@ +//! Structural pattern detection using bottom-up analysis. +//! +//! This module detects repeating tree structures and analyzes them +//! using the bottom-up name deconstruction algorithm. + +use std::collections::{BTreeSet, HashMap}; + +use brk_types::TreeNode; + +use super::analyze_all_field_positions; +use crate::{PatternField, StructuralPattern, schema_to_json_type, to_pascal_case}; + +/// Context for pattern detection, holding all intermediate state. 
+struct PatternContext { + /// Maps field signatures to pattern names + signature_to_pattern: HashMap, String>, + /// Counts how many times each signature appears + signature_counts: HashMap, usize>, + /// Maps normalized signatures to pattern names (for naming consistency) + normalized_to_name: HashMap, String>, + /// Counts pattern name usage (for unique naming) + name_counts: HashMap, + /// Maps signatures to their child field lists + signature_to_child_fields: HashMap, Vec>>, +} + +impl PatternContext { + fn new() -> Self { + Self { + signature_to_pattern: HashMap::new(), + signature_counts: HashMap::new(), + normalized_to_name: HashMap::new(), + name_counts: HashMap::new(), + signature_to_child_fields: HashMap::new(), + } + } +} + +/// Detect structural patterns in the tree using a bottom-up approach. +/// +/// Returns (patterns, concrete_to_pattern, concrete_to_type_param). +pub fn detect_structural_patterns( + tree: &TreeNode, +) -> ( + Vec, + HashMap, String>, + HashMap, String>, +) { + let mut ctx = PatternContext::new(); + resolve_branch_patterns(tree, "root", &mut ctx); + + let (generic_patterns, generic_mappings, type_mappings) = + detect_generic_patterns(&ctx.signature_to_pattern); + + let mut patterns: Vec = ctx.signature_to_pattern + .iter() + .filter(|(sig, _)| { + ctx.signature_counts.get(*sig).copied().unwrap_or(0) >= 2 + && !generic_mappings.contains_key(*sig) + }) + .map(|(fields, name)| { + let child_fields_list = ctx.signature_to_child_fields.get(fields); + let fields_with_type_params = fields + .iter() + .enumerate() + .map(|(i, f)| { + let type_param = child_fields_list + .and_then(|list| list.get(i)) + .and_then(|cf| type_mappings.get(cf).cloned()); + PatternField { + type_param, + ..f.clone() + } + }) + .collect(); + StructuralPattern { + name: name.clone(), + fields: fields_with_type_params, + field_positions: HashMap::new(), + is_generic: false, + } + }) + .collect(); + + patterns.extend(generic_patterns); + + let mut pattern_lookup: 
HashMap, String> = HashMap::new(); + for (sig, name) in &ctx.signature_to_pattern { + if ctx.signature_counts.get(sig).copied().unwrap_or(0) >= 2 { + pattern_lookup.insert(sig.clone(), name.clone()); + } + } + pattern_lookup.extend(generic_mappings.clone()); + + let concrete_to_pattern = pattern_lookup.clone(); + + // Use the new bottom-up field position analysis + analyze_all_field_positions(tree, &mut patterns, &pattern_lookup); + + patterns.sort_by(|a, b| b.fields.len().cmp(&a.fields.len())); + (patterns, concrete_to_pattern, type_mappings) +} + +/// Detect generic patterns by grouping signatures by their normalized form. +fn detect_generic_patterns( + signature_to_pattern: &HashMap, String>, +) -> ( + Vec, + HashMap, String>, + HashMap, String>, +) { + let mut normalized_groups: HashMap< + Vec, + Vec<(Vec, String, String)>, + > = HashMap::new(); + + for (fields, name) in signature_to_pattern { + if let Some((normalized, extracted_type)) = normalize_fields_for_generic(fields) { + normalized_groups + .entry(normalized) + .or_default() + .push((fields.clone(), name.clone(), extracted_type)); + } + } + + let mut patterns = Vec::new(); + let mut pattern_mappings: HashMap, String> = HashMap::new(); + let mut type_mappings: HashMap, String> = HashMap::new(); + + for (normalized_fields, group) in normalized_groups { + if group.len() >= 2 { + let generic_name = group[0].1.clone(); + for (concrete_fields, _, extracted_type) in &group { + pattern_mappings.insert(concrete_fields.clone(), generic_name.clone()); + type_mappings.insert(concrete_fields.clone(), extracted_type.clone()); + } + patterns.push(StructuralPattern { + name: generic_name, + fields: normalized_fields, + field_positions: HashMap::new(), + is_generic: true, + }); + } + } + + (patterns, pattern_mappings, type_mappings) +} + +/// Normalize fields by replacing concrete value types with "T". 
+fn normalize_fields_for_generic(fields: &[PatternField]) -> Option<(Vec, String)> { + let leaf_types: Vec<&str> = fields + .iter() + .filter(|f| f.is_leaf()) + .map(|f| f.rust_type.as_str()) + .collect(); + + if leaf_types.is_empty() { + return None; + } + + let first_type = leaf_types[0]; + if !leaf_types.iter().all(|t| *t == first_type) { + return None; + } + + let normalized = fields + .iter() + .map(|f| { + if f.is_branch() { + f.clone() + } else { + PatternField { + name: f.name.clone(), + rust_type: "T".to_string(), + json_type: "T".to_string(), + indexes: f.indexes.clone(), + type_param: None, + } + } + }) + .collect(); + + Some((normalized, crate::extract_inner_type(first_type))) +} + +/// Recursively resolve branch patterns bottom-up. +fn resolve_branch_patterns( + node: &TreeNode, + field_name: &str, + ctx: &mut PatternContext, +) -> Option<(String, Vec)> { + let TreeNode::Branch(children) = node else { + return None; + }; + + let mut fields: Vec = Vec::new(); + let mut child_fields_vec: Vec> = Vec::new(); + + for (child_name, child_node) in children { + let (rust_type, json_type, indexes, child_fields) = match child_node { + TreeNode::Leaf(leaf) => ( + leaf.value_type().to_string(), + schema_to_json_type(&leaf.schema), + leaf.indexes().clone(), + Vec::new(), + ), + TreeNode::Branch(_) => { + let (pattern_name, child_pattern_fields) = + resolve_branch_patterns(child_node, child_name, ctx) + .unwrap_or_else(|| ("Unknown".to_string(), Vec::new())); + ( + pattern_name.clone(), + pattern_name, + BTreeSet::new(), + child_pattern_fields, + ) + } + }; + fields.push(PatternField { + name: child_name.clone(), + rust_type, + json_type, + indexes, + type_param: None, + }); + child_fields_vec.push(child_fields); + } + + fields.sort_by(|a, b| a.name.cmp(&b.name)); + *ctx.signature_counts.entry(fields.clone()).or_insert(0) += 1; + + ctx.signature_to_child_fields + .entry(fields.clone()) + .or_insert(child_fields_vec); + + let pattern_name = if let Some(existing) = 
ctx.signature_to_pattern.get(&fields) { + existing.clone() + } else { + let normalized = normalize_fields_for_naming(&fields); + let name = ctx + .normalized_to_name + .entry(normalized) + .or_insert_with(|| generate_pattern_name(field_name, &mut ctx.name_counts)) + .clone(); + ctx.signature_to_pattern.insert(fields.clone(), name.clone()); + name + }; + + Some((pattern_name, fields)) +} + +/// Normalize fields for naming (same structure = same name). +fn normalize_fields_for_naming(fields: &[PatternField]) -> Vec { + fields + .iter() + .map(|f| { + if f.is_branch() { + f.clone() + } else { + PatternField { + name: f.name.clone(), + rust_type: "_".to_string(), + json_type: "_".to_string(), + indexes: f.indexes.clone(), + type_param: None, + } + } + }) + .collect() +} + +/// Generate a unique pattern name. +fn generate_pattern_name(field_name: &str, name_counts: &mut HashMap) -> String { + let pascal = to_pascal_case(field_name); + let sanitized = if pascal.chars().next().is_some_and(|c| c.is_ascii_digit()) { + format!("_{}", pascal) + } else { + pascal + }; + + let base_name = format!("{}Pattern", sanitized); + let count = name_counts.entry(base_name.clone()).or_insert(0); + *count += 1; + + if *count == 1 { + base_name + } else { + format!("{}{}", base_name, count) + } +} diff --git a/crates/brk_bindgen/src/analysis/positions.rs b/crates/brk_bindgen/src/analysis/positions.rs new file mode 100644 index 000000000..661abbf5f --- /dev/null +++ b/crates/brk_bindgen/src/analysis/positions.rs @@ -0,0 +1,120 @@ +//! Field position detection for pattern instances. +//! +//! This module bridges the name analysis with pattern field positions, +//! processing patterns bottom-up to determine how each field modifies +//! the accumulated metric name. 
+ +use std::collections::HashMap; + +use brk_types::TreeNode; + +use super::{analyze_pattern_level, get_node_fields}; +use crate::{FieldNamePosition, PatternField, StructuralPattern}; + +/// Analyze field positions for all patterns using bottom-up tree traversal. +/// +/// This is the main entry point for field position detection. It processes +/// the tree bottom-up, analyzing each pattern instance and aggregating +/// the positions across all instances. +pub fn analyze_all_field_positions( + tree: &TreeNode, + patterns: &mut [StructuralPattern], + pattern_lookup: &HashMap, String>, +) { + let mut all_positions: HashMap>> = + HashMap::new(); + + // Collect positions from all instances bottom-up + collect_positions_bottom_up(tree, pattern_lookup, &mut all_positions); + + // Merge positions into patterns + for pattern in patterns.iter_mut() { + if let Some(field_positions) = all_positions.get(&pattern.name) { + pattern.field_positions = merge_field_positions(field_positions); + } + } +} + +/// Recursively collect field positions bottom-up. +/// Returns the effective base for this node (used by parent level). 
+fn collect_positions_bottom_up( + node: &TreeNode, + pattern_lookup: &HashMap, String>, + all_positions: &mut HashMap>>, +) -> Option { + match node { + TreeNode::Leaf(leaf) => { + // Leaves return their vec name as the effective base + Some(leaf.name().to_string()) + } + TreeNode::Branch(children) => { + // First, process all children recursively (bottom-up) + let mut child_bases: HashMap = HashMap::new(); + for (field_name, child_node) in children { + if let Some(base) = collect_positions_bottom_up(child_node, pattern_lookup, all_positions) { + child_bases.insert(field_name.clone(), base); + } + } + + // Build child names for this level's analysis + let child_names: Vec<(String, String)> = children + .keys() + .filter_map(|field_name| { + child_bases + .get(field_name) + .map(|base| (field_name.clone(), base.clone())) + }) + .collect(); + + if child_names.is_empty() { + return None; + } + + // Analyze this level + let analysis = analyze_pattern_level(&child_names); + + // Get the pattern name for this node (if any) + let fields = get_node_fields(children, pattern_lookup); + if let Some(pattern_name) = pattern_lookup.get(&fields) { + // Record field positions for this pattern instance + for (field_name, position) in &analysis.field_positions { + all_positions + .entry(pattern_name.clone()) + .or_default() + .entry(field_name.clone()) + .or_default() + .push(position.clone()); + } + } + + // Return our base for the parent level + Some(analysis.base) + } + } +} + +/// Merge multiple observed positions for each field into a single position. +/// Uses the first non-Identity position found, as Identity from root-level +/// instances is now handled by passing empty `acc`. 
+fn merge_field_positions( + field_positions: &HashMap>, +) -> HashMap { + field_positions + .iter() + .filter_map(|(field_name, positions)| { + if positions.is_empty() { + return None; + } + + // Prefer Append/Prepend over Identity, as Identity at root-level + // is handled by empty acc and conditional position expressions + let preferred = positions + .iter() + .find(|p| !matches!(p, FieldNamePosition::Identity)) + .cloned() + .unwrap_or_else(|| positions[0].clone()); + + Some((field_name.clone(), preferred)) + }) + .collect() +} diff --git a/crates/brk_bindgen/src/analysis/tree.rs b/crates/brk_bindgen/src/analysis/tree.rs new file mode 100644 index 000000000..e580177a2 --- /dev/null +++ b/crates/brk_bindgen/src/analysis/tree.rs @@ -0,0 +1,251 @@ +//! Tree traversal helpers for pattern analysis. +//! +//! This module provides utilities for working with the TreeNode structure, +//! including leaf name extraction and index pattern detection. + +use std::collections::{BTreeMap, BTreeSet, HashMap}; + +use brk_types::{Index, TreeNode}; + +use crate::{IndexSetPattern, PatternField, child_type_name, schema_to_json_type}; + +/// Get the first leaf name from a tree node. +pub fn get_first_leaf_name(node: &TreeNode) -> Option { + match node { + TreeNode::Leaf(leaf) => Some(leaf.name().to_string()), + TreeNode::Branch(children) => children.values().find_map(get_first_leaf_name), + } +} + +/// Get all leaf names from a tree node. +pub fn get_all_leaf_names(node: &TreeNode) -> Vec { + match node { + TreeNode::Leaf(leaf) => vec![leaf.name().to_string()], + TreeNode::Branch(children) => children.values().flat_map(get_all_leaf_names).collect(), + } +} + +/// Get the field signature for a branch node's children. 
+pub fn get_node_fields( + children: &BTreeMap, + pattern_lookup: &HashMap, String>, +) -> Vec { + let mut fields: Vec = children + .iter() + .map(|(name, node)| { + let (rust_type, json_type, indexes) = match node { + TreeNode::Leaf(leaf) => ( + leaf.value_type().to_string(), + schema_to_json_type(&leaf.schema), + leaf.indexes().clone(), + ), + TreeNode::Branch(grandchildren) => { + let child_fields = get_node_fields(grandchildren, pattern_lookup); + let pattern_name = pattern_lookup + .get(&child_fields) + .cloned() + .unwrap_or_else(|| "Unknown".to_string()); + (pattern_name.clone(), pattern_name, BTreeSet::new()) + } + }; + PatternField { + name: name.clone(), + rust_type, + json_type, + indexes, + type_param: None, + } + }) + .collect(); + fields.sort_by(|a, b| a.name.cmp(&b.name)); + fields +} + +/// Detect index patterns (sets of indexes that appear together on metrics). +pub fn detect_index_patterns(tree: &TreeNode) -> (BTreeSet, Vec) { + let mut used_indexes: BTreeSet = BTreeSet::new(); + let mut unique_index_sets: BTreeSet> = BTreeSet::new(); + + collect_indexes_from_tree(tree, &mut used_indexes, &mut unique_index_sets); + + // Sort by count (descending) then by first index name for deterministic ordering + let mut sorted_sets: Vec<_> = unique_index_sets + .into_iter() + .filter(|indexes| !indexes.is_empty()) + .collect(); + sorted_sets.sort_by(|a, b| { + b.len() + .cmp(&a.len()) + .then_with(|| a.iter().next().cmp(&b.iter().next())) + }); + + // Assign unique sequential names + let patterns: Vec = sorted_sets + .into_iter() + .enumerate() + .map(|(i, indexes)| IndexSetPattern { + name: format!("MetricPattern{}", i + 1), + indexes, + }) + .collect(); + + (used_indexes, patterns) +} + +fn collect_indexes_from_tree( + node: &TreeNode, + used_indexes: &mut BTreeSet, + unique_index_sets: &mut BTreeSet>, +) { + match node { + TreeNode::Leaf(leaf) => { + used_indexes.extend(leaf.indexes().iter().cloned()); + unique_index_sets.insert(leaf.indexes().clone()); + } 
+ TreeNode::Branch(children) => { + for child in children.values() { + collect_indexes_from_tree(child, used_indexes, unique_index_sets); + } + } + } +} + +/// Get the metric base for a pattern instance by analyzing all leaf descendants. +/// +/// For root-level instances (no common prefix among leaves), returns empty string. +/// For cohort-level instances, returns the common prefix among all leaves. +pub fn get_pattern_instance_base(node: &TreeNode) -> String { + let leaf_names = get_all_leaf_names(node); + if leaf_names.is_empty() { + return String::new(); + } + + // Find the longest common prefix among all leaf names + let common_prefix = find_common_prefix_at_underscore(&leaf_names); + + // If no common prefix, we're at root level + if common_prefix.is_empty() { + return String::new(); + } + + // Return the common prefix (without trailing underscore) + common_prefix.trim_end_matches('_').to_string() +} + +/// Find the longest common prefix at an underscore boundary. +fn find_common_prefix_at_underscore(names: &[String]) -> String { + if names.is_empty() { + return String::new(); + } + + let first = &names[0]; + if first.is_empty() { + return String::new(); + } + + // Find character-by-character common prefix + let mut prefix_len = 0; + for (i, ch) in first.chars().enumerate() { + if names.iter().all(|n| n.chars().nth(i) == Some(ch)) { + prefix_len = i + 1; + } else { + break; + } + } + + if prefix_len == 0 { + return String::new(); + } + + let raw_prefix = &first[..prefix_len]; + + // If raw_prefix exactly matches a leaf name, it's a complete metric name. + // In this case, return it with trailing underscore (will be trimmed by caller). 
+ if names.iter().any(|n| n == raw_prefix) { + return format!("{}_", raw_prefix); + } + + // Find the last underscore position to get a clean boundary + if let Some(last_underscore) = raw_prefix.rfind('_') + && last_underscore > 0 + { + let clean_prefix = &first[..=last_underscore]; + // Verify this still works for all names + if names.iter().all(|n| n.starts_with(clean_prefix)) { + return clean_prefix.to_string(); + } + } + + // If no underscore boundary works, check if full prefix ends at underscore + if raw_prefix.ends_with('_') { + return raw_prefix.to_string(); + } + + String::new() +} + +/// Infer the accumulated name for a child node based on a descendant leaf name. +pub fn infer_accumulated_name(parent_acc: &str, field_name: &str, descendant_leaf: &str) -> String { + if let Some(pos) = descendant_leaf.find(field_name) { + if pos == 0 { + return field_name.to_string(); + } + if pos > 0 && descendant_leaf.chars().nth(pos - 1) == Some('_') { + return if parent_acc.is_empty() { + field_name.to_string() + } else { + format!("{}_{}", parent_acc, field_name) + }; + } + } + + if parent_acc.is_empty() { + field_name.to_string() + } else { + format!("{}_{}", parent_acc, field_name) + } +} + +/// Get fields with child field information for generic pattern lookup. 
+pub fn get_fields_with_child_info( + children: &BTreeMap, + parent_name: &str, + pattern_lookup: &HashMap, String>, +) -> Vec<(PatternField, Option>)> { + children + .iter() + .map(|(name, node)| { + let (rust_type, json_type, indexes, child_fields) = match node { + TreeNode::Leaf(leaf) => ( + leaf.value_type().to_string(), + schema_to_json_type(&leaf.schema), + leaf.indexes().clone(), + None, + ), + TreeNode::Branch(grandchildren) => { + let child_fields = get_node_fields(grandchildren, pattern_lookup); + let pattern_name = pattern_lookup + .get(&child_fields) + .cloned() + .unwrap_or_else(|| child_type_name(parent_name, name)); + ( + pattern_name.clone(), + pattern_name, + BTreeSet::new(), + Some(child_fields), + ) + } + }; + ( + PatternField { + name: name.clone(), + rust_type, + json_type, + indexes, + type_param: None, + }, + child_fields, + ) + }) + .collect() +} diff --git a/crates/brk_bindgen/src/backends/javascript.rs b/crates/brk_bindgen/src/backends/javascript.rs new file mode 100644 index 000000000..b2013f781 --- /dev/null +++ b/crates/brk_bindgen/src/backends/javascript.rs @@ -0,0 +1,99 @@ +//! JavaScript language syntax implementation. + +use crate::{FieldNamePosition, GenericSyntax, LanguageSyntax, to_camel_case, to_pascal_case}; + +/// JavaScript-specific code generation syntax. +pub struct JavaScriptSyntax; + +impl LanguageSyntax for JavaScriptSyntax { + fn field_name(&self, name: &str) -> String { + to_camel_case(name) + } + + fn path_expr(&self, base_var: &str, suffix: &str) -> String { + // Convert base_var to camelCase for JavaScript + let var_name = to_camel_case(base_var); + format!("`${{{}}}{}`", var_name, suffix) + } + + fn position_expr(&self, pos: &FieldNamePosition, base_var: &str) -> String { + // Convert base_var to camelCase for JavaScript + let var_name = to_camel_case(base_var); + match pos { + FieldNamePosition::Append(s) => { + // Use helper _m(acc, suffix) to build metric name + // e.g., _m(acc, "cap") produces: acc ? 
`${acc}_cap` : 'cap' + if let Some(suffix) = s.strip_prefix('_') { + format!("_m({}, '{}')", var_name, suffix) + } else { + format!("`${{{}}}{}`", var_name, s) + } + } + FieldNamePosition::Prepend(s) => { + // Handle empty acc case for prepend + if let Some(prefix) = s.strip_suffix('_') { + format!( + "({} ? `{}${{{}}}` : '{}')", + var_name, s, var_name, prefix + ) + } else { + format!("`{}${{{}}}`", s, var_name) + } + } + FieldNamePosition::Identity => var_name, + FieldNamePosition::SetBase(s) => format!("'{}'", s), + } + } + + fn constructor(&self, type_name: &str, path_expr: &str) -> String { + format!("create{}(client, {})", type_name, path_expr) + } + + fn field_init(&self, indent: &str, name: &str, _type_ann: &str, value: &str) -> String { + // JavaScript uses object literal syntax; type is in JSDoc, not in assignment + format!("{}{}: {},", indent, name, value) + } + + fn generic_syntax(&self) -> GenericSyntax { + GenericSyntax::JAVASCRIPT + } + + fn struct_header(&self, name: &str, generic_params: &str, doc: Option<&str>) -> String { + let mut result = String::new(); + if let Some(doc) = doc { + result.push_str(&format!("/** {} */\n", doc)); + } + // JavaScript uses factory functions that return object literals + result.push_str(&format!( + "function create{}{}(client, basePath) {{\n return {{\n", + name, generic_params + )); + result + } + + fn struct_footer(&self) -> String { + " };\n}\n".to_string() + } + + fn constructor_header(&self, _params: &str) -> String { + // JavaScript factory functions don't have a separate constructor + String::new() + } + + fn constructor_footer(&self) -> String { + String::new() + } + + fn field_declaration(&self, indent: &str, _name: &str, type_ann: &str) -> String { + // JSDoc property declaration + format!("{}/** @type {{{}}} */\n", indent, type_ann) + } + + fn index_field_name(&self, index_name: &str) -> String { + format!("by{}", to_pascal_case(index_name)) + } + + fn string_literal(&self, value: &str) -> String { + 
format!("'{}'", value) + } +} diff --git a/crates/brk_bindgen/src/backends/mod.rs b/crates/brk_bindgen/src/backends/mod.rs new file mode 100644 index 000000000..f73625854 --- /dev/null +++ b/crates/brk_bindgen/src/backends/mod.rs @@ -0,0 +1,12 @@ +//! Language-specific syntax backends. +//! +//! This module contains implementations of the `LanguageSyntax` trait +//! for each supported target language. + +mod javascript; +mod python; +mod rust; + +pub use javascript::JavaScriptSyntax; +pub use python::PythonSyntax; +pub use rust::RustSyntax; diff --git a/crates/brk_bindgen/src/backends/python.rs b/crates/brk_bindgen/src/backends/python.rs new file mode 100644 index 000000000..a311a5f65 --- /dev/null +++ b/crates/brk_bindgen/src/backends/python.rs @@ -0,0 +1,89 @@ +//! Python language syntax implementation. + +use crate::{FieldNamePosition, GenericSyntax, LanguageSyntax, escape_python_keyword, to_snake_case}; + +/// Python-specific code generation syntax. +pub struct PythonSyntax; + +impl LanguageSyntax for PythonSyntax { + fn field_name(&self, name: &str) -> String { + escape_python_keyword(&to_snake_case(name)) + } + + fn path_expr(&self, base_var: &str, suffix: &str) -> String { + format!("f'{{{{{}}}}}{}'", base_var, suffix) + } + + fn position_expr(&self, pos: &FieldNamePosition, base_var: &str) -> String { + match pos { + FieldNamePosition::Append(s) => { + // Use helper _m(acc, suffix) to build metric name + if let Some(suffix) = s.strip_prefix('_') { + format!("_m({}, '{}')", base_var, suffix) + } else { + format!("f'{{{{{}}}}}{}'", base_var, s) + } + } + FieldNamePosition::Prepend(s) => { + // Handle empty acc case for prepend + if let Some(prefix) = s.strip_suffix('_') { + format!( + "(f'{s}{{{{{base_var}}}}}' if {base_var} else '{prefix}')", + s = s, + base_var = base_var, + prefix = prefix + ) + } else { + format!("f'{}{{{{{}}}}}'", s, base_var) + } + } + FieldNamePosition::Identity => base_var.to_string(), + FieldNamePosition::SetBase(s) => 
format!("'{}'", s), + } + } + + fn constructor(&self, type_name: &str, path_expr: &str) -> String { + format!("{}(client, {})", type_name, path_expr) + } + + fn field_init(&self, indent: &str, name: &str, type_ann: &str, value: &str) -> String { + format!("{}self.{}: {} = {}", indent, name, type_ann, value) + } + + fn generic_syntax(&self) -> GenericSyntax { + GenericSyntax::PYTHON + } + + fn struct_header(&self, name: &str, generic_params: &str, doc: Option<&str>) -> String { + let mut result = format!("class {}{}:\n", name, generic_params); + if let Some(doc) = doc { + result.push_str(&format!(" \"\"\"{}\"\"\"\n", doc)); + } + result + } + + fn struct_footer(&self) -> String { + String::new() + } + + fn constructor_header(&self, params: &str) -> String { + format!(" def __init__(self{}) -> None:\n", params) + } + + fn constructor_footer(&self) -> String { + String::new() + } + + fn field_declaration(&self, _indent: &str, _name: &str, _type_ann: &str) -> String { + // Python uses __init__ for field declarations, so this is a no-op + String::new() + } + + fn index_field_name(&self, index_name: &str) -> String { + format!("by_{}", to_snake_case(index_name)) + } + + fn string_literal(&self, value: &str) -> String { + format!("'{}'", value) + } +} diff --git a/crates/brk_bindgen/src/backends/rust.rs b/crates/brk_bindgen/src/backends/rust.rs new file mode 100644 index 000000000..5b97ba83b --- /dev/null +++ b/crates/brk_bindgen/src/backends/rust.rs @@ -0,0 +1,89 @@ +//! Rust language syntax implementation. + +use crate::{FieldNamePosition, GenericSyntax, LanguageSyntax, to_snake_case}; + +/// Rust-specific code generation syntax. 
+pub struct RustSyntax; + +impl LanguageSyntax for RustSyntax { + fn field_name(&self, name: &str) -> String { + to_snake_case(name) + } + + fn path_expr(&self, base_var: &str, suffix: &str) -> String { + format!("format!(\"{{{}}}{}\")", base_var, suffix) + } + + fn position_expr(&self, pos: &FieldNamePosition, _base_var: &str) -> String { + match pos { + FieldNamePosition::Append(s) => { + // Use helper _m(&acc, suffix) to build metric name + if let Some(suffix) = s.strip_prefix('_') { + format!("_m(&acc, \"{}\")", suffix) + } else { + format!("format!(\"{{acc}}{}\")", s) + } + } + FieldNamePosition::Prepend(s) => { + // Handle empty acc case for prepend + if let Some(prefix) = s.strip_suffix('_') { + format!( + "if acc.is_empty() {{ \"{prefix}\".to_string() }} else {{ format!(\"{s}{{acc}}\") }}", + prefix = prefix, + s = s + ) + } else { + format!("format!(\"{}{{acc}}\")", s) + } + } + FieldNamePosition::Identity => "acc.clone()".to_string(), + FieldNamePosition::SetBase(base) => format!("\"{}\".to_string()", base), + } + } + + fn constructor(&self, type_name: &str, path_expr: &str) -> String { + format!("{}::new(client.clone(), {})", type_name, path_expr) + } + + fn field_init(&self, indent: &str, name: &str, _type_ann: &str, value: &str) -> String { + // Rust struct initialization; type is in struct definition, not in init + format!("{}{}: {},", indent, name, value) + } + + fn generic_syntax(&self) -> GenericSyntax { + GenericSyntax::RUST + } + + fn struct_header(&self, name: &str, generic_params: &str, doc: Option<&str>) -> String { + let mut result = String::new(); + if let Some(doc) = doc { + result.push_str(&format!("/// {}\n", doc)); + } + result.push_str(&format!("pub struct {}{} {{\n", name, generic_params)); + result + } + + fn struct_footer(&self) -> String { + "}\n".to_string() + } + + fn constructor_header(&self, params: &str) -> String { + format!(" pub fn new({}) -> Self {{\n Self {{\n", params) + } + + fn constructor_footer(&self) -> String { + " 
}\n }\n".to_string() + } + + fn field_declaration(&self, indent: &str, name: &str, type_ann: &str) -> String { + format!("{}pub {}: {},\n", indent, name, type_ann) + } + + fn index_field_name(&self, index_name: &str) -> String { + format!("by_{}", to_snake_case(index_name)) + } + + fn string_literal(&self, value: &str) -> String { + format!("\"{}\".to_string()", value) + } +} diff --git a/crates/brk_bindgen/src/generate/fields.rs b/crates/brk_bindgen/src/generate/fields.rs new file mode 100644 index 000000000..e3db9103d --- /dev/null +++ b/crates/brk_bindgen/src/generate/fields.rs @@ -0,0 +1,136 @@ +//! Shared field generation logic. +//! +//! This module contains the core field generation logic that is shared +//! across all language backends. The `LanguageSyntax` trait is used to +//! abstract over language-specific formatting. + +use std::fmt::Write; + +use crate::{ClientMetadata, LanguageSyntax, PatternField, StructuralPattern}; + +/// Create a path suffix from a name. +/// Adds `_` prefix only if the name doesn't already start with `_`. +fn path_suffix(name: &str) -> String { + if name.starts_with('_') { + name.to_string() + } else { + format!("_{}", name) + } +} + +/// Generate a parameterized field using the language syntax. +/// +/// This is used for pattern instances where fields use an accumulated +/// metric name that's built up through the tree traversal. 
+pub fn generate_parameterized_field( + output: &mut String, + syntax: &S, + field: &PatternField, + pattern: &StructuralPattern, + metadata: &ClientMetadata, + indent: &str, +) { + let field_name = syntax.field_name(&field.name); + let type_ann = metadata.field_type_annotation(field, pattern.is_generic, None, syntax.generic_syntax()); + + // Compute path expression from field position + let path_expr = pattern + .get_field_position(&field.name) + .map(|pos| syntax.position_expr(pos, "acc")) + .unwrap_or_else(|| syntax.path_expr("acc", &path_suffix(&field.name))); + + let value = if metadata.is_pattern_type(&field.rust_type) { + syntax.constructor(&field.rust_type, &path_expr) + } else if let Some(accessor) = metadata.find_index_set_pattern(&field.indexes) { + syntax.constructor(&accessor.name, &path_expr) + } else { + panic!( + "Field '{}' has no matching pattern or index accessor. All metrics must be indexed.", + field.name + ) + }; + + writeln!(output, "{}", syntax.field_init(indent, &field_name, &type_ann, &value)).unwrap(); +} + +/// Generate a tree-path field using the language syntax. +/// +/// This is the fallback for non-parameterizable patterns where fields +/// use a base path that's extended with the field name. +pub fn generate_tree_path_field( + output: &mut String, + syntax: &S, + field: &PatternField, + metadata: &ClientMetadata, + indent: &str, +) { + let field_name = syntax.field_name(&field.name); + let type_ann = metadata.field_type_annotation(field, false, None, syntax.generic_syntax()); + let path_expr = syntax.path_expr("base_path", &path_suffix(&field.name)); + + let value = if metadata.is_pattern_type(&field.rust_type) { + syntax.constructor(&field.rust_type, &path_expr) + } else if let Some(accessor) = metadata.find_index_set_pattern(&field.indexes) { + syntax.constructor(&accessor.name, &path_expr) + } else { + panic!( + "Field '{}' has no matching pattern or index accessor. 
All metrics must be indexed.", + field.name + ) + }; + + writeln!(output, "{}", syntax.field_init(indent, &field_name, &type_ann, &value)).unwrap(); +} + +/// Generate a tree node field with a specific child node for pattern instance base detection. +/// +/// This is used when generating tree nodes where we need to detect the pattern instance +/// base from descendant leaf names. +pub fn generate_tree_node_field( + output: &mut String, + syntax: &S, + field: &PatternField, + metadata: &ClientMetadata, + indent: &str, + child_name: &str, + pattern_base: Option<&str>, +) { + let field_name = syntax.field_name(&field.name); + let type_ann = metadata.field_type_annotation(field, false, None, syntax.generic_syntax()); + + let value = if metadata.is_pattern_type(&field.rust_type) { + // Check if this pattern is parameterizable + let pattern = metadata.find_pattern(&field.rust_type); + let is_parameterizable = pattern.is_some_and(|p| p.is_parameterizable()); + + if is_parameterizable { + if let Some(base) = pattern_base { + // Use the detected metric base + let path = syntax.string_literal(base); + syntax.constructor(&field.rust_type, &path) + } else { + // Fallback to tree path + let path_expr = syntax.path_expr("base_path", &path_suffix(child_name)); + syntax.constructor(&field.rust_type, &path_expr) + } + } else { + let path_expr = syntax.path_expr("base_path", &path_suffix(child_name)); + syntax.constructor(&field.rust_type, &path_expr) + } + } else if let Some(accessor) = metadata.find_index_set_pattern(&field.indexes) { + let path_expr = syntax.path_expr("base_path", &path_suffix(child_name)); + syntax.constructor(&accessor.name, &path_expr) + } else if field.is_branch() { + // Non-pattern branch - instantiate the nested struct + let path_expr = syntax.path_expr("base_path", &path_suffix(child_name)); + syntax.constructor(&field.rust_type, &path_expr) + } else { + // All metrics must be indexed + panic!( + "Field '{}' is a leaf with no index accessor. 
All metrics must be indexed.", + field.name + ) + }; + + writeln!(output, "{}", syntax.field_init(indent, &field_name, &type_ann, &value)).unwrap(); +} diff --git a/crates/brk_bindgen/src/generate/mod.rs b/crates/brk_bindgen/src/generate/mod.rs new file mode 100644 index 000000000..817320bb4 --- /dev/null +++ b/crates/brk_bindgen/src/generate/mod.rs @@ -0,0 +1,9 @@ +//! Shared code generation logic. +//! +//! This module contains generation functions that are parameterized by +//! the `LanguageSyntax` trait, allowing them to work across all supported +//! language backends. + +mod fields; + +pub use fields::*; diff --git a/crates/brk_bindgen/src/generators/javascript/api.rs b/crates/brk_bindgen/src/generators/javascript/api.rs new file mode 100644 index 000000000..ac583406b --- /dev/null +++ b/crates/brk_bindgen/src/generators/javascript/api.rs @@ -0,0 +1,112 @@ +//! JavaScript API method generation. + +use std::fmt::Write; + +use crate::{Endpoint, Parameter, to_camel_case}; + +/// Generate API methods for the BrkClient class. 
+pub fn generate_api_methods(output: &mut String, endpoints: &[Endpoint]) { + for endpoint in endpoints { + if !endpoint.should_generate() { + continue; + } + + let method_name = endpoint_to_method_name(endpoint); + let return_type = endpoint.response_type.as_deref().unwrap_or("*"); + + writeln!(output, " /**").unwrap(); + if let Some(summary) = &endpoint.summary { + writeln!(output, " * {}", summary).unwrap(); + } + if let Some(desc) = &endpoint.description + && endpoint.summary.as_ref() != Some(desc) + { + writeln!(output, " * @description {}", desc).unwrap(); + } + + for param in &endpoint.path_params { + let desc = param.description.as_deref().unwrap_or(""); + writeln!( + output, + " * @param {{{}}} {} {}", + param.param_type, param.name, desc + ) + .unwrap(); + } + for param in &endpoint.query_params { + let optional = if param.required { "" } else { "=" }; + let desc = param.description.as_deref().unwrap_or(""); + writeln!( + output, + " * @param {{{}{}}} [{}] {}", + param.param_type, optional, param.name, desc + ) + .unwrap(); + } + + writeln!(output, " * @returns {{Promise<{}>}}", return_type).unwrap(); + writeln!(output, " */").unwrap(); + + let params = build_method_params(endpoint); + writeln!(output, " async {}({}) {{", method_name, params).unwrap(); + + let path = build_path_template(&endpoint.path, &endpoint.path_params); + + if endpoint.query_params.is_empty() { + writeln!(output, " return this.get(`{}`);", path).unwrap(); + } else { + writeln!(output, " const params = new URLSearchParams();").unwrap(); + for param in &endpoint.query_params { + if param.required { + writeln!( + output, + " params.set('{}', String({}));", + param.name, param.name + ) + .unwrap(); + } else { + writeln!( + output, + " if ({} !== undefined) params.set('{}', String({}));", + param.name, param.name, param.name + ) + .unwrap(); + } + } + writeln!(output, " const query = params.toString();").unwrap(); + writeln!( + output, + " return this.get(`{}${{query ? '?' 
+ query : ''}}`);", + path + ) + .unwrap(); + } + + writeln!(output, " }}\n").unwrap(); + } +} + +fn endpoint_to_method_name(endpoint: &Endpoint) -> String { + to_camel_case(&endpoint.operation_name()) +} + +fn build_method_params(endpoint: &Endpoint) -> String { + let mut params = Vec::new(); + for param in &endpoint.path_params { + params.push(param.name.clone()); + } + for param in &endpoint.query_params { + params.push(param.name.clone()); + } + params.join(", ") +} + +fn build_path_template(path: &str, path_params: &[Parameter]) -> String { + let mut result = path.to_string(); + for param in path_params { + let placeholder = format!("{{{}}}", param.name); + let interpolation = format!("${{{}}}", param.name); + result = result.replace(&placeholder, &interpolation); + } + result +} diff --git a/crates/brk_bindgen/src/generators/javascript/client.rs b/crates/brk_bindgen/src/generators/javascript/client.rs new file mode 100644 index 000000000..efeca8fa7 --- /dev/null +++ b/crates/brk_bindgen/src/generators/javascript/client.rs @@ -0,0 +1,374 @@ +//! JavaScript base client and pattern factory generation. + +use std::fmt::Write; + +use brk_cohort::{ + AGE_RANGE_NAMES, AMOUNT_RANGE_NAMES, EPOCH_NAMES, GE_AMOUNT_NAMES, LT_AMOUNT_NAMES, + MAX_AGE_NAMES, MIN_AGE_NAMES, SPENDABLE_TYPE_NAMES, TERM_NAMES, YEAR_NAMES, +}; +use brk_types::{Index, PoolSlug, pools}; +use serde::Serialize; +use serde_json::Value; + +use crate::{ + ClientMetadata, GenericSyntax, IndexSetPattern, JavaScriptSyntax, PatternField, + StructuralPattern, VERSION, generate_parameterized_field, generate_tree_path_field, + to_camel_case, +}; + +/// Generate the base BrkClient class with HTTP functionality. 
+pub fn generate_base_client(output: &mut String) { + writeln!( + output, + r#"/** + * @typedef {{Object}} BrkClientOptions + * @property {{string}} baseUrl - Base URL for the API + * @property {{number}} [timeout] - Request timeout in milliseconds + */ + +const _isBrowser = typeof window !== 'undefined' && 'caches' in window; +const _runIdle = (/** @type {{VoidFunction}} */ fn) => (globalThis.requestIdleCallback ?? setTimeout)(fn); + +/** @type {{Promise}} */ +const _cachePromise = _isBrowser + ? caches.open('__BRK_CLIENT__').catch(() => null) + : Promise.resolve(null); + +/** + * Custom error class for BRK client errors + */ +class BrkError extends Error {{ + /** + * @param {{string}} message + * @param {{number}} [status] + */ + constructor(message, status) {{ + super(message); + this.name = 'BrkError'; + this.status = status; + }} +}} + +/** + * @template T + * @typedef {{Object}} Endpoint + * @property {{(onUpdate?: (value: T[]) => void) => Promise}} get - Fetch all data points + * @property {{(from?: number, to?: number, onUpdate?: (value: T[]) => void) => Promise}} range - Fetch data in range + * @property {{string}} path - The endpoint path + */ + +/** + * @template T + * @typedef {{Object}} MetricPattern + * @property {{string}} name - The metric name + * @property {{Partial>>}} by - Index endpoints (lazy getters) + * @property {{() => Index[]}} indexes - Get the list of available indexes + * @property {{(index: Index) => Endpoint|undefined}} get - Get an endpoint for a specific index + */ + +/** + * Create an endpoint for a metric index. 
+ * @template T + * @param {{BrkClientBase}} client + * @param {{string}} name - The metric vec name + * @param {{Index}} index - The index name + * @returns {{Endpoint}} + */ +function _endpoint(client, name, index) {{ + const p = `/api/metric/${{name}}/${{index}}`; + return {{ + get: (onUpdate) => client.get(p, onUpdate), + range: (from, to, onUpdate) => {{ + const params = new URLSearchParams(); + if (from !== undefined) params.set('from', String(from)); + if (to !== undefined) params.set('to', String(to)); + const query = params.toString(); + return client.get(query ? `${{p}}?${{query}}` : p, onUpdate); + }}, + get path() {{ return p; }}, + }}; +}} + +/** + * Base HTTP client for making requests with caching support + */ +class BrkClientBase {{ + /** + * @param {{BrkClientOptions|string}} options + */ + constructor(options) {{ + const isString = typeof options === 'string'; + this.baseUrl = isString ? options : options.baseUrl; + this.timeout = isString ? 5000 : (options.timeout ?? 5000); + }} + + /** + * Make a GET request with stale-while-revalidate caching + * @template T + * @param {{string}} path + * @param {{(value: T) => void}} [onUpdate] - Called when data is available + * @returns {{Promise}} + */ + async get(path, onUpdate) {{ + const base = this.baseUrl.endsWith('/') ? this.baseUrl.slice(0, -1) : this.baseUrl; + const url = `${{base}}${{path}}`; + const cache = await _cachePromise; + const cachedRes = await cache?.match(url); + const cachedJson = cachedRes ? 
await cachedRes.json() : null; + + if (cachedJson) onUpdate?.(cachedJson); + if (!globalThis.navigator?.onLine) {{ + if (cachedJson) return cachedJson; + throw new BrkError('Offline and no cached data available'); + }} + + try {{ + const res = await fetch(url, {{ signal: AbortSignal.timeout(this.timeout) }}); + if (!res.ok) throw new BrkError(`HTTP ${{res.status}}`, res.status); + if (cachedRes?.headers.get('ETag') === res.headers.get('ETag')) return cachedJson; + + const cloned = res.clone(); + const json = await res.json(); + onUpdate?.(json); + if (cache) _runIdle(() => cache.put(url, cloned)); + return json; + }} catch (e) {{ + if (cachedJson) return cachedJson; + throw e; + }} + }} +}} + +/** + * Build metric name with optional prefix. + * @param {{string}} acc - Accumulated prefix + * @param {{string}} s - Metric suffix + * @returns {{string}} + */ +const _m = (acc, s) => acc ? `${{acc}}_${{s}}` : s; + +"# + ) + .unwrap(); +} + +/// Generate static constants for the BrkClient class. 
+pub fn generate_static_constants(output: &mut String) { + fn instance_const(output: &mut String, name: &str, value: &T) { + write_static_const(output, name, &serde_json::to_string_pretty(value).unwrap()); + } + + fn instance_const_raw(output: &mut String, name: &str, value: &str) { + writeln!(output, " {} = {};\n", name, value).unwrap(); + } + + instance_const_raw(output, "VERSION", &format!("\"v{}\"", VERSION)); + + let indexes = Index::all(); + let indexes_json: Vec<&'static str> = indexes.iter().map(|i| i.serialize_long()).collect(); + instance_const(output, "INDEXES", &indexes_json); + + let pools = pools(); + let mut sorted_pools: Vec<_> = pools.iter().collect(); + sorted_pools.sort_by(|a, b| a.name.to_lowercase().cmp(&b.name.to_lowercase())); + let pool_map: std::collections::BTreeMap = + sorted_pools.iter().map(|p| (p.slug(), p.name)).collect(); + instance_const(output, "POOL_ID_TO_POOL_NAME", &pool_map); + + fn instance_const_camel(output: &mut String, name: &str, value: &T) { + let json_value: Value = serde_json::to_value(value).unwrap(); + let camel_value = camel_case_top_level_keys(json_value); + write_static_const(output, name, &serde_json::to_string_pretty(&camel_value).unwrap()); + } + + instance_const_camel(output, "TERM_NAMES", &TERM_NAMES); + instance_const_camel(output, "EPOCH_NAMES", &EPOCH_NAMES); + instance_const_camel(output, "YEAR_NAMES", &YEAR_NAMES); + instance_const_camel(output, "SPENDABLE_TYPE_NAMES", &SPENDABLE_TYPE_NAMES); + instance_const_camel(output, "AGE_RANGE_NAMES", &AGE_RANGE_NAMES); + instance_const_camel(output, "MAX_AGE_NAMES", &MAX_AGE_NAMES); + instance_const_camel(output, "MIN_AGE_NAMES", &MIN_AGE_NAMES); + instance_const_camel(output, "AMOUNT_RANGE_NAMES", &AMOUNT_RANGE_NAMES); + instance_const_camel(output, "GE_AMOUNT_NAMES", &GE_AMOUNT_NAMES); + instance_const_camel(output, "LT_AMOUNT_NAMES", <_AMOUNT_NAMES); +} + +fn camel_case_top_level_keys(value: Value) -> Value { + match value { + Value::Object(map) => { + let 
new_map: serde_json::Map = map + .into_iter() + .map(|(k, v)| (to_camel_case(&k), v)) + .collect(); + Value::Object(new_map) + } + other => other, + } +} + +fn indent_json_const(json: &str) -> String { + json.lines() + .enumerate() + .map(|(i, line)| if i == 0 { line.to_string() } else { format!(" {}", line) }) + .collect::>() + .join("\n") +} + +fn write_static_const(output: &mut String, name: &str, json: &str) { + writeln!(output, " {} = /** @type {{const}} */ ({});\n", name, indent_json_const(json)).unwrap(); +} + +/// Generate index accessor factory functions. +pub fn generate_index_accessors(output: &mut String, patterns: &[IndexSetPattern]) { + if patterns.is_empty() { + return; + } + + writeln!(output, "// Index accessor factory functions\n").unwrap(); + + for pattern in patterns { + let by_fields: Vec = pattern + .indexes + .iter() + .map(|idx| format!("{}: Endpoint", idx.serialize_long())) + .collect(); + let by_type = format!("{{ {} }}", by_fields.join(", ")); + + writeln!(output, "/**").unwrap(); + writeln!(output, " * @template T").unwrap(); + writeln!( + output, + " * @typedef {{{{ name: string, by: {}, indexes: () => Index[], get: (index: Index) => Endpoint|undefined }}}} {}", + by_type, pattern.name + ) + .unwrap(); + writeln!(output, " */\n").unwrap(); + + writeln!(output, "/**").unwrap(); + writeln!(output, " * Create a {} accessor", pattern.name).unwrap(); + writeln!(output, " * @template T").unwrap(); + writeln!(output, " * @param {{BrkClientBase}} client").unwrap(); + writeln!(output, " * @param {{string}} name - The metric vec name").unwrap(); + writeln!(output, " * @returns {{{}}}", pattern.name).unwrap(); + writeln!(output, " */").unwrap(); + writeln!(output, "function create{}(client, name) {{", pattern.name).unwrap(); + writeln!(output, " return {{").unwrap(); + writeln!(output, " name,").unwrap(); + writeln!(output, " by: {{").unwrap(); + + for (i, index) in pattern.indexes.iter().enumerate() { + let index_name = index.serialize_long(); + 
let comma = if i < pattern.indexes.len() - 1 { "," } else { "" }; + writeln!( + output, + " get {}() {{ return _endpoint(client, name, '{}'); }}{}", + index_name, index_name, comma + ) + .unwrap(); + } + + writeln!(output, " }},").unwrap(); + writeln!(output, " indexes() {{").unwrap(); + + write!(output, " return [").unwrap(); + for (i, index) in pattern.indexes.iter().enumerate() { + if i > 0 { + write!(output, ", ").unwrap(); + } + write!(output, "'{}'", index.serialize_long()).unwrap(); + } + writeln!(output, "];").unwrap(); + + writeln!(output, " }},").unwrap(); + writeln!(output, " get(index) {{").unwrap(); + writeln!(output, " if (this.indexes().includes(index)) {{").unwrap(); + writeln!(output, " return _endpoint(client, name, index);").unwrap(); + writeln!(output, " }}").unwrap(); + writeln!(output, " }}").unwrap(); + writeln!(output, " }};").unwrap(); + writeln!(output, "}}\n").unwrap(); + } +} + +/// Generate structural pattern factory functions. +pub fn generate_structural_patterns( + output: &mut String, + patterns: &[StructuralPattern], + metadata: &ClientMetadata, +) { + if patterns.is_empty() { + return; + } + + writeln!(output, "// Reusable structural pattern factories\n").unwrap(); + + for pattern in patterns { + let is_parameterizable = pattern.is_parameterizable(); + + writeln!(output, "/**").unwrap(); + if pattern.is_generic { + writeln!(output, " * @template T").unwrap(); + } + writeln!(output, " * @typedef {{Object}} {}", pattern.name).unwrap(); + for field in &pattern.fields { + let js_type = field_type_annotation(field, metadata, pattern.is_generic); + writeln!( + output, + " * @property {{{}}} {}", + js_type, + to_camel_case(&field.name) + ) + .unwrap(); + } + writeln!(output, " */\n").unwrap(); + + writeln!(output, "/**").unwrap(); + writeln!(output, " * Create a {} pattern node", pattern.name).unwrap(); + if pattern.is_generic { + writeln!(output, " * @template T").unwrap(); + } + writeln!(output, " * @param {{BrkClientBase}} 
client").unwrap(); + if is_parameterizable { + writeln!(output, " * @param {{string}} acc - Accumulated metric name").unwrap(); + } else { + writeln!(output, " * @param {{string}} basePath").unwrap(); + } + let return_type = if pattern.is_generic { + format!("{}", pattern.name) + } else { + pattern.name.clone() + }; + writeln!(output, " * @returns {{{}}}", return_type).unwrap(); + writeln!(output, " */").unwrap(); + + let param_name = if is_parameterizable { "acc" } else { "basePath" }; + writeln!(output, "function create{}(client, {}) {{", pattern.name, param_name).unwrap(); + writeln!(output, " return {{").unwrap(); + + let syntax = JavaScriptSyntax; + for field in &pattern.fields { + if is_parameterizable { + generate_parameterized_field(output, &syntax, field, pattern, metadata, " "); + } else { + generate_tree_path_field(output, &syntax, field, metadata, " "); + } + } + + writeln!(output, " }};").unwrap(); + writeln!(output, "}}\n").unwrap(); + } +} + +fn field_type_annotation(field: &PatternField, metadata: &ClientMetadata, is_generic: bool) -> String { + metadata.field_type_annotation(field, is_generic, None, GenericSyntax::JAVASCRIPT) +} + +/// Get field type with specific generic value type. +pub fn field_type_with_generic( + field: &PatternField, + metadata: &ClientMetadata, + is_generic: bool, + generic_value_type: Option<&str>, +) -> String { + metadata.field_type_annotation(field, is_generic, generic_value_type, GenericSyntax::JAVASCRIPT) +} diff --git a/crates/brk_bindgen/src/generators/javascript/mod.rs b/crates/brk_bindgen/src/generators/javascript/mod.rs new file mode 100644 index 000000000..a51c909d6 --- /dev/null +++ b/crates/brk_bindgen/src/generators/javascript/mod.rs @@ -0,0 +1,65 @@ +//! JavaScript client generation. +//! +//! This module generates a JavaScript + JSDoc client for the BRK API. 
+ +mod api; +mod client; +mod tree; +mod types; + +use std::{fmt::Write, fs, io, path::Path}; + +use serde_json::json; + +use crate::{ClientMetadata, Endpoint, TypeSchemas, VERSION}; + +/// Generate JavaScript + JSDoc client from metadata and OpenAPI endpoints. +/// +/// `output_path` is the full path to the output file (e.g., "modules/brk-client/index.js"). +pub fn generate_javascript_client( + metadata: &ClientMetadata, + endpoints: &[Endpoint], + schemas: &TypeSchemas, + output_path: &Path, +) -> io::Result<()> { + let mut output = String::new(); + + writeln!(output, "// Auto-generated BRK JavaScript client").unwrap(); + writeln!(output, "// Do not edit manually\n").unwrap(); + + types::generate_type_definitions(&mut output, schemas); + client::generate_base_client(&mut output); + client::generate_index_accessors(&mut output, &metadata.index_set_patterns); + client::generate_structural_patterns(&mut output, &metadata.structural_patterns, metadata); + tree::generate_tree_typedefs(&mut output, &metadata.catalog, metadata); + tree::generate_main_client(&mut output, &metadata.catalog, metadata, endpoints); + + fs::write(output_path, output)?; + + // Update package.json version if it exists in the same directory + if let Some(parent) = output_path.parent() { + let package_json_path = parent.join("package.json"); + if package_json_path.exists() { + update_package_json_version(&package_json_path)?; + } + } + + Ok(()) +} + +fn update_package_json_version(package_json_path: &Path) -> io::Result<()> { + let content = fs::read_to_string(package_json_path)?; + let mut package: serde_json::Value = serde_json::from_str(&content) + .map_err(|e| io::Error::new(io::ErrorKind::InvalidData, e))?; + + if let Some(obj) = package.as_object_mut() { + obj.insert("version".to_string(), json!(VERSION)); + } + + let updated = serde_json::to_string_pretty(&package) + .map_err(|e| io::Error::new(io::ErrorKind::InvalidData, e))?; + + fs::write(package_json_path, updated + "\n")?; + + Ok(()) 
+} diff --git a/crates/brk_bindgen/src/generators/javascript/tree.rs b/crates/brk_bindgen/src/generators/javascript/tree.rs new file mode 100644 index 000000000..9fc11d02f --- /dev/null +++ b/crates/brk_bindgen/src/generators/javascript/tree.rs @@ -0,0 +1,223 @@ +//! JavaScript tree structure generation. + +use std::collections::HashSet; +use std::fmt::Write; + +use brk_types::TreeNode; + +use crate::{ + ClientMetadata, Endpoint, PatternField, child_type_name, get_fields_with_child_info, + get_first_leaf_name, get_node_fields, get_pattern_instance_base, infer_accumulated_name, + to_camel_case, +}; + +use super::api::generate_api_methods; +use super::client::{field_type_with_generic, generate_static_constants}; + +/// Generate JSDoc typedefs for the catalog tree. +pub fn generate_tree_typedefs(output: &mut String, catalog: &TreeNode, metadata: &ClientMetadata) { + writeln!(output, "// Catalog tree typedefs\n").unwrap(); + + let pattern_lookup = metadata.pattern_lookup(); + let mut generated = HashSet::new(); + generate_tree_typedef( + output, + "CatalogTree", + catalog, + &pattern_lookup, + metadata, + &mut generated, + ); +} + +fn generate_tree_typedef( + output: &mut String, + name: &str, + node: &TreeNode, + pattern_lookup: &std::collections::HashMap, String>, + metadata: &ClientMetadata, + generated: &mut HashSet, +) { + let TreeNode::Branch(children) = node else { + return; + }; + + let fields_with_child_info = get_fields_with_child_info(children, name, pattern_lookup); + let fields: Vec = fields_with_child_info + .iter() + .map(|(f, _)| f.clone()) + .collect(); + + if pattern_lookup.contains_key(&fields) + && pattern_lookup.get(&fields) != Some(&name.to_string()) + { + return; + } + + if generated.contains(name) { + return; + } + generated.insert(name.to_string()); + + writeln!(output, "/**").unwrap(); + writeln!(output, " * @typedef {{Object}} {}", name).unwrap(); + + for (field, child_fields) in &fields_with_child_info { + let generic_value_type = 
child_fields + .as_ref() + .and_then(|cf| metadata.get_type_param(cf)) + .map(String::as_str); + let js_type = field_type_with_generic(field, metadata, false, generic_value_type); + writeln!( + output, + " * @property {{{}}} {}", + js_type, + to_camel_case(&field.name) + ) + .unwrap(); + } + + writeln!(output, " */\n").unwrap(); + + for (child_name, child_node) in children { + if let TreeNode::Branch(grandchildren) = child_node { + let child_fields = get_node_fields(grandchildren, pattern_lookup); + if !pattern_lookup.contains_key(&child_fields) { + let child_type = child_type_name(name, child_name); + generate_tree_typedef( + output, + &child_type, + child_node, + pattern_lookup, + metadata, + generated, + ); + } + } + } +} + +/// Generate the main BrkClient class. +pub fn generate_main_client( + output: &mut String, + catalog: &TreeNode, + metadata: &ClientMetadata, + endpoints: &[Endpoint], +) { + let pattern_lookup = metadata.pattern_lookup(); + + writeln!(output, "/**").unwrap(); + writeln!(output, " * Main BRK client with catalog tree and API methods").unwrap(); + writeln!(output, " * @extends BrkClientBase").unwrap(); + writeln!(output, " */").unwrap(); + writeln!(output, "class BrkClient extends BrkClientBase {{").unwrap(); + + generate_static_constants(output); + + writeln!(output, " /**").unwrap(); + writeln!(output, " * @param {{BrkClientOptions|string}} options").unwrap(); + writeln!(output, " */").unwrap(); + writeln!(output, " constructor(options) {{").unwrap(); + writeln!(output, " super(options);").unwrap(); + writeln!(output, " /** @type {{CatalogTree}} */").unwrap(); + writeln!(output, " this.tree = this._buildTree('');").unwrap(); + writeln!(output, " }}\n").unwrap(); + + writeln!(output, " /**").unwrap(); + writeln!(output, " * @private").unwrap(); + writeln!(output, " * @param {{string}} basePath").unwrap(); + writeln!(output, " * @returns {{CatalogTree}}").unwrap(); + writeln!(output, " */").unwrap(); + writeln!(output, " _buildTree(basePath) 
{{").unwrap(); + writeln!(output, " return {{").unwrap(); + generate_tree_initializer(output, catalog, "", 3, &pattern_lookup, metadata); + writeln!(output, " }};").unwrap(); + writeln!(output, " }}\n").unwrap(); + + generate_api_methods(output, endpoints); + + writeln!(output, "}}\n").unwrap(); + + writeln!(output, "export {{ BrkClient, BrkClientBase, BrkError }};").unwrap(); +} + +fn generate_tree_initializer( + output: &mut String, + node: &TreeNode, + accumulated_name: &str, + indent: usize, + pattern_lookup: &std::collections::HashMap, String>, + metadata: &ClientMetadata, +) { + let indent_str = " ".repeat(indent); + + if let TreeNode::Branch(children) = node { + for (i, (child_name, child_node)) in children.iter().enumerate() { + let field_name = to_camel_case(child_name); + let comma = if i < children.len() - 1 { "," } else { "" }; + + match child_node { + TreeNode::Leaf(leaf) => { + let accessor = metadata + .find_index_set_pattern(leaf.indexes()) + .unwrap_or_else(|| { + panic!( + "Metric '{}' has no matching index pattern. 
All metrics must be indexed.", + leaf.name() + ) + }); + writeln!( + output, + "{}{}: create{}(this, '{}'){}", + indent_str, field_name, accessor.name, leaf.name(), comma + ) + .unwrap(); + } + TreeNode::Branch(grandchildren) => { + let child_fields = get_node_fields(grandchildren, pattern_lookup); + if let Some(pattern_name) = pattern_lookup.get(&child_fields) { + let pattern = metadata + .structural_patterns + .iter() + .find(|p| &p.name == pattern_name); + let is_parameterizable = + pattern.map(|p| p.is_parameterizable()).unwrap_or(false); + + let arg = if is_parameterizable { + get_pattern_instance_base(child_node) + } else if accumulated_name.is_empty() { + format!("/{}", child_name) + } else { + format!("{}/{}", accumulated_name, child_name) + }; + + writeln!( + output, + "{}{}: create{}(this, '{}'){}", + indent_str, field_name, pattern_name, arg, comma + ) + .unwrap(); + } else { + let child_acc = + infer_child_accumulated_name(child_node, accumulated_name, child_name); + writeln!(output, "{}{}: {{", indent_str, field_name).unwrap(); + generate_tree_initializer( + output, + child_node, + &child_acc, + indent + 1, + pattern_lookup, + metadata, + ); + writeln!(output, "{}}}{}", indent_str, comma).unwrap(); + } + } + } + } + } +} + +fn infer_child_accumulated_name(node: &TreeNode, parent_acc: &str, field_name: &str) -> String { + let leaf_name = get_first_leaf_name(node).unwrap_or_default(); + infer_accumulated_name(parent_acc, field_name, &leaf_name) +} diff --git a/crates/brk_bindgen/src/generators/javascript/types.rs b/crates/brk_bindgen/src/generators/javascript/types.rs new file mode 100644 index 000000000..84ddba7fb --- /dev/null +++ b/crates/brk_bindgen/src/generators/javascript/types.rs @@ -0,0 +1,172 @@ +//! JavaScript type definitions generation. + +use std::fmt::Write; + +use serde_json::Value; + +use crate::{TypeSchemas, ref_to_type_name, to_camel_case}; + +/// Generate JSDoc type definitions from OpenAPI schemas. 
+pub fn generate_type_definitions(output: &mut String, schemas: &TypeSchemas) { + if schemas.is_empty() { + return; + } + + writeln!(output, "// Type definitions\n").unwrap(); + + for (name, schema) in schemas { + let js_type = schema_to_js_type(schema, Some(name)); + + if is_primitive_alias(schema) { + writeln!(output, "/** @typedef {{{}}} {} */", js_type, name).unwrap(); + } else if let Some(props) = schema.get("properties").and_then(|p| p.as_object()) { + writeln!(output, "/**").unwrap(); + writeln!(output, " * @typedef {{Object}} {}", name).unwrap(); + for (prop_name, prop_schema) in props { + let prop_type = schema_to_js_type(prop_schema, Some(name)); + let required = schema + .get("required") + .and_then(|r| r.as_array()) + .map(|arr| arr.iter().any(|v| v.as_str() == Some(prop_name))) + .unwrap_or(false); + let optional = if required { "" } else { "=" }; + let safe_name = to_camel_case(prop_name); + writeln!( + output, + " * @property {{{}{}}} {}", + prop_type, optional, safe_name + ) + .unwrap(); + } + writeln!(output, " */").unwrap(); + } else { + writeln!(output, "/** @typedef {{{}}} {} */", js_type, name).unwrap(); + } + } + writeln!(output).unwrap(); +} + +fn is_primitive_alias(schema: &Value) -> bool { + schema.get("properties").is_none() + && schema.get("items").is_none() + && schema.get("anyOf").is_none() + && schema.get("oneOf").is_none() + && schema.get("enum").is_none() +} + +fn json_type_to_js(ty: &str, schema: &Value, current_type: Option<&str>) -> String { + match ty { + "integer" | "number" => "number".to_string(), + "boolean" => "boolean".to_string(), + "string" => "string".to_string(), + "null" => "null".to_string(), + "array" => { + let item_type = schema + .get("items") + .map(|s| schema_to_js_type(s, current_type)) + .unwrap_or_else(|| "*".to_string()); + format!("{}[]", item_type) + } + "object" => { + if let Some(add_props) = schema.get("additionalProperties") { + let value_type = schema_to_js_type(add_props, current_type); + return 
format!("{{ [key: string]: {} }}", value_type); + } + "Object".to_string() + } + _ => "*".to_string(), + } +} + +/// Convert a JSON schema to a JavaScript type string. +pub fn schema_to_js_type(schema: &Value, current_type: Option<&str>) -> String { + if let Some(all_of) = schema.get("allOf").and_then(|v| v.as_array()) { + for item in all_of { + let resolved = schema_to_js_type(item, current_type); + if resolved != "*" { + return resolved; + } + } + } + + if let Some(ref_path) = schema.get("$ref").and_then(|r| r.as_str()) { + return ref_to_type_name(ref_path).unwrap_or("*").to_string(); + } + + if let Some(enum_values) = schema.get("enum").and_then(|e| e.as_array()) { + let literals: Vec = enum_values + .iter() + .filter_map(|v| v.as_str()) + .map(|s| format!("\"{}\"", s)) + .collect(); + if !literals.is_empty() { + return format!("({})", literals.join("|")); + } + } + + if let Some(ty) = schema.get("type") { + if let Some(type_array) = ty.as_array() { + let types: Vec = type_array + .iter() + .filter_map(|t| t.as_str()) + .filter(|t| *t != "null") + .map(|t| json_type_to_js(t, schema, current_type)) + .collect(); + let has_null = type_array.iter().any(|t| t.as_str() == Some("null")); + + if types.len() == 1 { + let base_type = &types[0]; + return if has_null { + format!("?{}", base_type) + } else { + base_type.clone() + }; + } else if !types.is_empty() { + let union = format!("({})", types.join("|")); + return if has_null { + format!("?{}", union) + } else { + union + }; + } + } + + if let Some(ty_str) = ty.as_str() { + return json_type_to_js(ty_str, schema, current_type); + } + } + + if let Some(variants) = schema + .get("anyOf") + .or_else(|| schema.get("oneOf")) + .and_then(|v| v.as_array()) + { + let types: Vec = variants + .iter() + .map(|v| schema_to_js_type(v, current_type)) + .collect(); + let filtered: Vec<_> = types.iter().filter(|t| *t != "*").collect(); + if !filtered.is_empty() { + return format!( + "({})", + filtered + .iter() + .map(|s| s.as_str()) 
+ .collect::>() + .join("|") + ); + } + return format!("({})", types.join("|")); + } + + if let Some(format) = schema.get("format").and_then(|f| f.as_str()) { + return match format { + "int32" | "int64" => "number".to_string(), + "float" | "double" => "number".to_string(), + "date" | "date-time" => "string".to_string(), + _ => "*".to_string(), + }; + } + + "*".to_string() +} diff --git a/crates/brk_bindgen/src/generators/mod.rs b/crates/brk_bindgen/src/generators/mod.rs new file mode 100644 index 000000000..f1863f4ec --- /dev/null +++ b/crates/brk_bindgen/src/generators/mod.rs @@ -0,0 +1,16 @@ +//! Code generators for client libraries. +//! +//! Each language has its own submodule with focused files: +//! - `types.rs` - Type definitions +//! - `client.rs` - Base client and pattern factories +//! - `tree.rs` - Tree structure generation +//! - `api.rs` - API method generation +//! - `mod.rs` - Entry point + +pub mod javascript; +pub mod python; +pub mod rust; + +pub use javascript::generate_javascript_client; +pub use python::generate_python_client; +pub use rust::generate_rust_client; diff --git a/crates/brk_bindgen/src/generators/python/api.rs b/crates/brk_bindgen/src/generators/python/api.rs new file mode 100644 index 000000000..616a81639 --- /dev/null +++ b/crates/brk_bindgen/src/generators/python/api.rs @@ -0,0 +1,151 @@ +//! Python API method generation. 
+ +use std::fmt::Write; + +use crate::{Endpoint, Parameter, escape_python_keyword, to_snake_case}; + +use super::client::generate_class_constants; +use super::types::js_type_to_python; + +/// Generate the main client class +pub fn generate_main_client(output: &mut String, endpoints: &[Endpoint]) { + writeln!(output, "class BrkClient(BrkClientBase):").unwrap(); + writeln!( + output, + " \"\"\"Main BRK client with catalog tree and API methods.\"\"\"" + ) + .unwrap(); + writeln!(output).unwrap(); + + // Generate class-level constants + generate_class_constants(output); + + writeln!( + output, + " def __init__(self, base_url: str = 'http://localhost:3000', timeout: float = 30.0):" + ) + .unwrap(); + writeln!(output, " super().__init__(base_url, timeout)").unwrap(); + writeln!(output, " self.tree = CatalogTree(self)").unwrap(); + writeln!(output).unwrap(); + + // Generate API methods + generate_api_methods(output, endpoints); +} + +/// Generate API methods from OpenAPI endpoints +pub fn generate_api_methods(output: &mut String, endpoints: &[Endpoint]) { + for endpoint in endpoints { + if !endpoint.should_generate() { + continue; + } + + let method_name = endpoint_to_method_name(endpoint); + let return_type = endpoint + .response_type + .as_deref() + .map(js_type_to_python) + .unwrap_or_else(|| "Any".to_string()); + + // Build method signature + let params = build_method_params(endpoint); + writeln!( + output, + " def {}(self{}) -> {}:", + method_name, params, return_type + ) + .unwrap(); + + // Docstring + match (&endpoint.summary, &endpoint.description) { + (Some(summary), Some(desc)) if summary != desc => { + writeln!(output, " \"\"\"{}.", summary.trim_end_matches('.')).unwrap(); + writeln!(output).unwrap(); + writeln!(output, " {}\"\"\"", desc).unwrap(); + } + (Some(summary), _) => { + writeln!(output, " \"\"\"{}\"\"\"", summary).unwrap(); + } + (None, Some(desc)) => { + writeln!(output, " \"\"\"{}\"\"\"", desc).unwrap(); + } + (None, None) => {} + } + + // Build 
path + let path = build_path_template(&endpoint.path, &endpoint.path_params); + + if endpoint.query_params.is_empty() { + if endpoint.path_params.is_empty() { + writeln!(output, " return self.get('{}')", path).unwrap(); + } else { + writeln!(output, " return self.get(f'{}')", path).unwrap(); + } + } else { + writeln!(output, " params = []").unwrap(); + for param in &endpoint.query_params { + // Use safe name for Python variable, original name for API query parameter + let safe_name = escape_python_keyword(¶m.name); + if param.required { + writeln!( + output, + " params.append(f'{}={{{}}}')", + param.name, safe_name + ) + .unwrap(); + } else { + writeln!( + output, + " if {} is not None: params.append(f'{}={{{}}}')", + safe_name, param.name, safe_name + ) + .unwrap(); + } + } + writeln!(output, " query = '&'.join(params)").unwrap(); + writeln!( + output, + " return self.get(f'{}{{\"?\" + query if query else \"\"}}')", + path + ) + .unwrap(); + } + + writeln!(output).unwrap(); + } +} + +fn endpoint_to_method_name(endpoint: &Endpoint) -> String { + to_snake_case(&endpoint.operation_name()) +} + +fn build_method_params(endpoint: &Endpoint) -> String { + let mut params = Vec::new(); + for param in &endpoint.path_params { + let safe_name = escape_python_keyword(¶m.name); + let py_type = js_type_to_python(¶m.param_type); + params.push(format!(", {}: {}", safe_name, py_type)); + } + for param in &endpoint.query_params { + let safe_name = escape_python_keyword(¶m.name); + let py_type = js_type_to_python(¶m.param_type); + if param.required { + params.push(format!(", {}: {}", safe_name, py_type)); + } else { + params.push(format!(", {}: Optional[{}] = None", safe_name, py_type)); + } + } + params.join("") +} + +fn build_path_template(path: &str, path_params: &[Parameter]) -> String { + let mut result = path.to_string(); + for param in path_params { + let placeholder = format!("{{{}}}", param.name); + // Use escaped name for Python variable interpolation in f-string + let 
safe_name = escape_python_keyword(¶m.name); + let interpolation = format!("{{{}}}", safe_name); + result = result.replace(&placeholder, &interpolation); + } + result +} diff --git a/crates/brk_bindgen/src/generators/python/client.rs b/crates/brk_bindgen/src/generators/python/client.rs new file mode 100644 index 000000000..589e8635f --- /dev/null +++ b/crates/brk_bindgen/src/generators/python/client.rs @@ -0,0 +1,337 @@ +//! Python base client and pattern factory generation. + +use std::fmt::Write; + +use brk_cohort::{ + AGE_RANGE_NAMES, AMOUNT_RANGE_NAMES, EPOCH_NAMES, GE_AMOUNT_NAMES, LT_AMOUNT_NAMES, + MAX_AGE_NAMES, MIN_AGE_NAMES, SPENDABLE_TYPE_NAMES, TERM_NAMES, YEAR_NAMES, +}; +use brk_types::{pools, Index}; +use serde::Serialize; + +use crate::{ + ClientMetadata, GenericSyntax, IndexSetPattern, PatternField, PythonSyntax, + StructuralPattern, VERSION, generate_parameterized_field, generate_tree_path_field, + index_to_field_name, +}; + +/// Generate class-level constants for the BrkClient class. 
+pub fn generate_class_constants(output: &mut String) { + fn class_const(output: &mut String, name: &str, value: &T) { + let json = serde_json::to_string_pretty(value).unwrap(); + // Indent all lines for class body + let indented = json + .lines() + .enumerate() + .map(|(i, line)| { + if i == 0 { + format!(" {} = {}", name, line) + } else { + format!(" {}", line) + } + }) + .collect::>() + .join("\n"); + writeln!(output, "{}\n", indented).unwrap(); + } + + // VERSION + writeln!(output, " VERSION = \"v{}\"\n", VERSION).unwrap(); + + // INDEXES + let indexes = Index::all(); + let indexes_list: Vec<&str> = indexes.iter().map(|i| i.serialize_long()).collect(); + class_const(output, "INDEXES", &indexes_list); + + // POOL_ID_TO_POOL_NAME + let pools = pools(); + let mut sorted_pools: Vec<_> = pools.iter().collect(); + sorted_pools.sort_by(|a, b| a.name.to_lowercase().cmp(&b.name.to_lowercase())); + let pool_map: std::collections::BTreeMap = sorted_pools + .iter() + .map(|p| (p.slug().to_string(), p.name)) + .collect(); + class_const(output, "POOL_ID_TO_POOL_NAME", &pool_map); + + // Cohort names + class_const(output, "TERM_NAMES", &TERM_NAMES); + class_const(output, "EPOCH_NAMES", &EPOCH_NAMES); + class_const(output, "YEAR_NAMES", &YEAR_NAMES); + class_const(output, "SPENDABLE_TYPE_NAMES", &SPENDABLE_TYPE_NAMES); + class_const(output, "AGE_RANGE_NAMES", &AGE_RANGE_NAMES); + class_const(output, "MAX_AGE_NAMES", &MAX_AGE_NAMES); + class_const(output, "MIN_AGE_NAMES", &MIN_AGE_NAMES); + class_const(output, "AMOUNT_RANGE_NAMES", &AMOUNT_RANGE_NAMES); + class_const(output, "GE_AMOUNT_NAMES", &GE_AMOUNT_NAMES); + class_const(output, "LT_AMOUNT_NAMES", <_AMOUNT_NAMES); +} + +/// Generate the base BrkClient class with HTTP functionality +pub fn generate_base_client(output: &mut String) { + writeln!( + output, + r#"class BrkError(Exception): + """Custom error class for BRK client errors.""" + + def __init__(self, message: str, status: Optional[int] = None): + 
super().__init__(message) + self.status = status + + +class BrkClientBase: + """Base HTTP client for making requests.""" + + def __init__(self, base_url: str, timeout: float = 30.0): + self.base_url = base_url + self.timeout = timeout + self._client = httpx.Client(timeout=timeout) + + def get(self, path: str) -> Any: + """Make a GET request.""" + try: + base = self.base_url.rstrip('/') + response = self._client.get(f"{{base}}{{path}}") + response.raise_for_status() + return response.json() + except httpx.HTTPStatusError as e: + raise BrkError(f"HTTP error: {{e.response.status_code}}", e.response.status_code) + except httpx.RequestError as e: + raise BrkError(str(e)) + + def close(self): + """Close the HTTP client.""" + self._client.close() + + def __enter__(self): + return self + + def __exit__(self, exc_type, exc_val, exc_tb): + self.close() + + +def _m(acc: str, s: str) -> str: + """Build metric name with optional prefix.""" + return f"{{acc}}_{{s}}" if acc else s + +"# + ) + .unwrap(); +} + +/// Generate the Endpoint class +pub fn generate_endpoint_class(output: &mut String) { + writeln!( + output, + r#"class Endpoint(Generic[T]): + """An endpoint for a specific metric + index combination.""" + + def __init__(self, client: BrkClientBase, name: str, index: str): + self._client = client + self._name = name + self._index = index + + def get(self) -> List[T]: + """Fetch all data points for this metric/index.""" + return self._client.get(self.path()) + + def range(self, from_val: Optional[int] = None, to_val: Optional[int] = None) -> List[T]: + """Fetch data points within a range.""" + params = [] + if from_val is not None: + params.append(f"from={{from_val}}") + if to_val is not None: + params.append(f"to={{to_val}}") + query = "&".join(params) + p = self.path() + return self._client.get(f"{{p}}?{{query}}" if query else p) + + def path(self) -> str: + """Get the endpoint path.""" + return f"/api/metric/{{self._name}}/{{self._index}}" + + +class 
MetricPattern(Protocol[T]): + """Protocol for metric patterns with different index sets.""" + + @property + def name(self) -> str: + """Get the metric name.""" + ... + + def indexes(self) -> List[str]: + """Get the list of available indexes for this metric.""" + ... + + def get(self, index: str) -> Optional[Endpoint[T]]: + """Get an endpoint for a specific index, if supported.""" + ... + +"# + ) + .unwrap(); +} + +/// Generate index accessor classes +pub fn generate_index_accessors(output: &mut String, patterns: &[IndexSetPattern]) { + if patterns.is_empty() { + return; + } + + writeln!(output, "# Index accessor classes\n").unwrap(); + + for pattern in patterns { + let by_class_name = format!("_{}By", pattern.name); + + // Generate the By class with lazy endpoint methods + writeln!(output, "class {}(Generic[T]):", by_class_name).unwrap(); + writeln!(output, " \"\"\"Index endpoint methods container.\"\"\"").unwrap(); + writeln!(output, " ").unwrap(); + writeln!( + output, + " def __init__(self, client: BrkClientBase, name: str):" + ) + .unwrap(); + writeln!(output, " self._client = client").unwrap(); + writeln!(output, " self._name = name").unwrap(); + writeln!(output).unwrap(); + + // Generate methods for each index + for index in &pattern.indexes { + let method_name = index_to_field_name(index); + let index_name = index.serialize_long(); + writeln!(output, " def {}(self) -> Endpoint[T]:", method_name).unwrap(); + writeln!( + output, + " return Endpoint(self._client, self._name, '{}')", + index_name + ) + .unwrap(); + writeln!(output).unwrap(); + } + + // Generate the main accessor class + writeln!(output, "class {}(Generic[T]):", pattern.name).unwrap(); + writeln!( + output, + " \"\"\"Index accessor for metrics with {} indexes.\"\"\"", + pattern.indexes.len() + ) + .unwrap(); + writeln!(output, " ").unwrap(); + writeln!( + output, + " def __init__(self, client: BrkClientBase, name: str):" + ) + .unwrap(); + writeln!(output, " self._client = client").unwrap(); + 
writeln!(output, " self._name = name").unwrap(); + writeln!( + output, + " self.by: {}[T] = {}(client, name)", + by_class_name, by_class_name + ) + .unwrap(); + writeln!(output).unwrap(); + writeln!(output, " @property").unwrap(); + writeln!(output, " def name(self) -> str:").unwrap(); + writeln!(output, " \"\"\"Get the metric name.\"\"\"").unwrap(); + writeln!(output, " return self._name").unwrap(); + writeln!(output).unwrap(); + writeln!(output, " def indexes(self) -> List[str]:").unwrap(); + writeln!(output, " \"\"\"Get the list of available indexes.\"\"\"").unwrap(); + write!(output, " return [").unwrap(); + for (i, index) in pattern.indexes.iter().enumerate() { + if i > 0 { + write!(output, ", ").unwrap(); + } + write!(output, "'{}'", index.serialize_long()).unwrap(); + } + writeln!(output, "]").unwrap(); + writeln!(output).unwrap(); + + // Generate get(index) method + writeln!(output, " def get(self, index: str) -> Optional[Endpoint[T]]:").unwrap(); + writeln!(output, " \"\"\"Get an endpoint for a specific index, if supported.\"\"\"").unwrap(); + for (i, index) in pattern.indexes.iter().enumerate() { + let method_name = index_to_field_name(index); + let index_name = index.serialize_long(); + if i == 0 { + writeln!(output, " if index == '{}': return self.by.{}()", index_name, method_name).unwrap(); + } else { + writeln!(output, " elif index == '{}': return self.by.{}()", index_name, method_name).unwrap(); + } + } + writeln!(output, " return None").unwrap(); + writeln!(output).unwrap(); + } +} + +/// Generate structural pattern classes +pub fn generate_structural_patterns( + output: &mut String, + patterns: &[StructuralPattern], + metadata: &ClientMetadata, +) { + if patterns.is_empty() { + return; + } + + writeln!(output, "# Reusable structural pattern classes\n").unwrap(); + + for pattern in patterns { + let is_parameterizable = pattern.is_parameterizable(); + + // For generic patterns, inherit from Generic[T] + if pattern.is_generic { + writeln!(output, 
"class {}(Generic[T]):", pattern.name).unwrap(); + } else { + writeln!(output, "class {}:", pattern.name).unwrap(); + } + writeln!( + output, + " \"\"\"Pattern struct for repeated tree structure.\"\"\"" + ) + .unwrap(); + writeln!(output, " ").unwrap(); + + if is_parameterizable { + writeln!( + output, + " def __init__(self, client: BrkClientBase, acc: str):" + ) + .unwrap(); + writeln!( + output, + " \"\"\"Create pattern node with accumulated metric name.\"\"\"" + ) + .unwrap(); + } else { + writeln!( + output, + " def __init__(self, client: BrkClientBase, base_path: str):" + ) + .unwrap(); + } + + let syntax = PythonSyntax; + for field in &pattern.fields { + if is_parameterizable { + generate_parameterized_field(output, &syntax, field, pattern, metadata, " "); + } else { + generate_tree_path_field(output, &syntax, field, metadata, " "); + } + } + + writeln!(output).unwrap(); + } +} + +/// Get Python type annotation for a field with optional generic value type. +pub fn field_type_with_generic( + field: &PatternField, + metadata: &ClientMetadata, + is_generic: bool, + generic_value_type: Option<&str>, +) -> String { + metadata.field_type_annotation(field, is_generic, generic_value_type, GenericSyntax::PYTHON) +} diff --git a/crates/brk_bindgen/src/generators/python/mod.rs b/crates/brk_bindgen/src/generators/python/mod.rs new file mode 100644 index 000000000..b0257828e --- /dev/null +++ b/crates/brk_bindgen/src/generators/python/mod.rs @@ -0,0 +1,47 @@ +//! Python client generation. +//! +//! This module generates a Python client with type hints for the BRK API. + +mod api; +mod client; +mod tree; +mod types; + +use std::{fmt::Write, fs, io, path::Path}; + +use crate::{ClientMetadata, Endpoint, TypeSchemas}; + +/// Generate Python client from metadata and OpenAPI endpoints. +/// +/// `output_path` is the full path to the output file (e.g., "packages/brk_client/__init__.py"). 
+pub fn generate_python_client( + metadata: &ClientMetadata, + endpoints: &[Endpoint], + schemas: &TypeSchemas, + output_path: &Path, +) -> io::Result<()> { + let mut output = String::new(); + + writeln!(output, "# Auto-generated BRK Python client").unwrap(); + writeln!(output, "# Do not edit manually\n").unwrap(); + writeln!(output, "from __future__ import annotations").unwrap(); + writeln!( + output, + "from typing import TypeVar, Generic, Any, Optional, List, Literal, TypedDict, Final, Union, Protocol" + ) + .unwrap(); + writeln!(output, "import httpx\n").unwrap(); + writeln!(output, "T = TypeVar('T')\n").unwrap(); + + types::generate_type_definitions(&mut output, schemas); + client::generate_base_client(&mut output); + client::generate_endpoint_class(&mut output); + client::generate_index_accessors(&mut output, &metadata.index_set_patterns); + client::generate_structural_patterns(&mut output, &metadata.structural_patterns, metadata); + tree::generate_tree_classes(&mut output, &metadata.catalog, metadata); + api::generate_main_client(&mut output, endpoints); + + fs::write(output_path, output)?; + + Ok(()) +} diff --git a/crates/brk_bindgen/src/generators/python/tree.rs b/crates/brk_bindgen/src/generators/python/tree.rs new file mode 100644 index 000000000..e5dd67855 --- /dev/null +++ b/crates/brk_bindgen/src/generators/python/tree.rs @@ -0,0 +1,146 @@ +//! Python tree structure generation. 
+ +use std::collections::HashSet; +use std::fmt::Write; + +use brk_types::TreeNode; + +use crate::{ + ClientMetadata, PatternField, child_type_name, get_fields_with_child_info, get_node_fields, + get_pattern_instance_base, to_snake_case, +}; + +use super::client::field_type_with_generic; + +/// Generate tree classes +pub fn generate_tree_classes(output: &mut String, catalog: &TreeNode, metadata: &ClientMetadata) { + writeln!(output, "# Catalog tree classes\n").unwrap(); + + let pattern_lookup = metadata.pattern_lookup(); + let mut generated = HashSet::new(); + generate_tree_class( + output, + "CatalogTree", + catalog, + &pattern_lookup, + metadata, + &mut generated, + ); +} + +/// Recursively generate tree classes +fn generate_tree_class( + output: &mut String, + name: &str, + node: &TreeNode, + pattern_lookup: &std::collections::HashMap, String>, + metadata: &ClientMetadata, + generated: &mut HashSet, +) { + let TreeNode::Branch(children) = node else { + return; + }; + + let fields_with_child_info = get_fields_with_child_info(children, name, pattern_lookup); + let fields: Vec = fields_with_child_info + .iter() + .map(|(f, _)| f.clone()) + .collect(); + + // Skip if this matches a pattern (already generated) + if pattern_lookup.contains_key(&fields) + && pattern_lookup.get(&fields) != Some(&name.to_string()) + { + return; + } + + if generated.contains(name) { + return; + } + generated.insert(name.to_string()); + + writeln!(output, "class {}:", name).unwrap(); + writeln!(output, " \"\"\"Catalog tree node.\"\"\"").unwrap(); + writeln!(output, " ").unwrap(); + writeln!( + output, + " def __init__(self, client: BrkClientBase, base_path: str = ''):" + ) + .unwrap(); + + for ((field, child_fields_opt), (_child_name, child_node)) in + fields_with_child_info.iter().zip(children.iter()) + { + // Look up type parameter for generic patterns + let generic_value_type = child_fields_opt + .as_ref() + .and_then(|cf| metadata.get_type_param(cf)) + .map(String::as_str); + let 
py_type = field_type_with_generic(field, metadata, false, generic_value_type); + let field_name_py = to_snake_case(&field.name); + + if metadata.is_pattern_type(&field.rust_type) { + let pattern = metadata.find_pattern(&field.rust_type); + let is_parameterizable = pattern.is_some_and(|p| p.is_parameterizable()); + + if is_parameterizable { + let metric_base = get_pattern_instance_base(child_node); + writeln!( + output, + " self.{}: {} = {}(client, '{}')", + field_name_py, py_type, field.rust_type, metric_base + ) + .unwrap(); + } else { + writeln!( + output, + " self.{}: {} = {}(client, f'{{base_path}}_{}')", + field_name_py, py_type, field.rust_type, field.name + ) + .unwrap(); + } + } else if metadata.field_uses_accessor(field) { + let accessor = metadata.find_index_set_pattern(&field.indexes).unwrap(); + writeln!( + output, + " self.{}: {} = {}(client, f'{{base_path}}_{}')", + field_name_py, py_type, accessor.name, field.name + ) + .unwrap(); + } else if field.is_branch() { + // Non-pattern branch - instantiate the nested class + writeln!( + output, + " self.{}: {} = {}(client, f'{{base_path}}_{}')", + field_name_py, py_type, field.rust_type, field.name + ) + .unwrap(); + } else { + // All metrics must be indexed - this should not be reached + panic!( + "Field '{}' has no matching index pattern. 
All metrics must be indexed.", + field.name + ); + } + } + + writeln!(output).unwrap(); + + // Generate child classes + for (child_name, child_node) in children { + if let TreeNode::Branch(grandchildren) = child_node { + let child_fields = get_node_fields(grandchildren, pattern_lookup); + if !pattern_lookup.contains_key(&child_fields) { + let child_class = child_type_name(name, child_name); + generate_tree_class( + output, + &child_class, + child_node, + pattern_lookup, + metadata, + generated, + ); + } + } + } +} diff --git a/crates/brk_bindgen/src/generators/python/types.rs b/crates/brk_bindgen/src/generators/python/types.rs new file mode 100644 index 000000000..8c411e904 --- /dev/null +++ b/crates/brk_bindgen/src/generators/python/types.rs @@ -0,0 +1,268 @@ +//! Python type definitions generation. + +use std::collections::{HashMap, HashSet}; +use std::fmt::Write; + +use serde_json::Value; + +use crate::{TypeSchemas, escape_python_keyword, ref_to_type_name}; + +/// Generate type definitions from schemas. 
+pub fn generate_type_definitions(output: &mut String, schemas: &TypeSchemas) { + if schemas.is_empty() { + return; + } + + writeln!(output, "# Type definitions\n").unwrap(); + + let sorted_names = topological_sort_schemas(schemas); + + for name in sorted_names { + let Some(schema) = schemas.get(&name) else { + continue; + }; + if let Some(props) = schema.get("properties").and_then(|p| p.as_object()) { + writeln!(output, "class {}(TypedDict):", name).unwrap(); + for (prop_name, prop_schema) in props { + let prop_type = schema_to_python_type_ctx(prop_schema, Some(&name)); + let safe_name = escape_python_keyword(prop_name); + writeln!(output, " {}: {}", safe_name, prop_type).unwrap(); + } + writeln!(output).unwrap(); + } else { + let py_type = schema_to_python_type_ctx(schema, Some(&name)); + writeln!(output, "{} = {}", name, py_type).unwrap(); + } + } + writeln!(output).unwrap(); +} + +/// Topologically sort schema names so dependencies come before dependents (avoids forward references). +/// Types that reference other types (via $ref) must be defined after their dependencies. 
+fn topological_sort_schemas(schemas: &TypeSchemas) -> Vec { + // Build dependency graph + let mut deps: HashMap> = HashMap::new(); + for (name, schema) in schemas { + let mut type_deps = HashSet::new(); + collect_schema_refs(schema, &mut type_deps); + // Only keep deps that are in our schemas + type_deps.retain(|d| schemas.contains_key(d)); + deps.insert(name.clone(), type_deps); + } + + // Kahn's algorithm for topological sort + let mut in_degree: HashMap = HashMap::new(); + for name in schemas.keys() { + in_degree.insert(name.clone(), 0); + } + for type_deps in deps.values() { + for dep in type_deps { + *in_degree.entry(dep.clone()).or_insert(0) += 1; + } + } + + // Start with types that have no dependents (are not referenced by others) + let mut queue: Vec = in_degree + .iter() + .filter(|(_, count)| **count == 0) + .map(|(name, _)| name.clone()) + .collect(); + queue.sort(); // Deterministic order + + let mut result = Vec::new(); + while let Some(name) = queue.pop() { + result.push(name.clone()); + if let Some(type_deps) = deps.get(&name) { + for dep in type_deps { + if let Some(count) = in_degree.get_mut(dep) { + *count = count.saturating_sub(1); + if *count == 0 { + queue.push(dep.clone()); + queue.sort(); // Keep sorted for determinism + } + } + } + } + } + + // Reverse so dependencies come first + result.reverse(); + + // Add any types that weren't processed (e.g., due to circular refs or other edge cases) + let result_set: HashSet<_> = result.iter().cloned().collect(); + let mut missing: Vec<_> = schemas + .keys() + .filter(|k| !result_set.contains(*k)) + .cloned() + .collect(); + missing.sort(); + result.extend(missing); + + result +} + +/// Collect all type references ($ref) from a schema +fn collect_schema_refs(schema: &Value, refs: &mut HashSet) { + match schema { + Value::Object(map) => { + if let Some(ref_path) = map.get("$ref").and_then(|r| r.as_str()) + && let Some(type_name) = ref_to_type_name(ref_path) + { + refs.insert(type_name.to_string()); + 
} + for value in map.values() { + collect_schema_refs(value, refs); + } + } + Value::Array(arr) => { + for item in arr { + collect_schema_refs(item, refs); + } + } + _ => {} + } +} + +/// Convert a single JSON type string to Python type +fn json_type_to_python(ty: &str, schema: &Value, current_type: Option<&str>) -> String { + match ty { + "integer" => "int".to_string(), + "number" => "float".to_string(), + "boolean" => "bool".to_string(), + "string" => "str".to_string(), + "null" => "None".to_string(), + "array" => { + let item_type = schema + .get("items") + .map(|s| schema_to_python_type_ctx(s, current_type)) + .unwrap_or_else(|| "Any".to_string()); + format!("List[{}]", item_type) + } + "object" => { + if let Some(add_props) = schema.get("additionalProperties") { + let value_type = schema_to_python_type_ctx(add_props, current_type); + return format!("dict[str, {}]", value_type); + } + "dict".to_string() + } + _ => "Any".to_string(), + } +} + +/// Convert JSON Schema to Python type with context for detecting self-references +pub fn schema_to_python_type_ctx(schema: &Value, current_type: Option<&str>) -> String { + if let Some(all_of) = schema.get("allOf").and_then(|v| v.as_array()) { + for item in all_of { + let resolved = schema_to_python_type_ctx(item, current_type); + if resolved != "Any" { + return resolved; + } + } + } + + // Handle $ref + if let Some(ref_path) = schema.get("$ref").and_then(|r| r.as_str()) { + let type_name = ref_to_type_name(ref_path).unwrap_or("Any"); + // Quote self-references to handle recursive types + if current_type == Some(type_name) { + return format!("\"{}\"", type_name); + } + return type_name.to_string(); + } + + // Handle enum (array of string values) + if let Some(enum_values) = schema.get("enum").and_then(|e| e.as_array()) { + let literals: Vec = enum_values + .iter() + .filter_map(|v| v.as_str()) + .map(|s| format!("\"{}\"", s)) + .collect(); + if !literals.is_empty() { + return format!("Literal[{}]", literals.join(", ")); + 
} + } + + if let Some(ty) = schema.get("type") { + if let Some(type_array) = ty.as_array() { + let types: Vec = type_array + .iter() + .filter_map(|t| t.as_str()) + .filter(|t| *t != "null") // Filter out null for cleaner Optional handling + .map(|t| json_type_to_python(t, schema, current_type)) + .collect(); + let has_null = type_array.iter().any(|t| t.as_str() == Some("null")); + + if types.len() == 1 { + let base_type = &types[0]; + return if has_null { + format!("Optional[{}]", base_type) + } else { + base_type.clone() + }; + } else if !types.is_empty() { + let union = format!("Union[{}]", types.join(", ")); + return if has_null { + format!("Optional[{}]", union) + } else { + union + }; + } + } + + if let Some(ty_str) = ty.as_str() { + return json_type_to_python(ty_str, schema, current_type); + } + } + + if let Some(variants) = schema + .get("anyOf") + .or_else(|| schema.get("oneOf")) + .and_then(|v| v.as_array()) + { + let types: Vec = variants + .iter() + .map(|v| schema_to_python_type_ctx(v, current_type)) + .collect(); + let filtered: Vec<_> = types.iter().filter(|t| *t != "Any").collect(); + if !filtered.is_empty() { + return format!( + "Union[{}]", + filtered + .iter() + .map(|s| s.as_str()) + .collect::>() + .join(", ") + ); + } + return format!("Union[{}]", types.join(", ")); + } + + // Check for format hint without type (common in OpenAPI) + if let Some(format) = schema.get("format").and_then(|f| f.as_str()) { + return match format { + "int32" | "int64" => "int".to_string(), + "float" | "double" => "float".to_string(), + "date" | "date-time" => "str".to_string(), + _ => "Any".to_string(), + }; + } + + "Any".to_string() +} + +/// Convert JS-style type to Python type (e.g., "Txid[]" -> "List[Txid]", "number" -> "int") +pub fn js_type_to_python(js_type: &str) -> String { + if let Some(inner) = js_type.strip_suffix("[]") { + format!("List[{}]", js_type_to_python(inner)) + } else { + match js_type { + "number" => "int".to_string(), + "boolean" => 
"bool".to_string(), + "string" => "str".to_string(), + "null" => "None".to_string(), + "Object" | "object" => "dict".to_string(), + "*" => "Any".to_string(), + _ => js_type.to_string(), + } + } +} diff --git a/crates/brk_bindgen/src/generators/rust/api.rs b/crates/brk_bindgen/src/generators/rust/api.rs new file mode 100644 index 000000000..28add2875 --- /dev/null +++ b/crates/brk_bindgen/src/generators/rust/api.rs @@ -0,0 +1,144 @@ +//! Rust API method generation. + +use std::fmt::Write; + +use crate::{Endpoint, VERSION, to_snake_case}; + +use super::types::js_type_to_rust; + +/// Generate the main BrkClient struct. +pub fn generate_main_client(output: &mut String, endpoints: &[Endpoint]) { + writeln!( + output, + r#"/// Main BRK client with catalog tree and API methods. +pub struct BrkClient {{ + base: Arc, + tree: CatalogTree, +}} + +impl BrkClient {{ + /// Client version. + pub const VERSION: &'static str = "v{VERSION}"; + + /// Create a new client with the given base URL. + pub fn new(base_url: impl Into) -> Self {{ + let base = Arc::new(BrkClientBase::new(base_url)); + let tree = CatalogTree::new(base.clone(), String::new()); + Self {{ base, tree }} + }} + + /// Create a new client with options. + pub fn with_options(options: BrkClientOptions) -> Self {{ + let base = Arc::new(BrkClientBase::with_options(options)); + let tree = CatalogTree::new(base.clone(), String::new()); + Self {{ base, tree }} + }} + + /// Get the catalog tree for navigating metrics. + pub fn tree(&self) -> &CatalogTree {{ + &self.tree + }} +"#, + VERSION = VERSION + ) + .unwrap(); + + generate_api_methods(output, endpoints); + + writeln!(output, "}}").unwrap(); +} + +/// Generate API methods from OpenAPI endpoints. 
+pub fn generate_api_methods(output: &mut String, endpoints: &[Endpoint]) { + for endpoint in endpoints { + if !endpoint.should_generate() { + continue; + } + + let method_name = endpoint_to_method_name(endpoint); + let return_type = endpoint + .response_type + .as_deref() + .map(js_type_to_rust) + .unwrap_or_else(|| "serde_json::Value".to_string()); + + writeln!( + output, + " /// {}", + endpoint.summary.as_deref().unwrap_or(&method_name) + ) + .unwrap(); + if let Some(desc) = &endpoint.description + && endpoint.summary.as_ref() != Some(desc) + { + writeln!(output, " ///").unwrap(); + writeln!(output, " /// {}", desc).unwrap(); + } + + let params = build_method_params(endpoint); + writeln!( + output, + " pub fn {}(&self{}) -> Result<{}> {{", + method_name, params, return_type + ) + .unwrap(); + + let path = build_path_template(&endpoint.path); + + if endpoint.query_params.is_empty() { + writeln!(output, " self.base.get(&format!(\"{}\"))", path).unwrap(); + } else { + writeln!(output, " let mut query = Vec::new();").unwrap(); + for param in &endpoint.query_params { + if param.required { + writeln!( + output, + " query.push(format!(\"{}={{}}\", {}));", + param.name, param.name + ) + .unwrap(); + } else { + writeln!( + output, + " if let Some(v) = {} {{ query.push(format!(\"{}={{}}\", v)); }}", + param.name, param.name + ) + .unwrap(); + } + } + writeln!(output, " let query_str = if query.is_empty() {{ String::new() }} else {{ format!(\"?{{}}\", query.join(\"&\")) }};").unwrap(); + writeln!( + output, + " self.base.get(&format!(\"{}{{}}\", query_str))", + path + ) + .unwrap(); + } + + writeln!(output, " }}\n").unwrap(); + } +} + +fn endpoint_to_method_name(endpoint: &Endpoint) -> String { + to_snake_case(&endpoint.operation_name()) +} + +fn build_method_params(endpoint: &Endpoint) -> String { + let mut params = Vec::new(); + for param in &endpoint.path_params { + params.push(format!(", {}: &str", param.name)); + } + for param in &endpoint.query_params { + if 
param.required { + params.push(format!(", {}: &str", param.name)); + } else { + params.push(format!(", {}: Option<&str>", param.name)); + } + } + params.join("") +} + +/// OpenAPI path placeholders `{param}` are already valid Rust format string syntax. +fn build_path_template(path: &str) -> &str { + path +} diff --git a/crates/brk_bindgen/src/generators/rust/client.rs b/crates/brk_bindgen/src/generators/rust/client.rs new file mode 100644 index 000000000..2fec2429d --- /dev/null +++ b/crates/brk_bindgen/src/generators/rust/client.rs @@ -0,0 +1,380 @@ +//! Rust base client and pattern factory generation. + +use std::fmt::Write; + +use crate::{ + ClientMetadata, GenericSyntax, IndexSetPattern, PatternField, RustSyntax, + StructuralPattern, generate_parameterized_field, generate_tree_path_field, + index_to_field_name, to_snake_case, +}; + +/// Generate import statements. +pub fn generate_imports(output: &mut String) { + writeln!( + output, + r#"use std::sync::Arc; +use serde::de::DeserializeOwned; +pub use brk_cohort::*; +pub use brk_types::*; + +"# + ) + .unwrap(); +} + +/// Generate the base BrkClientBase struct and error types. +pub fn generate_base_client(output: &mut String) { + writeln!( + output, + r#"/// Error type for BRK client operations. +#[derive(Debug)] +pub struct BrkError {{ + pub message: String, +}} + +impl std::fmt::Display for BrkError {{ + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {{ + write!(f, "{{}}", self.message) + }} +}} + +impl std::error::Error for BrkError {{}} + +/// Result type for BRK client operations. +pub type Result = std::result::Result; + +/// Options for configuring the BRK client. +#[derive(Debug, Clone)] +pub struct BrkClientOptions {{ + pub base_url: String, + pub timeout_secs: u64, +}} + +impl Default for BrkClientOptions {{ + fn default() -> Self {{ + Self {{ + base_url: "http://localhost:3000".to_string(), + timeout_secs: 30, + }} + }} +}} + +/// Base HTTP client for making requests. 
+#[derive(Debug, Clone)] +pub struct BrkClientBase {{ + base_url: String, + timeout_secs: u64, +}} + +impl BrkClientBase {{ + /// Create a new client with the given base URL. + pub fn new(base_url: impl Into) -> Self {{ + Self {{ + base_url: base_url.into(), + timeout_secs: 30, + }} + }} + + /// Create a new client with options. + pub fn with_options(options: BrkClientOptions) -> Self {{ + Self {{ + base_url: options.base_url, + timeout_secs: options.timeout_secs, + }} + }} + + /// Make a GET request. + pub fn get(&self, path: &str) -> Result {{ + let base = self.base_url.trim_end_matches('/'); + let url = format!("{{}}{{}}", base, path); + let response = minreq::get(&url) + .with_timeout(self.timeout_secs) + .send() + .map_err(|e| BrkError {{ message: e.to_string() }})?; + + if response.status_code >= 400 {{ + return Err(BrkError {{ + message: format!("HTTP {{}}", response.status_code), + }}); + }} + + response + .json() + .map_err(|e| BrkError {{ message: e.to_string() }}) + }} +}} + +/// Build metric name with optional prefix. +#[inline] +fn _m(acc: &str, s: &str) -> String {{ + if acc.is_empty() {{ s.to_string() }} else {{ format!("{{acc}}_{{s}}") }} +}} + +"# + ) + .unwrap(); +} + +/// Generate the MetricPattern trait. +pub fn generate_metric_pattern_trait(output: &mut String) { + writeln!( + output, + r#"/// Non-generic trait for metric patterns (usable in collections). +pub trait AnyMetricPattern {{ + /// Get the metric name. + fn name(&self) -> &str; + + /// Get the list of available indexes for this metric. + fn indexes(&self) -> &'static [Index]; +}} + +/// Generic trait for metric patterns with endpoint access. +pub trait MetricPattern: AnyMetricPattern {{ + /// Get an endpoint for a specific index, if supported. + fn get(&self, index: Index) -> Option>; +}} + +"# + ) + .unwrap(); +} + +/// Generate the Endpoint struct. +pub fn generate_endpoint(output: &mut String) { + writeln!( + output, + r#"/// An endpoint for a specific metric + index combination. 
+pub struct Endpoint {{ + client: Arc, + name: Arc, + index: Index, + _marker: std::marker::PhantomData, +}} + +impl Endpoint {{ + pub fn new(client: Arc, name: Arc, index: Index) -> Self {{ + Self {{ + client, + name, + index, + _marker: std::marker::PhantomData, + }} + }} + + /// Fetch all data points for this metric/index. + pub fn get(&self) -> Result> {{ + self.client.get(&self.path()) + }} + + /// Fetch data points within a range. + pub fn range(&self, from: Option, to: Option) -> Result> {{ + let mut params = Vec::new(); + if let Some(f) = from {{ params.push(format!("from={{}}", f)); }} + if let Some(t) = to {{ params.push(format!("to={{}}", t)); }} + let p = self.path(); + let path = if params.is_empty() {{ + p + }} else {{ + format!("{{}}?{{}}", p, params.join("&")) + }}; + self.client.get(&path) + }} + + /// Get the endpoint path. + pub fn path(&self) -> String {{ + format!("/api/metric/{{}}/{{}}", self.name, self.index.serialize_long()) + }} +}} + +"# + ) + .unwrap(); +} + +/// Generate index accessor structs. 
+pub fn generate_index_accessors(output: &mut String, patterns: &[IndexSetPattern]) { + if patterns.is_empty() { + return; + } + + writeln!(output, "// Index accessor structs\n").unwrap(); + + for pattern in patterns { + let by_name = format!("{}By", pattern.name); + + // Generate the "By" struct with lazy endpoint methods + writeln!(output, "/// Container for index endpoint methods.").unwrap(); + writeln!(output, "pub struct {} {{", by_name).unwrap(); + writeln!(output, " client: Arc,").unwrap(); + writeln!(output, " name: Arc,").unwrap(); + writeln!(output, " _marker: std::marker::PhantomData,").unwrap(); + writeln!(output, "}}\n").unwrap(); + + // Generate impl with methods for each index + writeln!(output, "impl {} {{", by_name).unwrap(); + for index in &pattern.indexes { + let method_name = index_to_field_name(index); + writeln!(output, " pub fn {}(&self) -> Endpoint {{", method_name).unwrap(); + writeln!( + output, + " Endpoint::new(self.client.clone(), self.name.clone(), Index::{})", + index + ) + .unwrap(); + writeln!(output, " }}").unwrap(); + } + writeln!(output, "}}\n").unwrap(); + + // Generate the main accessor struct + writeln!( + output, + "/// Index accessor for metrics with {} indexes.", + pattern.indexes.len() + ) + .unwrap(); + writeln!(output, "pub struct {} {{", pattern.name).unwrap(); + writeln!(output, " client: Arc,").unwrap(); + writeln!(output, " name: Arc,").unwrap(); + writeln!(output, " pub by: {},", by_name).unwrap(); + writeln!(output, "}}\n").unwrap(); + + // Generate impl block with constructor + writeln!(output, "impl {} {{", pattern.name).unwrap(); + writeln!( + output, + " pub fn new(client: Arc, name: String) -> Self {{" + ) + .unwrap(); + writeln!(output, " let name: Arc = name.into();").unwrap(); + writeln!(output, " Self {{").unwrap(); + writeln!(output, " client: client.clone(),").unwrap(); + writeln!(output, " name: name.clone(),").unwrap(); + writeln!(output, " by: {} {{", by_name).unwrap(); + writeln!(output, " 
client,").unwrap(); + writeln!(output, " name,").unwrap(); + writeln!(output, " _marker: std::marker::PhantomData,").unwrap(); + writeln!(output, " }}").unwrap(); + writeln!(output, " }}").unwrap(); + writeln!(output, " }}").unwrap(); + writeln!(output).unwrap(); + writeln!(output, " /// Get the metric name.").unwrap(); + writeln!(output, " pub fn name(&self) -> &str {{").unwrap(); + writeln!(output, " &self.name").unwrap(); + writeln!(output, " }}").unwrap(); + writeln!(output, "}}\n").unwrap(); + + // Implement AnyMetricPattern trait + writeln!(output, "impl AnyMetricPattern for {} {{", pattern.name).unwrap(); + writeln!(output, " fn name(&self) -> &str {{").unwrap(); + writeln!(output, " &self.name").unwrap(); + writeln!(output, " }}").unwrap(); + writeln!(output).unwrap(); + writeln!(output, " fn indexes(&self) -> &'static [Index] {{").unwrap(); + writeln!(output, " &[").unwrap(); + for index in &pattern.indexes { + writeln!(output, " Index::{},", index).unwrap(); + } + writeln!(output, " ]").unwrap(); + writeln!(output, " }}").unwrap(); + writeln!(output, "}}\n").unwrap(); + + // Implement MetricPattern trait + writeln!(output, "impl MetricPattern for {} {{", pattern.name).unwrap(); + writeln!(output, " fn get(&self, index: Index) -> Option> {{").unwrap(); + writeln!(output, " match index {{").unwrap(); + for index in &pattern.indexes { + let method_name = index_to_field_name(index); + writeln!(output, " Index::{} => Some(self.by.{}()),", index, method_name).unwrap(); + } + writeln!(output, " _ => None,").unwrap(); + writeln!(output, " }}").unwrap(); + writeln!(output, " }}").unwrap(); + writeln!(output, "}}\n").unwrap(); + } +} + +/// Generate structural pattern structs. 
+pub fn generate_pattern_structs( + output: &mut String, + patterns: &[StructuralPattern], + metadata: &ClientMetadata, +) { + if patterns.is_empty() { + return; + } + + writeln!(output, "// Reusable pattern structs\n").unwrap(); + + for pattern in patterns { + let is_parameterizable = pattern.is_parameterizable(); + let generic_params = if pattern.is_generic { "" } else { "" }; + + writeln!(output, "/// Pattern struct for repeated tree structure.").unwrap(); + writeln!(output, "pub struct {}{} {{", pattern.name, generic_params).unwrap(); + + for field in &pattern.fields { + let field_name = to_snake_case(&field.name); + let type_annotation = + field_type_with_generic(field, metadata, pattern.is_generic, None); + writeln!(output, " pub {}: {},", field_name, type_annotation).unwrap(); + } + + writeln!(output, "}}\n").unwrap(); + + // Generate impl block with constructor + let impl_generic = if pattern.is_generic { + "" + } else { + "" + }; + writeln!( + output, + "impl{} {}{} {{", + impl_generic, pattern.name, generic_params + ) + .unwrap(); + + if is_parameterizable { + writeln!( + output, + " /// Create a new pattern node with accumulated metric name." + ) + .unwrap(); + writeln!( + output, + " pub fn new(client: Arc, acc: String) -> Self {{" + ) + .unwrap(); + } else { + writeln!( + output, + " pub fn new(client: Arc, base_path: String) -> Self {{" + ) + .unwrap(); + } + writeln!(output, " Self {{").unwrap(); + + let syntax = RustSyntax; + for field in &pattern.fields { + if is_parameterizable { + generate_parameterized_field(output, &syntax, field, pattern, metadata, " "); + } else { + generate_tree_path_field(output, &syntax, field, metadata, " "); + } + } + + writeln!(output, " }}").unwrap(); + writeln!(output, " }}").unwrap(); + writeln!(output, "}}\n").unwrap(); + } +} + +/// Get Rust type annotation for a field with optional generic value type. 
+pub fn field_type_with_generic( + field: &PatternField, + metadata: &ClientMetadata, + is_generic: bool, + generic_value_type: Option<&str>, +) -> String { + metadata.field_type_annotation(field, is_generic, generic_value_type, GenericSyntax::RUST) +} diff --git a/crates/brk_bindgen/src/generators/rust/mod.rs b/crates/brk_bindgen/src/generators/rust/mod.rs new file mode 100644 index 000000000..b3a880d40 --- /dev/null +++ b/crates/brk_bindgen/src/generators/rust/mod.rs @@ -0,0 +1,44 @@ +//! Rust client generation. +//! +//! This module generates a Rust client with full type safety for the BRK API. + +mod api; +mod client; +mod tree; +mod types; + +use std::{fmt::Write, fs, io, path::Path}; + +use crate::{ClientMetadata, Endpoint}; + +/// Generate Rust client from metadata and OpenAPI endpoints. +/// +/// `output_path` is the full path to the output file (e.g., "crates/brk_client/src/lib.rs"). +pub fn generate_rust_client( + metadata: &ClientMetadata, + endpoints: &[Endpoint], + output_path: &Path, +) -> io::Result<()> { + let mut output = String::new(); + + writeln!(output, "// Auto-generated BRK Rust client").unwrap(); + writeln!(output, "// Do not edit manually\n").unwrap(); + writeln!(output, "#![allow(non_camel_case_types)]").unwrap(); + writeln!(output, "#![allow(dead_code)]").unwrap(); + writeln!(output, "#![allow(unused_variables)]").unwrap(); + writeln!(output, "#![allow(clippy::useless_format)]").unwrap(); + writeln!(output, "#![allow(clippy::unnecessary_to_owned)]\n").unwrap(); + + client::generate_imports(&mut output); + client::generate_base_client(&mut output); + client::generate_metric_pattern_trait(&mut output); + client::generate_endpoint(&mut output); + client::generate_index_accessors(&mut output, &metadata.index_set_patterns); + client::generate_pattern_structs(&mut output, &metadata.structural_patterns, metadata); + tree::generate_tree(&mut output, &metadata.catalog, metadata); + api::generate_main_client(&mut output, endpoints); + + 
fs::write(output_path, output)?; + + Ok(()) +} diff --git a/crates/brk_bindgen/src/generators/rust/tree.rs b/crates/brk_bindgen/src/generators/rust/tree.rs new file mode 100644 index 000000000..021a09aa8 --- /dev/null +++ b/crates/brk_bindgen/src/generators/rust/tree.rs @@ -0,0 +1,120 @@ +//! Rust tree structure generation. + +use std::collections::HashSet; +use std::fmt::Write; + +use brk_types::TreeNode; + +use crate::{ + ClientMetadata, PatternField, RustSyntax, child_type_name, generate_tree_node_field, + get_fields_with_child_info, get_node_fields, get_pattern_instance_base, to_snake_case, +}; + +use super::client::field_type_with_generic; + +/// Generate tree structs. +pub fn generate_tree(output: &mut String, catalog: &TreeNode, metadata: &ClientMetadata) { + writeln!(output, "// Catalog tree\n").unwrap(); + + let pattern_lookup = metadata.pattern_lookup(); + let mut generated = HashSet::new(); + generate_tree_node( + output, + "CatalogTree", + catalog, + &pattern_lookup, + metadata, + &mut generated, + ); +} + +fn generate_tree_node( + output: &mut String, + name: &str, + node: &TreeNode, + pattern_lookup: &std::collections::HashMap, String>, + metadata: &ClientMetadata, + generated: &mut HashSet, +) { + let TreeNode::Branch(children) = node else { + return; + }; + + let fields_with_child_info = get_fields_with_child_info(children, name, pattern_lookup); + let fields: Vec = fields_with_child_info + .iter() + .map(|(f, _)| f.clone()) + .collect(); + + if let Some(pattern_name) = pattern_lookup.get(&fields) + && pattern_name != name + { + return; + } + + if generated.contains(name) { + return; + } + generated.insert(name.to_string()); + + writeln!(output, "/// Catalog tree node.").unwrap(); + writeln!(output, "pub struct {} {{", name).unwrap(); + + for (field, child_fields) in &fields_with_child_info { + let field_name = to_snake_case(&field.name); + // Look up type parameter for generic patterns + let generic_value_type = child_fields + .as_ref() + 
.and_then(|cf| metadata.get_type_param(cf)) + .map(String::as_str); + let type_annotation = field_type_with_generic(field, metadata, false, generic_value_type); + writeln!(output, " pub {}: {},", field_name, type_annotation).unwrap(); + } + + writeln!(output, "}}\n").unwrap(); + + writeln!(output, "impl {} {{", name).unwrap(); + writeln!( + output, + " pub fn new(client: Arc, base_path: String) -> Self {{" + ) + .unwrap(); + writeln!(output, " Self {{").unwrap(); + + let syntax = RustSyntax; + for (field, (child_name, child_node)) in fields.iter().zip(children.iter()) { + // Detect pattern base for parameterizable patterns + let pattern_base = if metadata.is_pattern_type(&field.rust_type) { + let pattern = metadata.find_pattern(&field.rust_type); + if pattern.is_some_and(|p| p.is_parameterizable()) { + Some(get_pattern_instance_base(child_node)) + } else { + None + } + } else { + None + }; + generate_tree_node_field(output, &syntax, field, metadata, " ", child_name, pattern_base.as_deref()); + } + + writeln!(output, " }}").unwrap(); + writeln!(output, " }}").unwrap(); + writeln!(output, "}}\n").unwrap(); + + for (child_name, child_node) in children { + if let TreeNode::Branch(grandchildren) = child_node { + let child_fields = get_node_fields(grandchildren, pattern_lookup); + if !pattern_lookup.contains_key(&child_fields) { + let child_struct = child_type_name(name, child_name); + generate_tree_node( + output, + &child_struct, + child_node, + pattern_lookup, + metadata, + generated, + ); + } + } + } +} diff --git a/crates/brk_bindgen/src/generators/rust/types.rs b/crates/brk_bindgen/src/generators/rust/types.rs new file mode 100644 index 000000000..bc6cb0026 --- /dev/null +++ b/crates/brk_bindgen/src/generators/rust/types.rs @@ -0,0 +1,16 @@ +//! Rust type conversion utilities. + +/// Convert JS-style type to Rust type. 
+pub fn js_type_to_rust(js_type: &str) -> String { + if let Some(inner) = js_type.strip_suffix("[]") { + format!("Vec<{}>", js_type_to_rust(inner)) + } else { + match js_type { + "string" => "String".to_string(), + "number" => "f64".to_string(), + "boolean" => "bool".to_string(), + "*" => "serde_json::Value".to_string(), + other => other.to_string(), + } + } +} diff --git a/crates/brk_binder/src/lib.rs b/crates/brk_bindgen/src/lib.rs similarity index 88% rename from crates/brk_binder/src/lib.rs rename to crates/brk_bindgen/src/lib.rs index b94f6ba21..f65204736 100644 --- a/crates/brk_binder/src/lib.rs +++ b/crates/brk_bindgen/src/lib.rs @@ -47,16 +47,20 @@ impl ClientOutputPaths { } } -mod javascript; +mod analysis; +mod backends; +mod generate; +mod generators; mod openapi; -mod python; -mod rust; +mod syntax; mod types; -pub use javascript::*; +pub use analysis::*; +pub use backends::*; +pub use generate::*; +pub use generators::{generate_javascript_client, generate_python_client, generate_rust_client}; pub use openapi::*; -pub use python::*; -pub use rust::*; +pub use syntax::*; pub use types::*; pub const VERSION: &str = env!("CARGO_PKG_VERSION"); @@ -170,21 +174,13 @@ fn collect_leaf_type_schemas(node: &TreeNode, schemas: &mut TypeSchemas) { /// Collect type definitions from schemars-generated schema's definitions section. /// Schemars uses `definitions` or `$defs` to store referenced types. 
fn collect_schema_definitions(schema: &Value, schemas: &mut TypeSchemas) { - // Check for definitions (JSON Schema draft-07 style) - if let Some(defs) = schema.get("definitions").and_then(|d| d.as_object()) { - for (name, def_schema) in defs { - // Use the definition name as-is (schemars names match $ref paths) - if !schemas.contains_key(name) { - schemas.insert(name.clone(), def_schema.clone()); - } - } - } - - // Check for $defs (JSON Schema draft 2019-09+ style) - if let Some(defs) = schema.get("$defs").and_then(|d| d.as_object()) { - for (name, def_schema) in defs { - if !schemas.contains_key(name) { - schemas.insert(name.clone(), def_schema.clone()); + // Check both JSON Schema draft-07 style ("definitions") and draft 2019-09+ style ("$defs") + for key in ["definitions", "$defs"] { + if let Some(defs) = schema.get(key).and_then(|d| d.as_object()) { + for (name, def_schema) in defs { + if !schemas.contains_key(name) { + schemas.insert(name.clone(), def_schema.clone()); + } } } } diff --git a/crates/brk_binder/src/openapi.rs b/crates/brk_bindgen/src/openapi.rs similarity index 97% rename from crates/brk_binder/src/openapi.rs rename to crates/brk_bindgen/src/openapi.rs index 1a7ec927c..f4758e30e 100644 --- a/crates/brk_binder/src/openapi.rs +++ b/crates/brk_bindgen/src/openapi.rs @@ -1,5 +1,6 @@ use std::{collections::BTreeMap, io}; +use crate::ref_to_type_name; use oas3::Spec; use oas3::spec::{ObjectOrReference, Operation, ParameterIn, PathItem, Schema, SchemaTypeSet}; use serde_json::Value; @@ -215,7 +216,7 @@ fn extract_parameters(operation: &Operation, location: ParameterIn) -> Vec { - ref_path.rsplit('/').next().map(|s| s.to_string()) + ref_to_type_name(ref_path).map(|s| s.to_string()) } ObjectOrReference::Object(obj_schema) => schema_to_type_name(obj_schema), }) @@ -246,7 +247,7 @@ fn extract_response_type(operation: &Operation) -> Option { match &content.schema { Some(ObjectOrReference::Ref { ref_path, .. 
}) => { // Extract type name from reference like "#/components/schemas/Block" - Some(ref_path.rsplit('/').next()?.to_string()) + Some(ref_to_type_name(ref_path)?.to_string()) } Some(ObjectOrReference::Object(schema)) => schema_to_type_name(schema), None => None, @@ -264,7 +265,7 @@ fn schema_type_from_schema(schema: &Schema) -> Option { ObjectOrReference::Ref { ref_path, .. } => { // Return the type name as-is (e.g., "Height", "Address") // These should have definitions generated from schemas - ref_path.rsplit('/').next().map(|s| s.to_string()) + ref_to_type_name(ref_path).map(|s| s.to_string()) } }, } diff --git a/crates/brk_bindgen/src/syntax.rs b/crates/brk_bindgen/src/syntax.rs new file mode 100644 index 000000000..b4c9a814e --- /dev/null +++ b/crates/brk_bindgen/src/syntax.rs @@ -0,0 +1,105 @@ +//! Language-specific syntax traits for code generation. +//! +//! This module defines the `LanguageSyntax` trait that abstracts over +//! language-specific code generation patterns, allowing shared generation +//! logic to work across Python, JavaScript, and Rust backends. + +use crate::{FieldNamePosition, GenericSyntax}; + +/// Language-specific syntax for code generation. +/// +/// Implementations of this trait provide the language-specific formatting +/// for generated client code. This allows the core generation logic to be +/// written once and reused across all supported languages. +pub trait LanguageSyntax { + /// Convert a field name to the language's naming convention. + /// + /// - Python/Rust: `snake_case` + /// - JavaScript: `camelCase` + fn field_name(&self, name: &str) -> String; + + /// Format an interpolated path expression. 
+ /// + /// # Arguments + /// * `base_var` - The variable name to interpolate (e.g., "acc", "base_path") + /// * `suffix` - The suffix to append (e.g., "_field_name") + /// + /// # Returns + /// - Python: `f'{acc}_suffix'` + /// - JavaScript: `` `${acc}_suffix` `` + /// - Rust: `format!("{acc}_suffix")` + fn path_expr(&self, base_var: &str, suffix: &str) -> String; + + /// Format a `FieldNamePosition` as a path expression. + /// + /// This handles the different name transformation patterns (append, prepend, + /// identity, set_base) in a language-specific way. + fn position_expr(&self, pos: &FieldNamePosition, base_var: &str) -> String; + + /// Generate a constructor call for patterns and accessors. + /// + /// - Python: `TypeName(client, path)` + /// - JavaScript: `createTypeName(client, path)` + /// - Rust: `TypeName::new(client.clone(), path)` + fn constructor(&self, type_name: &str, path_expr: &str) -> String; + + /// Generate a field initialization line. + /// + /// # Arguments + /// * `indent` - The indentation string + /// * `name` - The field name (already converted to language convention) + /// * `type_ann` - The type annotation (may be ignored by some languages) + /// * `value` - The initialization value/expression + /// + /// # Returns + /// - Python: `{indent}self.{name}: {type_ann} = {value}` + /// - JavaScript: `{indent}{name}: {value},` + /// - Rust: `{indent}{name}: {value},` + fn field_init(&self, indent: &str, name: &str, type_ann: &str, value: &str) -> String; + + /// Get the generic type syntax for this language. + /// + /// - Python: `[T]` with default `Any` + /// - JavaScript: `` with default `unknown` + /// - Rust: `` with default `_` + fn generic_syntax(&self) -> GenericSyntax; + + /// Generate a struct/class header. 
+ /// + /// # Arguments + /// * `name` - The type name + /// * `generic_params` - Generic parameters (e.g., "" or "[T]"), empty if none + /// * `doc` - Optional documentation string + fn struct_header(&self, name: &str, generic_params: &str, doc: Option<&str>) -> String; + + /// Generate a struct/class footer. + fn struct_footer(&self) -> String; + + /// Generate a constructor/init method header. + /// + /// # Arguments + /// * `params` - Constructor parameters (language-specific format) + fn constructor_header(&self, params: &str) -> String; + + /// Generate a constructor/init method footer. + fn constructor_footer(&self) -> String; + + /// Generate a field declaration (for struct body, not init). + /// + /// # Arguments + /// * `indent` - The indentation string + /// * `name` - The field name + /// * `type_ann` - The type annotation + fn field_declaration(&self, indent: &str, name: &str, type_ann: &str) -> String; + + /// Format an index field name from an Index. + /// + /// E.g., `by_date_height`, `by_date`, etc. + fn index_field_name(&self, index_name: &str) -> String; + + /// Format a string literal. + /// + /// - Python/JavaScript: `'value'` (single quotes) + /// - Rust: `"value"` (double quotes) + fn string_literal(&self, value: &str) -> String; +} diff --git a/crates/brk_binder/src/types/case.rs b/crates/brk_bindgen/src/types/case.rs similarity index 57% rename from crates/brk_binder/src/types/case.rs rename to crates/brk_bindgen/src/types/case.rs index 3bce6fcc4..e03d5b333 100644 --- a/crates/brk_binder/src/types/case.rs +++ b/crates/brk_bindgen/src/types/case.rs @@ -1,3 +1,5 @@ +use brk_types::Index; + /// Convert a string to PascalCase (e.g., "fee_rate" -> "FeeRate"). pub fn to_pascal_case(s: &str) -> String { s.replace('-', "_") @@ -51,3 +53,38 @@ pub fn to_camel_case(s: &str) -> String { result } } + +/// Convert an Index to a snake_case field name (e.g., DateIndex -> by_dateindex). 
+pub fn index_to_field_name(index: &Index) -> String { + format!("by_{}", to_snake_case(index.serialize_long())) +} + +/// Generate a child type/struct/class name (e.g., ParentName + child_name -> ParentName_ChildName). +pub fn child_type_name(parent: &str, child: &str) -> String { + format!("{}_{}", parent, to_pascal_case(child)) +} + +/// Escape Python reserved keywords by appending an underscore. +/// Also prefixes names starting with digits with an underscore. +pub fn escape_python_keyword(name: &str) -> String { + const PYTHON_KEYWORDS: &[&str] = &[ + "False", "None", "True", "and", "as", "assert", "async", "await", "break", "class", + "continue", "def", "del", "elif", "else", "except", "finally", "for", "from", "global", + "if", "import", "in", "is", "lambda", "nonlocal", "not", "or", "pass", "raise", "return", + "try", "while", "with", "yield", + ]; + + // Prefix with underscore if starts with digit + let name = if name.starts_with(|c: char| c.is_ascii_digit()) { + format!("_{}", name) + } else { + name.to_string() + }; + + // Append underscore if it's a keyword + if PYTHON_KEYWORDS.contains(&name.as_str()) { + format!("{}_", name) + } else { + name + } +} diff --git a/crates/brk_bindgen/src/types/metadata.rs b/crates/brk_bindgen/src/types/metadata.rs new file mode 100644 index 000000000..2340cb0eb --- /dev/null +++ b/crates/brk_bindgen/src/types/metadata.rs @@ -0,0 +1,119 @@ +//! Client metadata extracted from brk_query. + +use std::collections::{BTreeSet, HashMap}; + +use brk_query::Vecs; +use brk_types::Index; + +use super::{GenericSyntax, IndexSetPattern, PatternField, StructuralPattern, extract_inner_type}; +use crate::analysis; + +/// Metadata extracted from brk_query for client generation. 
+#[derive(Debug)] +pub struct ClientMetadata { + /// The catalog tree structure (with schemas in leaves) + pub catalog: brk_types::TreeNode, + /// Structural patterns - tree node shapes that repeat + pub structural_patterns: Vec, + /// All indexes used across the catalog + pub used_indexes: BTreeSet, + /// Index set patterns - sets of indexes that appear together on metrics + pub index_set_patterns: Vec, + /// Maps concrete field signatures to pattern names + concrete_to_pattern: HashMap, String>, + /// Maps concrete field signatures to their type parameter (for generic patterns) + concrete_to_type_param: HashMap, String>, +} + +impl ClientMetadata { + /// Extract metadata from brk_query::Vecs. + pub fn from_vecs(vecs: &Vecs) -> Self { + let catalog = vecs.catalog().clone(); + let (structural_patterns, concrete_to_pattern, concrete_to_type_param) = + analysis::detect_structural_patterns(&catalog); + let (used_indexes, index_set_patterns) = analysis::detect_index_patterns(&catalog); + + ClientMetadata { + catalog, + structural_patterns, + used_indexes, + index_set_patterns, + concrete_to_pattern, + concrete_to_type_param, + } + } + + /// Find an index set pattern that matches the given indexes. + pub fn find_index_set_pattern(&self, indexes: &BTreeSet) -> Option<&IndexSetPattern> { + self.index_set_patterns + .iter() + .find(|p| &p.indexes == indexes) + } + + /// Check if a type is a structural pattern name. + pub fn is_pattern_type(&self, type_name: &str) -> bool { + self.structural_patterns.iter().any(|p| p.name == type_name) + } + + /// Find a pattern by name. + pub fn find_pattern(&self, name: &str) -> Option<&StructuralPattern> { + self.structural_patterns.iter().find(|p| p.name == name) + } + + /// Check if a pattern is generic. + pub fn is_pattern_generic(&self, name: &str) -> bool { + self.find_pattern(name).is_some_and(|p| p.is_generic) + } + + /// Get the type parameter for a generic pattern given its concrete fields. 
+ pub fn get_type_param(&self, fields: &[PatternField]) -> Option<&String> { + self.concrete_to_type_param.get(fields) + } + + /// Build a lookup map from field signatures to pattern names. + pub fn pattern_lookup(&self) -> HashMap, String> { + let mut lookup = self.concrete_to_pattern.clone(); + for p in &self.structural_patterns { + lookup.insert(p.fields.clone(), p.name.clone()); + } + lookup + } + + /// Check if a field should use a shared index accessor. + pub fn field_uses_accessor(&self, field: &PatternField) -> bool { + self.find_index_set_pattern(&field.indexes).is_some() + } + + /// Generate type annotation for a field with language-specific syntax. + pub fn field_type_annotation( + &self, + field: &PatternField, + is_generic: bool, + generic_value_type: Option<&str>, + syntax: GenericSyntax, + ) -> String { + let value_type = if is_generic && field.rust_type == "T" { + "T".to_string() + } else { + extract_inner_type(&field.rust_type) + }; + + if self.is_pattern_type(&field.rust_type) { + if self.is_pattern_generic(&field.rust_type) { + let type_param = field + .type_param + .as_deref() + .or(generic_value_type) + .unwrap_or(if is_generic { "T" } else { syntax.default_type }); + return syntax.wrap(&field.rust_type, type_param); + } + field.rust_type.clone() + } else if field.is_branch() { + field.rust_type.clone() + } else if let Some(accessor) = self.find_index_set_pattern(&field.indexes) { + syntax.wrap(&accessor.name, &value_type) + } else { + syntax.wrap("MetricNode", &value_type) + } + } +} diff --git a/crates/brk_bindgen/src/types/mod.rs b/crates/brk_bindgen/src/types/mod.rs new file mode 100644 index 000000000..006450bc4 --- /dev/null +++ b/crates/brk_bindgen/src/types/mod.rs @@ -0,0 +1,31 @@ +//! Core types for client generation. 
+ +mod case; +mod metadata; +mod positions; +mod schema; +mod structs; + +pub use case::*; +pub use metadata::*; +pub use positions::*; +pub use schema::*; +pub use structs::*; + +/// Language-specific syntax for generic type annotations. +#[derive(Clone, Copy)] +pub struct GenericSyntax { + pub open: char, + pub close: char, + pub default_type: &'static str, +} + +impl GenericSyntax { + pub const PYTHON: Self = Self { open: '[', close: ']', default_type: "Any" }; + pub const JAVASCRIPT: Self = Self { open: '<', close: '>', default_type: "unknown" }; + pub const RUST: Self = Self { open: '<', close: '>', default_type: "_" }; + + pub fn wrap(&self, name: &str, type_param: &str) -> String { + format!("{}{}{}{}", name, self.open, type_param, self.close) + } +} diff --git a/crates/brk_bindgen/src/types/positions.rs b/crates/brk_bindgen/src/types/positions.rs new file mode 100644 index 000000000..6e8c7d3a5 --- /dev/null +++ b/crates/brk_bindgen/src/types/positions.rs @@ -0,0 +1,14 @@ +//! Field name position types for metric name reconstruction. + +/// How a field modifies the accumulated metric name. +#[derive(Debug, Clone, PartialEq, Eq)] +pub enum FieldNamePosition { + /// Field prepends a prefix: leaf.name() = prefix + accumulated + Prepend(String), + /// Field appends a suffix: leaf.name() = accumulated + suffix + Append(String), + /// Field IS the accumulated name (no modification) + Identity, + /// Field sets a new base name (used at pattern entry points) + SetBase(String), +} diff --git a/crates/brk_binder/src/types/schema.rs b/crates/brk_bindgen/src/types/schema.rs similarity index 82% rename from crates/brk_binder/src/types/schema.rs rename to crates/brk_bindgen/src/types/schema.rs index f6fd439f8..ec64a5253 100644 --- a/crates/brk_binder/src/types/schema.rs +++ b/crates/brk_bindgen/src/types/schema.rs @@ -11,13 +11,6 @@ pub fn unwrap_allof(schema: &Value) -> &Value { schema } -/// Check if a schema represents an enum type. 
-/// Enums have either an "enum" array or "oneOf" without properties. -pub fn is_enum_schema(schema: &Value) -> bool { - schema.get("enum").is_some() - || (schema.get("oneOf").is_some() && schema.get("properties").is_none()) -} - /// Extract inner type from a wrapper generic like `Close` -> `Dollars`. /// Also handles malformed types like `Dollars>` (from vecdb's short_type_name). pub fn extract_inner_type(type_str: &str) -> String { @@ -43,3 +36,9 @@ pub fn schema_to_json_type(schema: &Value) -> String { .unwrap_or("object") .to_string() } + +/// Extract type name from a JSON Schema $ref path. +/// E.g., "#/definitions/MyType" -> "MyType", "#/$defs/Foo" -> "Foo" +pub fn ref_to_type_name(ref_path: &str) -> Option<&str> { + ref_path.rsplit('/').next() +} diff --git a/crates/brk_bindgen/src/types/structs.rs b/crates/brk_bindgen/src/types/structs.rs new file mode 100644 index 000000000..342d3b9ae --- /dev/null +++ b/crates/brk_bindgen/src/types/structs.rs @@ -0,0 +1,95 @@ +//! Structural pattern and field types. + +use std::collections::{BTreeSet, HashMap}; + +use brk_types::Index; + +use super::FieldNamePosition; + +/// A pattern of indexes that appear together on multiple metrics. +#[derive(Debug, Clone)] +pub struct IndexSetPattern { + /// Pattern name (e.g., "DateHeightIndexes") + pub name: String, + /// The set of indexes + pub indexes: BTreeSet, +} + +/// A structural pattern - a branch structure that appears multiple times. +#[derive(Debug, Clone)] +pub struct StructuralPattern { + /// Pattern name + pub name: String, + /// Ordered list of child fields + pub fields: Vec, + /// How each field modifies the accumulated name + pub field_positions: HashMap, + /// If true, all leaf fields use a type parameter T + pub is_generic: bool, +} + +impl StructuralPattern { + /// Returns true if this pattern contains any leaf fields. 
+ pub fn contains_leaves(&self) -> bool { + self.fields.iter().any(|f| f.is_leaf()) + } + + /// Returns true if all leaf fields have consistent name transformations. + pub fn is_parameterizable(&self) -> bool { + !self.field_positions.is_empty() + && self + .fields + .iter() + .all(|f| f.is_branch() || self.field_positions.contains_key(&f.name)) + } + + /// Get the field position for a given field name. + pub fn get_field_position(&self, field_name: &str) -> Option<&FieldNamePosition> { + self.field_positions.get(field_name) + } +} + +/// A field in a structural pattern. +#[derive(Debug, Clone, PartialOrd, Ord)] +pub struct PatternField { + /// Field name + pub name: String, + /// Rust type for leaves or pattern name for branches + pub rust_type: String, + /// JSON type from schema + pub json_type: String, + /// For leaves: the set of supported indexes. Empty for branches. + pub indexes: BTreeSet, + /// For branches referencing generic patterns: the concrete type parameter + pub type_param: Option, +} + +impl PatternField { + /// Returns true if this is a leaf field (has indexes). + pub fn is_leaf(&self) -> bool { + !self.indexes.is_empty() + } + + /// Returns true if this is a branch field (no indexes). 
+ pub fn is_branch(&self) -> bool { + self.indexes.is_empty() + } +} + +impl std::hash::Hash for PatternField { + fn hash(&self, state: &mut H) { + self.name.hash(state); + self.rust_type.hash(state); + self.json_type.hash(state); + } +} + +impl PartialEq for PatternField { + fn eq(&self, other: &Self) -> bool { + self.name == other.name + && self.rust_type == other.rust_type + && self.json_type == other.json_type + } +} + +impl Eq for PatternField {} diff --git a/crates/brk_bundler/Cargo.toml b/crates/brk_bundler/Cargo.toml index 1bf78686a..7b2f8e18f 100644 --- a/crates/brk_bundler/Cargo.toml +++ b/crates/brk_bundler/Cargo.toml @@ -12,6 +12,9 @@ build = "build.rs" log = { workspace = true } notify = "8.2.0" # rolldown = { path = "../../../rolldown/crates/rolldown", package = "brk_rolldown" } -rolldown = { version = "0.6.0", package = "brk_rolldown" } +rolldown = { version = "0.7.0", package = "brk_rolldown" } sugar_path = "1.2.1" tokio = { workspace = true } + +[dev-dependencies] +env_logger = { workspace = true } diff --git a/crates/brk_bundler/examples/bundle.rs b/crates/brk_bundler/examples/bundle.rs new file mode 100644 index 000000000..46dfeba4b --- /dev/null +++ b/crates/brk_bundler/examples/bundle.rs @@ -0,0 +1,37 @@ +use std::{io, path::PathBuf, thread, time::Duration}; + +use brk_bundler::bundle; + +fn find_dev_dirs() -> Option<(PathBuf, PathBuf)> { + let mut dir = std::env::current_dir().ok()?; + loop { + let websites = dir.join("websites"); + let modules = dir.join("modules"); + if websites.exists() && modules.exists() { + return Some((websites, modules)); + } + // Stop at workspace root (crates/ indicates we're there) + if dir.join("crates").exists() { + return None; + } + dir = dir.parent()?.to_path_buf(); + } +} + +#[tokio::main] +async fn main() -> io::Result<()> { + env_logger::Builder::from_env(env_logger::Env::default().default_filter_or("debug")).init(); + + let (websites_path, modules_path) = + find_dev_dirs().expect("Run from within the brk 
workspace"); + let source_folder = "bitview"; + + let dist_path = bundle(&modules_path, &websites_path, source_folder, true).await?; + + println!("Bundle created at: {}", dist_path.display()); + println!("Watching for changes... (Ctrl+C to stop)"); + + loop { + thread::sleep(Duration::from_secs(60)); + } +} diff --git a/crates/brk_bundler/src/lib.rs b/crates/brk_bundler/src/lib.rs index 8a733ce16..0cddde9eb 100644 --- a/crates/brk_bundler/src/lib.rs +++ b/crates/brk_bundler/src/lib.rs @@ -3,10 +3,11 @@ use std::{ fs, io, path::{Path, PathBuf}, + time::Duration, }; -use log::error; -use notify::{EventKind, RecursiveMode, Watcher}; +use log::{debug, error, info}; +use notify::{EventKind, PollWatcher, RecursiveMode, Watcher}; use rolldown::{ Bundler, BundlerConfig, BundlerOptions, InlineConstConfig, InlineConstMode, InlineConstOption, OptimizationOption, RawMinifyOptions, SourceMapType, @@ -45,6 +46,11 @@ pub async fn bundle( let absolute_dist_index_path = absolute_dist_path.join("index.html"); let absolute_dist_sw_path = absolute_dist_path.join("service-worker.js"); + info!("Bundling {source_folder}..."); + info!(" modules: {absolute_modules_path:?}"); + info!(" source: {absolute_source_path:?}"); + info!(" dist: {absolute_dist_path:?}"); + let _ = fs::remove_dir_all(&absolute_dist_path); let _ = fs::remove_dir_all(&absolute_source_scripts_modules_path); copy_dir_all( @@ -122,53 +128,99 @@ pub async fn bundle( return Ok(relative_dist_path); } - tokio::spawn(async move { - let mut event_watcher = notify::recommended_watcher( - move |res: Result| match res { - Ok(event) => match event.kind { - EventKind::Create(_) => event.paths, - EventKind::Modify(_) => event.paths, - _ => vec![], - } - .into_iter() - .for_each(|path| { - let path = path.absolutize(); + // Clone paths for the second watcher + let absolute_websites_path_clone2 = absolute_websites_path_clone.clone(); + let absolute_modules_path_clone2 = absolute_modules_path_clone.clone(); - if path == 
absolute_dist_scripts_entry_path - || path == absolute_source_index_path_clone - { - update_dist_index(); - } else if path == absolute_source_sw_path_clone { - update_source_sw(); - } else if let Ok(suffix) = path.strip_prefix(&absolute_modules_path) { - let source_modules_path = absolute_source_scripts_modules_path.join(suffix); - if path.is_file() { - let _ = fs::create_dir_all(path.parent().unwrap()); - let _ = fs::copy(&path, &source_modules_path); - } - } else if let Ok(suffix) = path.strip_prefix(&absolute_source_path) - // scripts are handled by rolldown - && !path.starts_with(&absolute_source_scripts_path) - { - let dist_path = absolute_dist_path.join(suffix); - if path.is_file() { - let _ = fs::create_dir_all(path.parent().unwrap()); - let _ = fs::copy(&path, &dist_path); + tokio::spawn(async move { + let handle_event = { + let absolute_dist_scripts_entry_path = absolute_dist_scripts_entry_path.clone(); + let absolute_source_index_path_clone = absolute_source_index_path_clone.clone(); + let absolute_source_sw_path_clone = absolute_source_sw_path_clone.clone(); + let absolute_modules_path = absolute_modules_path.clone(); + let absolute_source_scripts_modules_path = absolute_source_scripts_modules_path.clone(); + let absolute_source_path = absolute_source_path.clone(); + let absolute_source_scripts_path = absolute_source_scripts_path.clone(); + let absolute_dist_path = absolute_dist_path.clone(); + let update_dist_index = update_dist_index.clone(); + let update_source_sw = update_source_sw.clone(); + + move |path: PathBuf| { + let path = path.absolutize(); + + if path == absolute_dist_scripts_entry_path + || path == absolute_source_index_path_clone + { + update_dist_index(); + } else if path == absolute_source_sw_path_clone { + update_source_sw(); + } else if let Ok(suffix) = path.strip_prefix(&absolute_modules_path) { + let dest = absolute_source_scripts_modules_path.join(suffix); + if path.is_file() { + debug!("Copying module: {path:?} -> {dest:?}"); + let 
_ = fs::create_dir_all(dest.parent().unwrap()); + if let Err(e) = fs::copy(&path, &dest) { + error!("Copy failed: {e}"); } } - }), - Err(e) => error!("watch error: {e:?}"), + } else if let Ok(suffix) = path.strip_prefix(&absolute_source_path) + // scripts are handled by rolldown + && !path.starts_with(&absolute_source_scripts_path) + { + let dist_path = absolute_dist_path.join(suffix); + if path.is_file() { + let _ = fs::create_dir_all(path.parent().unwrap()); + let _ = fs::copy(&path, &dist_path); + } + } + } + }; + + // FSEvents watcher for instant response to manual saves + let handle_event_clone = handle_event.clone(); + let mut fs_watcher = notify::recommended_watcher( + move |res: Result| match res { + Ok(event) => match event.kind { + EventKind::Create(_) | EventKind::Modify(_) => { + event.paths.into_iter().for_each(&handle_event_clone); + } + _ => {} + }, + Err(e) => error!("fs watch error: {e:?}"), }, ) .unwrap(); - event_watcher + fs_watcher .watch(&absolute_websites_path_clone, RecursiveMode::Recursive) .unwrap(); - event_watcher + fs_watcher .watch(&absolute_modules_path_clone, RecursiveMode::Recursive) .unwrap(); + // Poll watcher to catch programmatic edits (e.g., Claude Code's atomic writes) + let poll_config = notify::Config::default().with_poll_interval(Duration::from_secs(1)); + let mut poll_watcher = PollWatcher::new( + move |res: Result| match res { + Ok(event) => match event.kind { + EventKind::Create(_) | EventKind::Modify(_) => { + event.paths.into_iter().for_each(&handle_event); + } + _ => {} + }, + Err(e) => error!("poll watch error: {e:?}"), + }, + poll_config, + ) + .unwrap(); + + poll_watcher + .watch(&absolute_websites_path_clone2, RecursiveMode::Recursive) + .unwrap(); + poll_watcher + .watch(&absolute_modules_path_clone2, RecursiveMode::Recursive) + .unwrap(); + let config = BundlerConfig::new(bundler_options, vec![]); let watcher = rolldown::Watcher::new(config, None).unwrap(); diff --git a/crates/brk_cli/Cargo.toml 
b/crates/brk_cli/Cargo.toml index 2897e73ea..3b83790f5 100644 --- a/crates/brk_cli/Cargo.toml +++ b/crates/brk_cli/Cargo.toml @@ -9,7 +9,7 @@ repository.workspace = true build = "build.rs" [dependencies] -brk_binder = { workspace = true } +brk_bindgen = { workspace = true } brk_bundler = { workspace = true } brk_computer = { workspace = true } brk_error = { workspace = true } diff --git a/crates/brk_cli/src/main.rs b/crates/brk_cli/src/main.rs index 8c40e76ff..3a24675e3 100644 --- a/crates/brk_cli/src/main.rs +++ b/crates/brk_cli/src/main.rs @@ -3,7 +3,7 @@ use std::{ fs, io::Cursor, - path::Path, + path::PathBuf, thread::{self, sleep}, time::Duration, }; @@ -83,15 +83,29 @@ pub fn run() -> color_eyre::Result<()> { let future = async move { let bundle_path = if website.is_some() { - let websites_dev_path = Path::new("../../websites"); - let modules_dev_path = Path::new("../../modules"); + // Try to find local dev directories - check cwd and parent directories + let find_dev_dirs = || -> Option<(PathBuf, PathBuf)> { + let mut dir = std::env::current_dir().ok()?; + loop { + let websites = dir.join("websites"); + let modules = dir.join("modules"); + if websites.exists() && modules.exists() { + return Some((websites, modules)); + } + // Stop at workspace root (crates/ indicates we're there) + if dir.join("crates").exists() { + return None; + } + dir = dir.parent()?.to_path_buf(); + } + }; let websites_path; let modules_path; - if fs::exists(websites_dev_path)? && fs::exists(modules_dev_path)? 
{ - websites_path = websites_dev_path.to_path_buf(); - modules_path = modules_dev_path.to_path_buf(); + if let Some((websites, modules)) = find_dev_dirs() { + websites_path = websites; + modules_path = modules; } else { let downloaded_brk_path = downloads_path.join(format!("brk-{VERSION}")); @@ -105,7 +119,7 @@ pub fn run() -> color_eyre::Result<()> { "https://github.com/bitcoinresearchkit/brk/archive/refs/tags/v{VERSION}.zip", ); - let response = minreq::get(url).send()?; + let response = minreq::get(url).with_timeout(60).send()?; let bytes = response.as_bytes(); let cursor = Cursor::new(bytes); diff --git a/crates/brk_client/src/lib.rs b/crates/brk_client/src/lib.rs index c2ffc24e2..3f48a5a3e 100644 --- a/crates/brk_client/src/lib.rs +++ b/crates/brk_client/src/lib.rs @@ -72,7 +72,8 @@ impl BrkClientBase { /// Make a GET request. pub fn get(&self, path: &str) -> Result { - let url = format!("{}{}", self.base_url, path); + let base = self.base_url.trim_end_matches('/'); + let url = format!("{}{}", base, path); let response = minreq::get(&url) .with_timeout(self.timeout_secs) .send() @@ -90,517 +91,3157 @@ impl BrkClientBase { } } +/// Build metric name with optional prefix. +#[inline] +fn _m(acc: &str, s: &str) -> String { + if acc.is_empty() { s.to_string() } else { format!("{acc}_{s}") } +} -/// A metric node that can fetch data for different indexes. -pub struct MetricNode { + +/// Non-generic trait for metric patterns (usable in collections). +pub trait AnyMetricPattern { + /// Get the metric name. + fn name(&self) -> &str; + + /// Get the list of available indexes for this metric. + fn indexes(&self) -> &'static [Index]; +} + +/// Generic trait for metric patterns with endpoint access. +pub trait MetricPattern: AnyMetricPattern { + /// Get an endpoint for a specific index, if supported. + fn get(&self, index: Index) -> Option>; +} + + +/// An endpoint for a specific metric + index combination. 
+pub struct Endpoint { client: Arc, - path: String, + name: Arc, + index: Index, _marker: std::marker::PhantomData, } -impl MetricNode { - pub fn new(client: Arc, path: String) -> Self { +impl Endpoint { + pub fn new(client: Arc, name: Arc, index: Index) -> Self { Self { client, - path, + name, + index, _marker: std::marker::PhantomData, } } - /// Fetch all data points for this metric. + /// Fetch all data points for this metric/index. pub fn get(&self) -> Result> { - self.client.get(&self.path) + self.client.get(&self.path()) } /// Fetch data points within a range. - pub fn get_range(&self, from: Option<&str>, to: Option<&str>) -> Result> { + pub fn range(&self, from: Option, to: Option) -> Result> { let mut params = Vec::new(); if let Some(f) = from { params.push(format!("from={}", f)); } if let Some(t) = to { params.push(format!("to={}", t)); } + let p = self.path(); let path = if params.is_empty() { - self.path.clone() + p } else { - format!("{}?{}", self.path, params.join("&")) + format!("{}?{}", p, params.join("&")) }; self.client.get(&path) } + + /// Get the endpoint path. + pub fn path(&self) -> String { + format!("/api/metric/{}/{}", self.name, self.index.serialize_long()) + } } // Index accessor structs +/// Container for index endpoint methods. 
+pub struct MetricPattern1By { + client: Arc, + name: Arc, + _marker: std::marker::PhantomData, +} + +impl MetricPattern1By { + pub fn by_dateindex(&self) -> Endpoint { + Endpoint::new(self.client.clone(), self.name.clone(), Index::DateIndex) + } + pub fn by_decadeindex(&self) -> Endpoint { + Endpoint::new(self.client.clone(), self.name.clone(), Index::DecadeIndex) + } + pub fn by_difficultyepoch(&self) -> Endpoint { + Endpoint::new(self.client.clone(), self.name.clone(), Index::DifficultyEpoch) + } + pub fn by_height(&self) -> Endpoint { + Endpoint::new(self.client.clone(), self.name.clone(), Index::Height) + } + pub fn by_monthindex(&self) -> Endpoint { + Endpoint::new(self.client.clone(), self.name.clone(), Index::MonthIndex) + } + pub fn by_quarterindex(&self) -> Endpoint { + Endpoint::new(self.client.clone(), self.name.clone(), Index::QuarterIndex) + } + pub fn by_semesterindex(&self) -> Endpoint { + Endpoint::new(self.client.clone(), self.name.clone(), Index::SemesterIndex) + } + pub fn by_weekindex(&self) -> Endpoint { + Endpoint::new(self.client.clone(), self.name.clone(), Index::WeekIndex) + } + pub fn by_yearindex(&self) -> Endpoint { + Endpoint::new(self.client.clone(), self.name.clone(), Index::YearIndex) + } +} + /// Index accessor for metrics with 9 indexes. 
-pub struct Indexes3 { - pub by_dateindex: MetricNode, - pub by_decadeindex: MetricNode, - pub by_difficultyepoch: MetricNode, - pub by_height: MetricNode, - pub by_monthindex: MetricNode, - pub by_quarterindex: MetricNode, - pub by_semesterindex: MetricNode, - pub by_weekindex: MetricNode, - pub by_yearindex: MetricNode, +pub struct MetricPattern1 { + client: Arc, + name: Arc, + pub by: MetricPattern1By, } -impl Indexes3 { - pub fn new(client: Arc, base_path: &str) -> Self { +impl MetricPattern1 { + pub fn new(client: Arc, name: String) -> Self { + let name: Arc = name.into(); Self { - by_dateindex: MetricNode::new(client.clone(), format!("{base_path}/dateindex")), - by_decadeindex: MetricNode::new(client.clone(), format!("{base_path}/decadeindex")), - by_difficultyepoch: MetricNode::new(client.clone(), format!("{base_path}/difficultyepoch")), - by_height: MetricNode::new(client.clone(), format!("{base_path}/height")), - by_monthindex: MetricNode::new(client.clone(), format!("{base_path}/monthindex")), - by_quarterindex: MetricNode::new(client.clone(), format!("{base_path}/quarterindex")), - by_semesterindex: MetricNode::new(client.clone(), format!("{base_path}/semesterindex")), - by_weekindex: MetricNode::new(client.clone(), format!("{base_path}/weekindex")), - by_yearindex: MetricNode::new(client.clone(), format!("{base_path}/yearindex")), + client: client.clone(), + name: name.clone(), + by: MetricPattern1By { + client, + name, + _marker: std::marker::PhantomData, + } } } + + /// Get the metric name. 
+ pub fn name(&self) -> &str { + &self.name + } +} + +impl AnyMetricPattern for MetricPattern1 { + fn name(&self) -> &str { + &self.name + } + + fn indexes(&self) -> &'static [Index] { + &[ + Index::DateIndex, + Index::DecadeIndex, + Index::DifficultyEpoch, + Index::Height, + Index::MonthIndex, + Index::QuarterIndex, + Index::SemesterIndex, + Index::WeekIndex, + Index::YearIndex, + ] + } +} + +impl MetricPattern for MetricPattern1 { + fn get(&self, index: Index) -> Option> { + match index { + Index::DateIndex => Some(self.by.by_dateindex()), + Index::DecadeIndex => Some(self.by.by_decadeindex()), + Index::DifficultyEpoch => Some(self.by.by_difficultyepoch()), + Index::Height => Some(self.by.by_height()), + Index::MonthIndex => Some(self.by.by_monthindex()), + Index::QuarterIndex => Some(self.by.by_quarterindex()), + Index::SemesterIndex => Some(self.by.by_semesterindex()), + Index::WeekIndex => Some(self.by.by_weekindex()), + Index::YearIndex => Some(self.by.by_yearindex()), + _ => None, + } + } +} + +/// Container for index endpoint methods. 
+pub struct MetricPattern2By { + client: Arc, + name: Arc, + _marker: std::marker::PhantomData, +} + +impl MetricPattern2By { + pub fn by_dateindex(&self) -> Endpoint { + Endpoint::new(self.client.clone(), self.name.clone(), Index::DateIndex) + } + pub fn by_decadeindex(&self) -> Endpoint { + Endpoint::new(self.client.clone(), self.name.clone(), Index::DecadeIndex) + } + pub fn by_difficultyepoch(&self) -> Endpoint { + Endpoint::new(self.client.clone(), self.name.clone(), Index::DifficultyEpoch) + } + pub fn by_monthindex(&self) -> Endpoint { + Endpoint::new(self.client.clone(), self.name.clone(), Index::MonthIndex) + } + pub fn by_quarterindex(&self) -> Endpoint { + Endpoint::new(self.client.clone(), self.name.clone(), Index::QuarterIndex) + } + pub fn by_semesterindex(&self) -> Endpoint { + Endpoint::new(self.client.clone(), self.name.clone(), Index::SemesterIndex) + } + pub fn by_weekindex(&self) -> Endpoint { + Endpoint::new(self.client.clone(), self.name.clone(), Index::WeekIndex) + } + pub fn by_yearindex(&self) -> Endpoint { + Endpoint::new(self.client.clone(), self.name.clone(), Index::YearIndex) + } } /// Index accessor for metrics with 8 indexes. 
-pub struct Indexes4 { - pub by_dateindex: MetricNode, - pub by_decadeindex: MetricNode, - pub by_difficultyepoch: MetricNode, - pub by_monthindex: MetricNode, - pub by_quarterindex: MetricNode, - pub by_semesterindex: MetricNode, - pub by_weekindex: MetricNode, - pub by_yearindex: MetricNode, +pub struct MetricPattern2 { + client: Arc, + name: Arc, + pub by: MetricPattern2By, } -impl Indexes4 { - pub fn new(client: Arc, base_path: &str) -> Self { +impl MetricPattern2 { + pub fn new(client: Arc, name: String) -> Self { + let name: Arc = name.into(); Self { - by_dateindex: MetricNode::new(client.clone(), format!("{base_path}/dateindex")), - by_decadeindex: MetricNode::new(client.clone(), format!("{base_path}/decadeindex")), - by_difficultyepoch: MetricNode::new(client.clone(), format!("{base_path}/difficultyepoch")), - by_monthindex: MetricNode::new(client.clone(), format!("{base_path}/monthindex")), - by_quarterindex: MetricNode::new(client.clone(), format!("{base_path}/quarterindex")), - by_semesterindex: MetricNode::new(client.clone(), format!("{base_path}/semesterindex")), - by_weekindex: MetricNode::new(client.clone(), format!("{base_path}/weekindex")), - by_yearindex: MetricNode::new(client.clone(), format!("{base_path}/yearindex")), + client: client.clone(), + name: name.clone(), + by: MetricPattern2By { + client, + name, + _marker: std::marker::PhantomData, + } } } + + /// Get the metric name. 
+ pub fn name(&self) -> &str { + &self.name + } +} + +impl AnyMetricPattern for MetricPattern2 { + fn name(&self) -> &str { + &self.name + } + + fn indexes(&self) -> &'static [Index] { + &[ + Index::DateIndex, + Index::DecadeIndex, + Index::DifficultyEpoch, + Index::MonthIndex, + Index::QuarterIndex, + Index::SemesterIndex, + Index::WeekIndex, + Index::YearIndex, + ] + } +} + +impl MetricPattern for MetricPattern2 { + fn get(&self, index: Index) -> Option> { + match index { + Index::DateIndex => Some(self.by.by_dateindex()), + Index::DecadeIndex => Some(self.by.by_decadeindex()), + Index::DifficultyEpoch => Some(self.by.by_difficultyepoch()), + Index::MonthIndex => Some(self.by.by_monthindex()), + Index::QuarterIndex => Some(self.by.by_quarterindex()), + Index::SemesterIndex => Some(self.by.by_semesterindex()), + Index::WeekIndex => Some(self.by.by_weekindex()), + Index::YearIndex => Some(self.by.by_yearindex()), + _ => None, + } + } +} + +/// Container for index endpoint methods. +pub struct MetricPattern3By { + client: Arc, + name: Arc, + _marker: std::marker::PhantomData, +} + +impl MetricPattern3By { + pub fn by_dateindex(&self) -> Endpoint { + Endpoint::new(self.client.clone(), self.name.clone(), Index::DateIndex) + } + pub fn by_decadeindex(&self) -> Endpoint { + Endpoint::new(self.client.clone(), self.name.clone(), Index::DecadeIndex) + } + pub fn by_height(&self) -> Endpoint { + Endpoint::new(self.client.clone(), self.name.clone(), Index::Height) + } + pub fn by_monthindex(&self) -> Endpoint { + Endpoint::new(self.client.clone(), self.name.clone(), Index::MonthIndex) + } + pub fn by_quarterindex(&self) -> Endpoint { + Endpoint::new(self.client.clone(), self.name.clone(), Index::QuarterIndex) + } + pub fn by_semesterindex(&self) -> Endpoint { + Endpoint::new(self.client.clone(), self.name.clone(), Index::SemesterIndex) + } + pub fn by_weekindex(&self) -> Endpoint { + Endpoint::new(self.client.clone(), self.name.clone(), Index::WeekIndex) + } + pub fn 
by_yearindex(&self) -> Endpoint { + Endpoint::new(self.client.clone(), self.name.clone(), Index::YearIndex) + } } /// Index accessor for metrics with 8 indexes. -pub struct Indexes26 { - pub by_dateindex: MetricNode, - pub by_decadeindex: MetricNode, - pub by_height: MetricNode, - pub by_monthindex: MetricNode, - pub by_quarterindex: MetricNode, - pub by_semesterindex: MetricNode, - pub by_weekindex: MetricNode, - pub by_yearindex: MetricNode, +pub struct MetricPattern3 { + client: Arc, + name: Arc, + pub by: MetricPattern3By, } -impl Indexes26 { - pub fn new(client: Arc, base_path: &str) -> Self { +impl MetricPattern3 { + pub fn new(client: Arc, name: String) -> Self { + let name: Arc = name.into(); Self { - by_dateindex: MetricNode::new(client.clone(), format!("{base_path}/dateindex")), - by_decadeindex: MetricNode::new(client.clone(), format!("{base_path}/decadeindex")), - by_height: MetricNode::new(client.clone(), format!("{base_path}/height")), - by_monthindex: MetricNode::new(client.clone(), format!("{base_path}/monthindex")), - by_quarterindex: MetricNode::new(client.clone(), format!("{base_path}/quarterindex")), - by_semesterindex: MetricNode::new(client.clone(), format!("{base_path}/semesterindex")), - by_weekindex: MetricNode::new(client.clone(), format!("{base_path}/weekindex")), - by_yearindex: MetricNode::new(client.clone(), format!("{base_path}/yearindex")), + client: client.clone(), + name: name.clone(), + by: MetricPattern3By { + client, + name, + _marker: std::marker::PhantomData, + } + } + } + + /// Get the metric name. 
+ pub fn name(&self) -> &str { + &self.name + } +} + +impl AnyMetricPattern for MetricPattern3 { + fn name(&self) -> &str { + &self.name + } + + fn indexes(&self) -> &'static [Index] { + &[ + Index::DateIndex, + Index::DecadeIndex, + Index::Height, + Index::MonthIndex, + Index::QuarterIndex, + Index::SemesterIndex, + Index::WeekIndex, + Index::YearIndex, + ] + } +} + +impl MetricPattern for MetricPattern3 { + fn get(&self, index: Index) -> Option> { + match index { + Index::DateIndex => Some(self.by.by_dateindex()), + Index::DecadeIndex => Some(self.by.by_decadeindex()), + Index::Height => Some(self.by.by_height()), + Index::MonthIndex => Some(self.by.by_monthindex()), + Index::QuarterIndex => Some(self.by.by_quarterindex()), + Index::SemesterIndex => Some(self.by.by_semesterindex()), + Index::WeekIndex => Some(self.by.by_weekindex()), + Index::YearIndex => Some(self.by.by_yearindex()), + _ => None, } } } -/// Index accessor for metrics with 7 indexes. -pub struct Indexes { - pub by_dateindex: MetricNode, - pub by_decadeindex: MetricNode, - pub by_monthindex: MetricNode, - pub by_quarterindex: MetricNode, - pub by_semesterindex: MetricNode, - pub by_weekindex: MetricNode, - pub by_yearindex: MetricNode, +/// Container for index endpoint methods. 
+pub struct MetricPattern4By { + client: Arc, + name: Arc, + _marker: std::marker::PhantomData, } -impl Indexes { - pub fn new(client: Arc, base_path: &str) -> Self { - Self { - by_dateindex: MetricNode::new(client.clone(), format!("{base_path}/dateindex")), - by_decadeindex: MetricNode::new(client.clone(), format!("{base_path}/decadeindex")), - by_monthindex: MetricNode::new(client.clone(), format!("{base_path}/monthindex")), - by_quarterindex: MetricNode::new(client.clone(), format!("{base_path}/quarterindex")), - by_semesterindex: MetricNode::new(client.clone(), format!("{base_path}/semesterindex")), - by_weekindex: MetricNode::new(client.clone(), format!("{base_path}/weekindex")), - by_yearindex: MetricNode::new(client.clone(), format!("{base_path}/yearindex")), - } +impl MetricPattern4By { + pub fn by_dateindex(&self) -> Endpoint { + Endpoint::new(self.client.clone(), self.name.clone(), Index::DateIndex) + } + pub fn by_decadeindex(&self) -> Endpoint { + Endpoint::new(self.client.clone(), self.name.clone(), Index::DecadeIndex) + } + pub fn by_monthindex(&self) -> Endpoint { + Endpoint::new(self.client.clone(), self.name.clone(), Index::MonthIndex) + } + pub fn by_quarterindex(&self) -> Endpoint { + Endpoint::new(self.client.clone(), self.name.clone(), Index::QuarterIndex) + } + pub fn by_semesterindex(&self) -> Endpoint { + Endpoint::new(self.client.clone(), self.name.clone(), Index::SemesterIndex) + } + pub fn by_weekindex(&self) -> Endpoint { + Endpoint::new(self.client.clone(), self.name.clone(), Index::WeekIndex) + } + pub fn by_yearindex(&self) -> Endpoint { + Endpoint::new(self.client.clone(), self.name.clone(), Index::YearIndex) } } /// Index accessor for metrics with 7 indexes. 
-pub struct Indexes27 { - pub by_decadeindex: MetricNode, - pub by_height: MetricNode, - pub by_monthindex: MetricNode, - pub by_quarterindex: MetricNode, - pub by_semesterindex: MetricNode, - pub by_weekindex: MetricNode, - pub by_yearindex: MetricNode, +pub struct MetricPattern4 { + client: Arc, + name: Arc, + pub by: MetricPattern4By, } -impl Indexes27 { - pub fn new(client: Arc, base_path: &str) -> Self { +impl MetricPattern4 { + pub fn new(client: Arc, name: String) -> Self { + let name: Arc = name.into(); Self { - by_decadeindex: MetricNode::new(client.clone(), format!("{base_path}/decadeindex")), - by_height: MetricNode::new(client.clone(), format!("{base_path}/height")), - by_monthindex: MetricNode::new(client.clone(), format!("{base_path}/monthindex")), - by_quarterindex: MetricNode::new(client.clone(), format!("{base_path}/quarterindex")), - by_semesterindex: MetricNode::new(client.clone(), format!("{base_path}/semesterindex")), - by_weekindex: MetricNode::new(client.clone(), format!("{base_path}/weekindex")), - by_yearindex: MetricNode::new(client.clone(), format!("{base_path}/yearindex")), + client: client.clone(), + name: name.clone(), + by: MetricPattern4By { + client, + name, + _marker: std::marker::PhantomData, + } } } + + /// Get the metric name. 
+ pub fn name(&self) -> &str { + &self.name + } +} + +impl AnyMetricPattern for MetricPattern4 { + fn name(&self) -> &str { + &self.name + } + + fn indexes(&self) -> &'static [Index] { + &[ + Index::DateIndex, + Index::DecadeIndex, + Index::MonthIndex, + Index::QuarterIndex, + Index::SemesterIndex, + Index::WeekIndex, + Index::YearIndex, + ] + } +} + +impl MetricPattern for MetricPattern4 { + fn get(&self, index: Index) -> Option> { + match index { + Index::DateIndex => Some(self.by.by_dateindex()), + Index::DecadeIndex => Some(self.by.by_decadeindex()), + Index::MonthIndex => Some(self.by.by_monthindex()), + Index::QuarterIndex => Some(self.by.by_quarterindex()), + Index::SemesterIndex => Some(self.by.by_semesterindex()), + Index::WeekIndex => Some(self.by.by_weekindex()), + Index::YearIndex => Some(self.by.by_yearindex()), + _ => None, + } + } +} + +/// Container for index endpoint methods. +pub struct MetricPattern5By { + client: Arc, + name: Arc, + _marker: std::marker::PhantomData, +} + +impl MetricPattern5By { + pub fn by_decadeindex(&self) -> Endpoint { + Endpoint::new(self.client.clone(), self.name.clone(), Index::DecadeIndex) + } + pub fn by_height(&self) -> Endpoint { + Endpoint::new(self.client.clone(), self.name.clone(), Index::Height) + } + pub fn by_monthindex(&self) -> Endpoint { + Endpoint::new(self.client.clone(), self.name.clone(), Index::MonthIndex) + } + pub fn by_quarterindex(&self) -> Endpoint { + Endpoint::new(self.client.clone(), self.name.clone(), Index::QuarterIndex) + } + pub fn by_semesterindex(&self) -> Endpoint { + Endpoint::new(self.client.clone(), self.name.clone(), Index::SemesterIndex) + } + pub fn by_weekindex(&self) -> Endpoint { + Endpoint::new(self.client.clone(), self.name.clone(), Index::WeekIndex) + } + pub fn by_yearindex(&self) -> Endpoint { + Endpoint::new(self.client.clone(), self.name.clone(), Index::YearIndex) + } +} + +/// Index accessor for metrics with 7 indexes. 
+pub struct MetricPattern5 { + client: Arc, + name: Arc, + pub by: MetricPattern5By, +} + +impl MetricPattern5 { + pub fn new(client: Arc, name: String) -> Self { + let name: Arc = name.into(); + Self { + client: client.clone(), + name: name.clone(), + by: MetricPattern5By { + client, + name, + _marker: std::marker::PhantomData, + } + } + } + + /// Get the metric name. + pub fn name(&self) -> &str { + &self.name + } +} + +impl AnyMetricPattern for MetricPattern5 { + fn name(&self) -> &str { + &self.name + } + + fn indexes(&self) -> &'static [Index] { + &[ + Index::DecadeIndex, + Index::Height, + Index::MonthIndex, + Index::QuarterIndex, + Index::SemesterIndex, + Index::WeekIndex, + Index::YearIndex, + ] + } +} + +impl MetricPattern for MetricPattern5 { + fn get(&self, index: Index) -> Option> { + match index { + Index::DecadeIndex => Some(self.by.by_decadeindex()), + Index::Height => Some(self.by.by_height()), + Index::MonthIndex => Some(self.by.by_monthindex()), + Index::QuarterIndex => Some(self.by.by_quarterindex()), + Index::SemesterIndex => Some(self.by.by_semesterindex()), + Index::WeekIndex => Some(self.by.by_weekindex()), + Index::YearIndex => Some(self.by.by_yearindex()), + _ => None, + } + } +} + +/// Container for index endpoint methods. 
+pub struct MetricPattern6By { + client: Arc, + name: Arc, + _marker: std::marker::PhantomData, +} + +impl MetricPattern6By { + pub fn by_decadeindex(&self) -> Endpoint { + Endpoint::new(self.client.clone(), self.name.clone(), Index::DecadeIndex) + } + pub fn by_monthindex(&self) -> Endpoint { + Endpoint::new(self.client.clone(), self.name.clone(), Index::MonthIndex) + } + pub fn by_quarterindex(&self) -> Endpoint { + Endpoint::new(self.client.clone(), self.name.clone(), Index::QuarterIndex) + } + pub fn by_semesterindex(&self) -> Endpoint { + Endpoint::new(self.client.clone(), self.name.clone(), Index::SemesterIndex) + } + pub fn by_weekindex(&self) -> Endpoint { + Endpoint::new(self.client.clone(), self.name.clone(), Index::WeekIndex) + } + pub fn by_yearindex(&self) -> Endpoint { + Endpoint::new(self.client.clone(), self.name.clone(), Index::YearIndex) + } } /// Index accessor for metrics with 6 indexes. -pub struct Indexes28 { - pub by_decadeindex: MetricNode, - pub by_monthindex: MetricNode, - pub by_quarterindex: MetricNode, - pub by_semesterindex: MetricNode, - pub by_weekindex: MetricNode, - pub by_yearindex: MetricNode, +pub struct MetricPattern6 { + client: Arc, + name: Arc, + pub by: MetricPattern6By, } -impl Indexes28 { - pub fn new(client: Arc, base_path: &str) -> Self { +impl MetricPattern6 { + pub fn new(client: Arc, name: String) -> Self { + let name: Arc = name.into(); Self { - by_decadeindex: MetricNode::new(client.clone(), format!("{base_path}/decadeindex")), - by_monthindex: MetricNode::new(client.clone(), format!("{base_path}/monthindex")), - by_quarterindex: MetricNode::new(client.clone(), format!("{base_path}/quarterindex")), - by_semesterindex: MetricNode::new(client.clone(), format!("{base_path}/semesterindex")), - by_weekindex: MetricNode::new(client.clone(), format!("{base_path}/weekindex")), - by_yearindex: MetricNode::new(client.clone(), format!("{base_path}/yearindex")), + client: client.clone(), + name: name.clone(), + by: 
MetricPattern6By { + client, + name, + _marker: std::marker::PhantomData, + } } } + + /// Get the metric name. + pub fn name(&self) -> &str { + &self.name + } +} + +impl AnyMetricPattern for MetricPattern6 { + fn name(&self) -> &str { + &self.name + } + + fn indexes(&self) -> &'static [Index] { + &[ + Index::DecadeIndex, + Index::MonthIndex, + Index::QuarterIndex, + Index::SemesterIndex, + Index::WeekIndex, + Index::YearIndex, + ] + } +} + +impl MetricPattern for MetricPattern6 { + fn get(&self, index: Index) -> Option> { + match index { + Index::DecadeIndex => Some(self.by.by_decadeindex()), + Index::MonthIndex => Some(self.by.by_monthindex()), + Index::QuarterIndex => Some(self.by.by_quarterindex()), + Index::SemesterIndex => Some(self.by.by_semesterindex()), + Index::WeekIndex => Some(self.by.by_weekindex()), + Index::YearIndex => Some(self.by.by_yearindex()), + _ => None, + } + } +} + +/// Container for index endpoint methods. +pub struct MetricPattern7By { + client: Arc, + name: Arc, + _marker: std::marker::PhantomData, +} + +impl MetricPattern7By { + pub fn by_emptyoutputindex(&self) -> Endpoint { + Endpoint::new(self.client.clone(), self.name.clone(), Index::EmptyOutputIndex) + } + pub fn by_opreturnindex(&self) -> Endpoint { + Endpoint::new(self.client.clone(), self.name.clone(), Index::OpReturnIndex) + } + pub fn by_p2msoutputindex(&self) -> Endpoint { + Endpoint::new(self.client.clone(), self.name.clone(), Index::P2MSOutputIndex) + } + pub fn by_unknownoutputindex(&self) -> Endpoint { + Endpoint::new(self.client.clone(), self.name.clone(), Index::UnknownOutputIndex) + } +} + +/// Index accessor for metrics with 4 indexes. 
+pub struct MetricPattern7 { + client: Arc, + name: Arc, + pub by: MetricPattern7By, +} + +impl MetricPattern7 { + pub fn new(client: Arc, name: String) -> Self { + let name: Arc = name.into(); + Self { + client: client.clone(), + name: name.clone(), + by: MetricPattern7By { + client, + name, + _marker: std::marker::PhantomData, + } + } + } + + /// Get the metric name. + pub fn name(&self) -> &str { + &self.name + } +} + +impl AnyMetricPattern for MetricPattern7 { + fn name(&self) -> &str { + &self.name + } + + fn indexes(&self) -> &'static [Index] { + &[ + Index::EmptyOutputIndex, + Index::OpReturnIndex, + Index::P2MSOutputIndex, + Index::UnknownOutputIndex, + ] + } +} + +impl MetricPattern for MetricPattern7 { + fn get(&self, index: Index) -> Option> { + match index { + Index::EmptyOutputIndex => Some(self.by.by_emptyoutputindex()), + Index::OpReturnIndex => Some(self.by.by_opreturnindex()), + Index::P2MSOutputIndex => Some(self.by.by_p2msoutputindex()), + Index::UnknownOutputIndex => Some(self.by.by_unknownoutputindex()), + _ => None, + } + } +} + +/// Container for index endpoint methods. +pub struct MetricPattern8By { + client: Arc, + name: Arc, + _marker: std::marker::PhantomData, +} + +impl MetricPattern8By { + pub fn by_quarterindex(&self) -> Endpoint { + Endpoint::new(self.client.clone(), self.name.clone(), Index::QuarterIndex) + } + pub fn by_semesterindex(&self) -> Endpoint { + Endpoint::new(self.client.clone(), self.name.clone(), Index::SemesterIndex) + } + pub fn by_yearindex(&self) -> Endpoint { + Endpoint::new(self.client.clone(), self.name.clone(), Index::YearIndex) + } } /// Index accessor for metrics with 3 indexes. 
-pub struct Indexes23 { - pub by_quarterindex: MetricNode, - pub by_semesterindex: MetricNode, - pub by_yearindex: MetricNode, +pub struct MetricPattern8 { + client: Arc, + name: Arc, + pub by: MetricPattern8By, } -impl Indexes23 { - pub fn new(client: Arc, base_path: &str) -> Self { +impl MetricPattern8 { + pub fn new(client: Arc, name: String) -> Self { + let name: Arc = name.into(); Self { - by_quarterindex: MetricNode::new(client.clone(), format!("{base_path}/quarterindex")), - by_semesterindex: MetricNode::new(client.clone(), format!("{base_path}/semesterindex")), - by_yearindex: MetricNode::new(client.clone(), format!("{base_path}/yearindex")), + client: client.clone(), + name: name.clone(), + by: MetricPattern8By { + client, + name, + _marker: std::marker::PhantomData, + } } } + + /// Get the metric name. + pub fn name(&self) -> &str { + &self.name + } +} + +impl AnyMetricPattern for MetricPattern8 { + fn name(&self) -> &str { + &self.name + } + + fn indexes(&self) -> &'static [Index] { + &[ + Index::QuarterIndex, + Index::SemesterIndex, + Index::YearIndex, + ] + } +} + +impl MetricPattern for MetricPattern8 { + fn get(&self, index: Index) -> Option> { + match index { + Index::QuarterIndex => Some(self.by.by_quarterindex()), + Index::SemesterIndex => Some(self.by.by_semesterindex()), + Index::YearIndex => Some(self.by.by_yearindex()), + _ => None, + } + } +} + +/// Container for index endpoint methods. +pub struct MetricPattern9By { + client: Arc, + name: Arc, + _marker: std::marker::PhantomData, +} + +impl MetricPattern9By { + pub fn by_dateindex(&self) -> Endpoint { + Endpoint::new(self.client.clone(), self.name.clone(), Index::DateIndex) + } + pub fn by_height(&self) -> Endpoint { + Endpoint::new(self.client.clone(), self.name.clone(), Index::Height) + } } /// Index accessor for metrics with 2 indexes. 
-pub struct Indexes13 { - pub by_dateindex: MetricNode, - pub by_height: MetricNode, +pub struct MetricPattern9 { + client: Arc, + name: Arc, + pub by: MetricPattern9By, } -impl Indexes13 { - pub fn new(client: Arc, base_path: &str) -> Self { +impl MetricPattern9 { + pub fn new(client: Arc, name: String) -> Self { + let name: Arc = name.into(); Self { - by_dateindex: MetricNode::new(client.clone(), format!("{base_path}/dateindex")), - by_height: MetricNode::new(client.clone(), format!("{base_path}/height")), + client: client.clone(), + name: name.clone(), + by: MetricPattern9By { + client, + name, + _marker: std::marker::PhantomData, + } } } + + /// Get the metric name. + pub fn name(&self) -> &str { + &self.name + } +} + +impl AnyMetricPattern for MetricPattern9 { + fn name(&self) -> &str { + &self.name + } + + fn indexes(&self) -> &'static [Index] { + &[ + Index::DateIndex, + Index::Height, + ] + } +} + +impl MetricPattern for MetricPattern9 { + fn get(&self, index: Index) -> Option> { + match index { + Index::DateIndex => Some(self.by.by_dateindex()), + Index::Height => Some(self.by.by_height()), + _ => None, + } + } +} + +/// Container for index endpoint methods. +pub struct MetricPattern10By { + client: Arc, + name: Arc, + _marker: std::marker::PhantomData, +} + +impl MetricPattern10By { + pub fn by_dateindex(&self) -> Endpoint { + Endpoint::new(self.client.clone(), self.name.clone(), Index::DateIndex) + } + pub fn by_monthindex(&self) -> Endpoint { + Endpoint::new(self.client.clone(), self.name.clone(), Index::MonthIndex) + } } /// Index accessor for metrics with 2 indexes. 
-pub struct Indexes22 { - pub by_monthindex: MetricNode, - pub by_weekindex: MetricNode, +pub struct MetricPattern10 { + client: Arc, + name: Arc, + pub by: MetricPattern10By, } -impl Indexes22 { - pub fn new(client: Arc, base_path: &str) -> Self { +impl MetricPattern10 { + pub fn new(client: Arc, name: String) -> Self { + let name: Arc = name.into(); Self { - by_monthindex: MetricNode::new(client.clone(), format!("{base_path}/monthindex")), - by_weekindex: MetricNode::new(client.clone(), format!("{base_path}/weekindex")), + client: client.clone(), + name: name.clone(), + by: MetricPattern10By { + client, + name, + _marker: std::marker::PhantomData, + } + } + } + + /// Get the metric name. + pub fn name(&self) -> &str { + &self.name + } +} + +impl AnyMetricPattern for MetricPattern10 { + fn name(&self) -> &str { + &self.name + } + + fn indexes(&self) -> &'static [Index] { + &[ + Index::DateIndex, + Index::MonthIndex, + ] + } +} + +impl MetricPattern for MetricPattern10 { + fn get(&self, index: Index) -> Option> { + match index { + Index::DateIndex => Some(self.by.by_dateindex()), + Index::MonthIndex => Some(self.by.by_monthindex()), + _ => None, } } } -/// Index accessor for metrics with 1 indexes. -pub struct Indexes2 { - pub by_height: MetricNode, +/// Container for index endpoint methods. +pub struct MetricPattern11By { + client: Arc, + name: Arc, + _marker: std::marker::PhantomData, } -impl Indexes2 { - pub fn new(client: Arc, base_path: &str) -> Self { +impl MetricPattern11By { + pub fn by_dateindex(&self) -> Endpoint { + Endpoint::new(self.client.clone(), self.name.clone(), Index::DateIndex) + } + pub fn by_weekindex(&self) -> Endpoint { + Endpoint::new(self.client.clone(), self.name.clone(), Index::WeekIndex) + } +} + +/// Index accessor for metrics with 2 indexes. 
+pub struct MetricPattern11 { + client: Arc, + name: Arc, + pub by: MetricPattern11By, +} + +impl MetricPattern11 { + pub fn new(client: Arc, name: String) -> Self { + let name: Arc = name.into(); Self { - by_height: MetricNode::new(client.clone(), format!("{base_path}/height")), + client: client.clone(), + name: name.clone(), + by: MetricPattern11By { + client, + name, + _marker: std::marker::PhantomData, + } + } + } + + /// Get the metric name. + pub fn name(&self) -> &str { + &self.name + } +} + +impl AnyMetricPattern for MetricPattern11 { + fn name(&self) -> &str { + &self.name + } + + fn indexes(&self) -> &'static [Index] { + &[ + Index::DateIndex, + Index::WeekIndex, + ] + } +} + +impl MetricPattern for MetricPattern11 { + fn get(&self, index: Index) -> Option> { + match index { + Index::DateIndex => Some(self.by.by_dateindex()), + Index::WeekIndex => Some(self.by.by_weekindex()), + _ => None, } } } -/// Index accessor for metrics with 1 indexes. -pub struct Indexes5 { - pub by_dateindex: MetricNode, +/// Container for index endpoint methods. +pub struct MetricPattern12By { + client: Arc, + name: Arc, + _marker: std::marker::PhantomData, } -impl Indexes5 { - pub fn new(client: Arc, base_path: &str) -> Self { +impl MetricPattern12By { + pub fn by_decadeindex(&self) -> Endpoint { + Endpoint::new(self.client.clone(), self.name.clone(), Index::DecadeIndex) + } + pub fn by_yearindex(&self) -> Endpoint { + Endpoint::new(self.client.clone(), self.name.clone(), Index::YearIndex) + } +} + +/// Index accessor for metrics with 2 indexes. 
+pub struct MetricPattern12 { + client: Arc, + name: Arc, + pub by: MetricPattern12By, +} + +impl MetricPattern12 { + pub fn new(client: Arc, name: String) -> Self { + let name: Arc = name.into(); Self { - by_dateindex: MetricNode::new(client.clone(), format!("{base_path}/dateindex")), + client: client.clone(), + name: name.clone(), + by: MetricPattern12By { + client, + name, + _marker: std::marker::PhantomData, + } + } + } + + /// Get the metric name. + pub fn name(&self) -> &str { + &self.name + } +} + +impl AnyMetricPattern for MetricPattern12 { + fn name(&self) -> &str { + &self.name + } + + fn indexes(&self) -> &'static [Index] { + &[ + Index::DecadeIndex, + Index::YearIndex, + ] + } +} + +impl MetricPattern for MetricPattern12 { + fn get(&self, index: Index) -> Option> { + match index { + Index::DecadeIndex => Some(self.by.by_decadeindex()), + Index::YearIndex => Some(self.by.by_yearindex()), + _ => None, } } } -/// Index accessor for metrics with 1 indexes. -pub struct Indexes6 { - pub by_txindex: MetricNode, +/// Container for index endpoint methods. +pub struct MetricPattern13By { + client: Arc, + name: Arc, + _marker: std::marker::PhantomData, } -impl Indexes6 { - pub fn new(client: Arc, base_path: &str) -> Self { +impl MetricPattern13By { + pub fn by_difficultyepoch(&self) -> Endpoint { + Endpoint::new(self.client.clone(), self.name.clone(), Index::DifficultyEpoch) + } + pub fn by_halvingepoch(&self) -> Endpoint { + Endpoint::new(self.client.clone(), self.name.clone(), Index::HalvingEpoch) + } +} + +/// Index accessor for metrics with 2 indexes. 
+pub struct MetricPattern13 { + client: Arc, + name: Arc, + pub by: MetricPattern13By, +} + +impl MetricPattern13 { + pub fn new(client: Arc, name: String) -> Self { + let name: Arc = name.into(); Self { - by_txindex: MetricNode::new(client.clone(), format!("{base_path}/txindex")), + client: client.clone(), + name: name.clone(), + by: MetricPattern13By { + client, + name, + _marker: std::marker::PhantomData, + } + } + } + + /// Get the metric name. + pub fn name(&self) -> &str { + &self.name + } +} + +impl AnyMetricPattern for MetricPattern13 { + fn name(&self) -> &str { + &self.name + } + + fn indexes(&self) -> &'static [Index] { + &[ + Index::DifficultyEpoch, + Index::HalvingEpoch, + ] + } +} + +impl MetricPattern for MetricPattern13 { + fn get(&self, index: Index) -> Option> { + match index { + Index::DifficultyEpoch => Some(self.by.by_difficultyepoch()), + Index::HalvingEpoch => Some(self.by.by_halvingepoch()), + _ => None, } } } -/// Index accessor for metrics with 1 indexes. -pub struct Indexes7 { - pub by_decadeindex: MetricNode, +/// Container for index endpoint methods. +pub struct MetricPattern14By { + client: Arc, + name: Arc, + _marker: std::marker::PhantomData, } -impl Indexes7 { - pub fn new(client: Arc, base_path: &str) -> Self { +impl MetricPattern14By { + pub fn by_difficultyepoch(&self) -> Endpoint { + Endpoint::new(self.client.clone(), self.name.clone(), Index::DifficultyEpoch) + } + pub fn by_height(&self) -> Endpoint { + Endpoint::new(self.client.clone(), self.name.clone(), Index::Height) + } +} + +/// Index accessor for metrics with 2 indexes. 
+pub struct MetricPattern14 { + client: Arc, + name: Arc, + pub by: MetricPattern14By, +} + +impl MetricPattern14 { + pub fn new(client: Arc, name: String) -> Self { + let name: Arc = name.into(); Self { - by_decadeindex: MetricNode::new(client.clone(), format!("{base_path}/decadeindex")), + client: client.clone(), + name: name.clone(), + by: MetricPattern14By { + client, + name, + _marker: std::marker::PhantomData, + } + } + } + + /// Get the metric name. + pub fn name(&self) -> &str { + &self.name + } +} + +impl AnyMetricPattern for MetricPattern14 { + fn name(&self) -> &str { + &self.name + } + + fn indexes(&self) -> &'static [Index] { + &[ + Index::DifficultyEpoch, + Index::Height, + ] + } +} + +impl MetricPattern for MetricPattern14 { + fn get(&self, index: Index) -> Option> { + match index { + Index::DifficultyEpoch => Some(self.by.by_difficultyepoch()), + Index::Height => Some(self.by.by_height()), + _ => None, } } } -/// Index accessor for metrics with 1 indexes. -pub struct Indexes8 { - pub by_monthindex: MetricNode, +/// Container for index endpoint methods. +pub struct MetricPattern15By { + client: Arc, + name: Arc, + _marker: std::marker::PhantomData, } -impl Indexes8 { - pub fn new(client: Arc, base_path: &str) -> Self { +impl MetricPattern15By { + pub fn by_halvingepoch(&self) -> Endpoint { + Endpoint::new(self.client.clone(), self.name.clone(), Index::HalvingEpoch) + } + pub fn by_height(&self) -> Endpoint { + Endpoint::new(self.client.clone(), self.name.clone(), Index::Height) + } +} + +/// Index accessor for metrics with 2 indexes. 
+pub struct MetricPattern15 { + client: Arc, + name: Arc, + pub by: MetricPattern15By, +} + +impl MetricPattern15 { + pub fn new(client: Arc, name: String) -> Self { + let name: Arc = name.into(); Self { - by_monthindex: MetricNode::new(client.clone(), format!("{base_path}/monthindex")), + client: client.clone(), + name: name.clone(), + by: MetricPattern15By { + client, + name, + _marker: std::marker::PhantomData, + } + } + } + + /// Get the metric name. + pub fn name(&self) -> &str { + &self.name + } +} + +impl AnyMetricPattern for MetricPattern15 { + fn name(&self) -> &str { + &self.name + } + + fn indexes(&self) -> &'static [Index] { + &[ + Index::HalvingEpoch, + Index::Height, + ] + } +} + +impl MetricPattern for MetricPattern15 { + fn get(&self, index: Index) -> Option> { + match index { + Index::HalvingEpoch => Some(self.by.by_halvingepoch()), + Index::Height => Some(self.by.by_height()), + _ => None, } } } -/// Index accessor for metrics with 1 indexes. -pub struct Indexes9 { - pub by_quarterindex: MetricNode, +/// Container for index endpoint methods. +pub struct MetricPattern16By { + client: Arc, + name: Arc, + _marker: std::marker::PhantomData, } -impl Indexes9 { - pub fn new(client: Arc, base_path: &str) -> Self { +impl MetricPattern16By { + pub fn by_height(&self) -> Endpoint { + Endpoint::new(self.client.clone(), self.name.clone(), Index::Height) + } + pub fn by_txindex(&self) -> Endpoint { + Endpoint::new(self.client.clone(), self.name.clone(), Index::TxIndex) + } +} + +/// Index accessor for metrics with 2 indexes. 
+pub struct MetricPattern16 { + client: Arc, + name: Arc, + pub by: MetricPattern16By, +} + +impl MetricPattern16 { + pub fn new(client: Arc, name: String) -> Self { + let name: Arc = name.into(); Self { - by_quarterindex: MetricNode::new(client.clone(), format!("{base_path}/quarterindex")), + client: client.clone(), + name: name.clone(), + by: MetricPattern16By { + client, + name, + _marker: std::marker::PhantomData, + } + } + } + + /// Get the metric name. + pub fn name(&self) -> &str { + &self.name + } +} + +impl AnyMetricPattern for MetricPattern16 { + fn name(&self) -> &str { + &self.name + } + + fn indexes(&self) -> &'static [Index] { + &[ + Index::Height, + Index::TxIndex, + ] + } +} + +impl MetricPattern for MetricPattern16 { + fn get(&self, index: Index) -> Option> { + match index { + Index::Height => Some(self.by.by_height()), + Index::TxIndex => Some(self.by.by_txindex()), + _ => None, } } } -/// Index accessor for metrics with 1 indexes. -pub struct Indexes10 { - pub by_semesterindex: MetricNode, +/// Container for index endpoint methods. +pub struct MetricPattern17By { + client: Arc, + name: Arc, + _marker: std::marker::PhantomData, } -impl Indexes10 { - pub fn new(client: Arc, base_path: &str) -> Self { +impl MetricPattern17By { + pub fn by_monthindex(&self) -> Endpoint { + Endpoint::new(self.client.clone(), self.name.clone(), Index::MonthIndex) + } + pub fn by_quarterindex(&self) -> Endpoint { + Endpoint::new(self.client.clone(), self.name.clone(), Index::QuarterIndex) + } +} + +/// Index accessor for metrics with 2 indexes. 
+pub struct MetricPattern17 { + client: Arc, + name: Arc, + pub by: MetricPattern17By, +} + +impl MetricPattern17 { + pub fn new(client: Arc, name: String) -> Self { + let name: Arc = name.into(); Self { - by_semesterindex: MetricNode::new(client.clone(), format!("{base_path}/semesterindex")), + client: client.clone(), + name: name.clone(), + by: MetricPattern17By { + client, + name, + _marker: std::marker::PhantomData, + } + } + } + + /// Get the metric name. + pub fn name(&self) -> &str { + &self.name + } +} + +impl AnyMetricPattern for MetricPattern17 { + fn name(&self) -> &str { + &self.name + } + + fn indexes(&self) -> &'static [Index] { + &[ + Index::MonthIndex, + Index::QuarterIndex, + ] + } +} + +impl MetricPattern for MetricPattern17 { + fn get(&self, index: Index) -> Option> { + match index { + Index::MonthIndex => Some(self.by.by_monthindex()), + Index::QuarterIndex => Some(self.by.by_quarterindex()), + _ => None, } } } -/// Index accessor for metrics with 1 indexes. -pub struct Indexes11 { - pub by_weekindex: MetricNode, +/// Container for index endpoint methods. +pub struct MetricPattern18By { + client: Arc, + name: Arc, + _marker: std::marker::PhantomData, } -impl Indexes11 { - pub fn new(client: Arc, base_path: &str) -> Self { +impl MetricPattern18By { + pub fn by_monthindex(&self) -> Endpoint { + Endpoint::new(self.client.clone(), self.name.clone(), Index::MonthIndex) + } + pub fn by_semesterindex(&self) -> Endpoint { + Endpoint::new(self.client.clone(), self.name.clone(), Index::SemesterIndex) + } +} + +/// Index accessor for metrics with 2 indexes. 
+pub struct MetricPattern18 { + client: Arc, + name: Arc, + pub by: MetricPattern18By, +} + +impl MetricPattern18 { + pub fn new(client: Arc, name: String) -> Self { + let name: Arc = name.into(); Self { - by_weekindex: MetricNode::new(client.clone(), format!("{base_path}/weekindex")), + client: client.clone(), + name: name.clone(), + by: MetricPattern18By { + client, + name, + _marker: std::marker::PhantomData, + } + } + } + + /// Get the metric name. + pub fn name(&self) -> &str { + &self.name + } +} + +impl AnyMetricPattern for MetricPattern18 { + fn name(&self) -> &str { + &self.name + } + + fn indexes(&self) -> &'static [Index] { + &[ + Index::MonthIndex, + Index::SemesterIndex, + ] + } +} + +impl MetricPattern for MetricPattern18 { + fn get(&self, index: Index) -> Option> { + match index { + Index::MonthIndex => Some(self.by.by_monthindex()), + Index::SemesterIndex => Some(self.by.by_semesterindex()), + _ => None, } } } -/// Index accessor for metrics with 1 indexes. -pub struct Indexes12 { - pub by_yearindex: MetricNode, +/// Container for index endpoint methods. +pub struct MetricPattern19By { + client: Arc, + name: Arc, + _marker: std::marker::PhantomData, } -impl Indexes12 { - pub fn new(client: Arc, base_path: &str) -> Self { +impl MetricPattern19By { + pub fn by_monthindex(&self) -> Endpoint { + Endpoint::new(self.client.clone(), self.name.clone(), Index::MonthIndex) + } + pub fn by_weekindex(&self) -> Endpoint { + Endpoint::new(self.client.clone(), self.name.clone(), Index::WeekIndex) + } +} + +/// Index accessor for metrics with 2 indexes. 
+pub struct MetricPattern19 { + client: Arc, + name: Arc, + pub by: MetricPattern19By, +} + +impl MetricPattern19 { + pub fn new(client: Arc, name: String) -> Self { + let name: Arc = name.into(); Self { - by_yearindex: MetricNode::new(client.clone(), format!("{base_path}/yearindex")), + client: client.clone(), + name: name.clone(), + by: MetricPattern19By { + client, + name, + _marker: std::marker::PhantomData, + } + } + } + + /// Get the metric name. + pub fn name(&self) -> &str { + &self.name + } +} + +impl AnyMetricPattern for MetricPattern19 { + fn name(&self) -> &str { + &self.name + } + + fn indexes(&self) -> &'static [Index] { + &[ + Index::MonthIndex, + Index::WeekIndex, + ] + } +} + +impl MetricPattern for MetricPattern19 { + fn get(&self, index: Index) -> Option> { + match index { + Index::MonthIndex => Some(self.by.by_monthindex()), + Index::WeekIndex => Some(self.by.by_weekindex()), + _ => None, } } } -/// Index accessor for metrics with 1 indexes. -pub struct Indexes14 { - pub by_p2aaddressindex: MetricNode, +/// Container for index endpoint methods. +pub struct MetricPattern20By { + client: Arc, + name: Arc, + _marker: std::marker::PhantomData, } -impl Indexes14 { - pub fn new(client: Arc, base_path: &str) -> Self { +impl MetricPattern20By { + pub fn by_monthindex(&self) -> Endpoint { + Endpoint::new(self.client.clone(), self.name.clone(), Index::MonthIndex) + } + pub fn by_yearindex(&self) -> Endpoint { + Endpoint::new(self.client.clone(), self.name.clone(), Index::YearIndex) + } +} + +/// Index accessor for metrics with 2 indexes. 
+pub struct MetricPattern20 { + client: Arc, + name: Arc, + pub by: MetricPattern20By, +} + +impl MetricPattern20 { + pub fn new(client: Arc, name: String) -> Self { + let name: Arc = name.into(); Self { - by_p2aaddressindex: MetricNode::new(client.clone(), format!("{base_path}/p2aaddressindex")), + client: client.clone(), + name: name.clone(), + by: MetricPattern20By { + client, + name, + _marker: std::marker::PhantomData, + } + } + } + + /// Get the metric name. + pub fn name(&self) -> &str { + &self.name + } +} + +impl AnyMetricPattern for MetricPattern20 { + fn name(&self) -> &str { + &self.name + } + + fn indexes(&self) -> &'static [Index] { + &[ + Index::MonthIndex, + Index::YearIndex, + ] + } +} + +impl MetricPattern for MetricPattern20 { + fn get(&self, index: Index) -> Option> { + match index { + Index::MonthIndex => Some(self.by.by_monthindex()), + Index::YearIndex => Some(self.by.by_yearindex()), + _ => None, } } } -/// Index accessor for metrics with 1 indexes. -pub struct Indexes15 { - pub by_p2pk33addressindex: MetricNode, +/// Container for index endpoint methods. +pub struct MetricPattern21By { + client: Arc, + name: Arc, + _marker: std::marker::PhantomData, } -impl Indexes15 { - pub fn new(client: Arc, base_path: &str) -> Self { - Self { - by_p2pk33addressindex: MetricNode::new(client.clone(), format!("{base_path}/p2pk33addressindex")), - } +impl MetricPattern21By { + pub fn by_dateindex(&self) -> Endpoint { + Endpoint::new(self.client.clone(), self.name.clone(), Index::DateIndex) } } /// Index accessor for metrics with 1 indexes. 
-pub struct Indexes16 { - pub by_p2pk65addressindex: MetricNode, +pub struct MetricPattern21 { + client: Arc, + name: Arc, + pub by: MetricPattern21By, } -impl Indexes16 { - pub fn new(client: Arc, base_path: &str) -> Self { +impl MetricPattern21 { + pub fn new(client: Arc, name: String) -> Self { + let name: Arc = name.into(); Self { - by_p2pk65addressindex: MetricNode::new(client.clone(), format!("{base_path}/p2pk65addressindex")), + client: client.clone(), + name: name.clone(), + by: MetricPattern21By { + client, + name, + _marker: std::marker::PhantomData, + } + } + } + + /// Get the metric name. + pub fn name(&self) -> &str { + &self.name + } +} + +impl AnyMetricPattern for MetricPattern21 { + fn name(&self) -> &str { + &self.name + } + + fn indexes(&self) -> &'static [Index] { + &[ + Index::DateIndex, + ] + } +} + +impl MetricPattern for MetricPattern21 { + fn get(&self, index: Index) -> Option> { + match index { + Index::DateIndex => Some(self.by.by_dateindex()), + _ => None, } } } -/// Index accessor for metrics with 1 indexes. -pub struct Indexes17 { - pub by_p2pkhaddressindex: MetricNode, +/// Container for index endpoint methods. +pub struct MetricPattern22By { + client: Arc, + name: Arc, + _marker: std::marker::PhantomData, } -impl Indexes17 { - pub fn new(client: Arc, base_path: &str) -> Self { - Self { - by_p2pkhaddressindex: MetricNode::new(client.clone(), format!("{base_path}/p2pkhaddressindex")), - } +impl MetricPattern22By { + pub fn by_decadeindex(&self) -> Endpoint { + Endpoint::new(self.client.clone(), self.name.clone(), Index::DecadeIndex) } } /// Index accessor for metrics with 1 indexes. 
-pub struct Indexes18 { - pub by_p2shaddressindex: MetricNode, +pub struct MetricPattern22 { + client: Arc, + name: Arc, + pub by: MetricPattern22By, } -impl Indexes18 { - pub fn new(client: Arc, base_path: &str) -> Self { +impl MetricPattern22 { + pub fn new(client: Arc, name: String) -> Self { + let name: Arc = name.into(); Self { - by_p2shaddressindex: MetricNode::new(client.clone(), format!("{base_path}/p2shaddressindex")), + client: client.clone(), + name: name.clone(), + by: MetricPattern22By { + client, + name, + _marker: std::marker::PhantomData, + } + } + } + + /// Get the metric name. + pub fn name(&self) -> &str { + &self.name + } +} + +impl AnyMetricPattern for MetricPattern22 { + fn name(&self) -> &str { + &self.name + } + + fn indexes(&self) -> &'static [Index] { + &[ + Index::DecadeIndex, + ] + } +} + +impl MetricPattern for MetricPattern22 { + fn get(&self, index: Index) -> Option> { + match index { + Index::DecadeIndex => Some(self.by.by_decadeindex()), + _ => None, } } } -/// Index accessor for metrics with 1 indexes. -pub struct Indexes19 { - pub by_p2traddressindex: MetricNode, +/// Container for index endpoint methods. +pub struct MetricPattern23By { + client: Arc, + name: Arc, + _marker: std::marker::PhantomData, } -impl Indexes19 { - pub fn new(client: Arc, base_path: &str) -> Self { - Self { - by_p2traddressindex: MetricNode::new(client.clone(), format!("{base_path}/p2traddressindex")), - } +impl MetricPattern23By { + pub fn by_difficultyepoch(&self) -> Endpoint { + Endpoint::new(self.client.clone(), self.name.clone(), Index::DifficultyEpoch) } } /// Index accessor for metrics with 1 indexes. 
-pub struct Indexes20 { - pub by_p2wpkhaddressindex: MetricNode, +pub struct MetricPattern23 { + client: Arc, + name: Arc, + pub by: MetricPattern23By, } -impl Indexes20 { - pub fn new(client: Arc, base_path: &str) -> Self { +impl MetricPattern23 { + pub fn new(client: Arc, name: String) -> Self { + let name: Arc = name.into(); Self { - by_p2wpkhaddressindex: MetricNode::new(client.clone(), format!("{base_path}/p2wpkhaddressindex")), + client: client.clone(), + name: name.clone(), + by: MetricPattern23By { + client, + name, + _marker: std::marker::PhantomData, + } + } + } + + /// Get the metric name. + pub fn name(&self) -> &str { + &self.name + } +} + +impl AnyMetricPattern for MetricPattern23 { + fn name(&self) -> &str { + &self.name + } + + fn indexes(&self) -> &'static [Index] { + &[ + Index::DifficultyEpoch, + ] + } +} + +impl MetricPattern for MetricPattern23 { + fn get(&self, index: Index) -> Option> { + match index { + Index::DifficultyEpoch => Some(self.by.by_difficultyepoch()), + _ => None, } } } -/// Index accessor for metrics with 1 indexes. -pub struct Indexes21 { - pub by_p2wshaddressindex: MetricNode, +/// Container for index endpoint methods. +pub struct MetricPattern24By { + client: Arc, + name: Arc, + _marker: std::marker::PhantomData, } -impl Indexes21 { - pub fn new(client: Arc, base_path: &str) -> Self { - Self { - by_p2wshaddressindex: MetricNode::new(client.clone(), format!("{base_path}/p2wshaddressindex")), - } +impl MetricPattern24By { + pub fn by_emptyoutputindex(&self) -> Endpoint { + Endpoint::new(self.client.clone(), self.name.clone(), Index::EmptyOutputIndex) } } /// Index accessor for metrics with 1 indexes. 
-pub struct Indexes24 { - pub by_txinindex: MetricNode, +pub struct MetricPattern24 { + client: Arc, + name: Arc, + pub by: MetricPattern24By, } -impl Indexes24 { - pub fn new(client: Arc, base_path: &str) -> Self { +impl MetricPattern24 { + pub fn new(client: Arc, name: String) -> Self { + let name: Arc = name.into(); Self { - by_txinindex: MetricNode::new(client.clone(), format!("{base_path}/txinindex")), + client: client.clone(), + name: name.clone(), + by: MetricPattern24By { + client, + name, + _marker: std::marker::PhantomData, + } + } + } + + /// Get the metric name. + pub fn name(&self) -> &str { + &self.name + } +} + +impl AnyMetricPattern for MetricPattern24 { + fn name(&self) -> &str { + &self.name + } + + fn indexes(&self) -> &'static [Index] { + &[ + Index::EmptyOutputIndex, + ] + } +} + +impl MetricPattern for MetricPattern24 { + fn get(&self, index: Index) -> Option> { + match index { + Index::EmptyOutputIndex => Some(self.by.by_emptyoutputindex()), + _ => None, } } } -/// Index accessor for metrics with 1 indexes. -pub struct Indexes25 { - pub by_txoutindex: MetricNode, +/// Container for index endpoint methods. +pub struct MetricPattern25By { + client: Arc, + name: Arc, + _marker: std::marker::PhantomData, } -impl Indexes25 { - pub fn new(client: Arc, base_path: &str) -> Self { - Self { - by_txoutindex: MetricNode::new(client.clone(), format!("{base_path}/txoutindex")), - } +impl MetricPattern25By { + pub fn by_height(&self) -> Endpoint { + Endpoint::new(self.client.clone(), self.name.clone(), Index::Height) } } /// Index accessor for metrics with 1 indexes. 
-pub struct Indexes29 { - pub by_emptyaddressindex: MetricNode, +pub struct MetricPattern25 { + client: Arc, + name: Arc, + pub by: MetricPattern25By, } -impl Indexes29 { - pub fn new(client: Arc, base_path: &str) -> Self { +impl MetricPattern25 { + pub fn new(client: Arc, name: String) -> Self { + let name: Arc = name.into(); Self { - by_emptyaddressindex: MetricNode::new(client.clone(), format!("{base_path}/emptyaddressindex")), + client: client.clone(), + name: name.clone(), + by: MetricPattern25By { + client, + name, + _marker: std::marker::PhantomData, + } + } + } + + /// Get the metric name. + pub fn name(&self) -> &str { + &self.name + } +} + +impl AnyMetricPattern for MetricPattern25 { + fn name(&self) -> &str { + &self.name + } + + fn indexes(&self) -> &'static [Index] { + &[ + Index::Height, + ] + } +} + +impl MetricPattern for MetricPattern25 { + fn get(&self, index: Index) -> Option> { + match index { + Index::Height => Some(self.by.by_height()), + _ => None, } } } -/// Index accessor for metrics with 1 indexes. -pub struct Indexes30 { - pub by_loadedaddressindex: MetricNode, +/// Container for index endpoint methods. +pub struct MetricPattern26By { + client: Arc, + name: Arc, + _marker: std::marker::PhantomData, } -impl Indexes30 { - pub fn new(client: Arc, base_path: &str) -> Self { +impl MetricPattern26By { + pub fn by_txinindex(&self) -> Endpoint { + Endpoint::new(self.client.clone(), self.name.clone(), Index::TxInIndex) + } +} + +/// Index accessor for metrics with 1 indexes. +pub struct MetricPattern26 { + client: Arc, + name: Arc, + pub by: MetricPattern26By, +} + +impl MetricPattern26 { + pub fn new(client: Arc, name: String) -> Self { + let name: Arc = name.into(); Self { - by_loadedaddressindex: MetricNode::new(client.clone(), format!("{base_path}/loadedaddressindex")), + client: client.clone(), + name: name.clone(), + by: MetricPattern26By { + client, + name, + _marker: std::marker::PhantomData, + } + } + } + + /// Get the metric name. 
+ pub fn name(&self) -> &str { + &self.name + } +} + +impl AnyMetricPattern for MetricPattern26 { + fn name(&self) -> &str { + &self.name + } + + fn indexes(&self) -> &'static [Index] { + &[ + Index::TxInIndex, + ] + } +} + +impl MetricPattern for MetricPattern26 { + fn get(&self, index: Index) -> Option> { + match index { + Index::TxInIndex => Some(self.by.by_txinindex()), + _ => None, + } + } +} + +/// Container for index endpoint methods. +pub struct MetricPattern27By { + client: Arc, + name: Arc, + _marker: std::marker::PhantomData, +} + +impl MetricPattern27By { + pub fn by_monthindex(&self) -> Endpoint { + Endpoint::new(self.client.clone(), self.name.clone(), Index::MonthIndex) + } +} + +/// Index accessor for metrics with 1 indexes. +pub struct MetricPattern27 { + client: Arc, + name: Arc, + pub by: MetricPattern27By, +} + +impl MetricPattern27 { + pub fn new(client: Arc, name: String) -> Self { + let name: Arc = name.into(); + Self { + client: client.clone(), + name: name.clone(), + by: MetricPattern27By { + client, + name, + _marker: std::marker::PhantomData, + } + } + } + + /// Get the metric name. + pub fn name(&self) -> &str { + &self.name + } +} + +impl AnyMetricPattern for MetricPattern27 { + fn name(&self) -> &str { + &self.name + } + + fn indexes(&self) -> &'static [Index] { + &[ + Index::MonthIndex, + ] + } +} + +impl MetricPattern for MetricPattern27 { + fn get(&self, index: Index) -> Option> { + match index { + Index::MonthIndex => Some(self.by.by_monthindex()), + _ => None, + } + } +} + +/// Container for index endpoint methods. +pub struct MetricPattern28By { + client: Arc, + name: Arc, + _marker: std::marker::PhantomData, +} + +impl MetricPattern28By { + pub fn by_opreturnindex(&self) -> Endpoint { + Endpoint::new(self.client.clone(), self.name.clone(), Index::OpReturnIndex) + } +} + +/// Index accessor for metrics with 1 indexes. 
+pub struct MetricPattern28 { + client: Arc, + name: Arc, + pub by: MetricPattern28By, +} + +impl MetricPattern28 { + pub fn new(client: Arc, name: String) -> Self { + let name: Arc = name.into(); + Self { + client: client.clone(), + name: name.clone(), + by: MetricPattern28By { + client, + name, + _marker: std::marker::PhantomData, + } + } + } + + /// Get the metric name. + pub fn name(&self) -> &str { + &self.name + } +} + +impl AnyMetricPattern for MetricPattern28 { + fn name(&self) -> &str { + &self.name + } + + fn indexes(&self) -> &'static [Index] { + &[ + Index::OpReturnIndex, + ] + } +} + +impl MetricPattern for MetricPattern28 { + fn get(&self, index: Index) -> Option> { + match index { + Index::OpReturnIndex => Some(self.by.by_opreturnindex()), + _ => None, + } + } +} + +/// Container for index endpoint methods. +pub struct MetricPattern29By { + client: Arc, + name: Arc, + _marker: std::marker::PhantomData, +} + +impl MetricPattern29By { + pub fn by_txoutindex(&self) -> Endpoint { + Endpoint::new(self.client.clone(), self.name.clone(), Index::TxOutIndex) + } +} + +/// Index accessor for metrics with 1 indexes. +pub struct MetricPattern29 { + client: Arc, + name: Arc, + pub by: MetricPattern29By, +} + +impl MetricPattern29 { + pub fn new(client: Arc, name: String) -> Self { + let name: Arc = name.into(); + Self { + client: client.clone(), + name: name.clone(), + by: MetricPattern29By { + client, + name, + _marker: std::marker::PhantomData, + } + } + } + + /// Get the metric name. + pub fn name(&self) -> &str { + &self.name + } +} + +impl AnyMetricPattern for MetricPattern29 { + fn name(&self) -> &str { + &self.name + } + + fn indexes(&self) -> &'static [Index] { + &[ + Index::TxOutIndex, + ] + } +} + +impl MetricPattern for MetricPattern29 { + fn get(&self, index: Index) -> Option> { + match index { + Index::TxOutIndex => Some(self.by.by_txoutindex()), + _ => None, + } + } +} + +/// Container for index endpoint methods. 
+pub struct MetricPattern30By { + client: Arc, + name: Arc, + _marker: std::marker::PhantomData, +} + +impl MetricPattern30By { + pub fn by_p2aaddressindex(&self) -> Endpoint { + Endpoint::new(self.client.clone(), self.name.clone(), Index::P2AAddressIndex) + } +} + +/// Index accessor for metrics with 1 indexes. +pub struct MetricPattern30 { + client: Arc, + name: Arc, + pub by: MetricPattern30By, +} + +impl MetricPattern30 { + pub fn new(client: Arc, name: String) -> Self { + let name: Arc = name.into(); + Self { + client: client.clone(), + name: name.clone(), + by: MetricPattern30By { + client, + name, + _marker: std::marker::PhantomData, + } + } + } + + /// Get the metric name. + pub fn name(&self) -> &str { + &self.name + } +} + +impl AnyMetricPattern for MetricPattern30 { + fn name(&self) -> &str { + &self.name + } + + fn indexes(&self) -> &'static [Index] { + &[ + Index::P2AAddressIndex, + ] + } +} + +impl MetricPattern for MetricPattern30 { + fn get(&self, index: Index) -> Option> { + match index { + Index::P2AAddressIndex => Some(self.by.by_p2aaddressindex()), + _ => None, + } + } +} + +/// Container for index endpoint methods. +pub struct MetricPattern31By { + client: Arc, + name: Arc, + _marker: std::marker::PhantomData, +} + +impl MetricPattern31By { + pub fn by_p2msoutputindex(&self) -> Endpoint { + Endpoint::new(self.client.clone(), self.name.clone(), Index::P2MSOutputIndex) + } +} + +/// Index accessor for metrics with 1 indexes. +pub struct MetricPattern31 { + client: Arc, + name: Arc, + pub by: MetricPattern31By, +} + +impl MetricPattern31 { + pub fn new(client: Arc, name: String) -> Self { + let name: Arc = name.into(); + Self { + client: client.clone(), + name: name.clone(), + by: MetricPattern31By { + client, + name, + _marker: std::marker::PhantomData, + } + } + } + + /// Get the metric name. 
+ pub fn name(&self) -> &str { + &self.name + } +} + +impl AnyMetricPattern for MetricPattern31 { + fn name(&self) -> &str { + &self.name + } + + fn indexes(&self) -> &'static [Index] { + &[ + Index::P2MSOutputIndex, + ] + } +} + +impl MetricPattern for MetricPattern31 { + fn get(&self, index: Index) -> Option> { + match index { + Index::P2MSOutputIndex => Some(self.by.by_p2msoutputindex()), + _ => None, + } + } +} + +/// Container for index endpoint methods. +pub struct MetricPattern32By { + client: Arc, + name: Arc, + _marker: std::marker::PhantomData, +} + +impl MetricPattern32By { + pub fn by_p2pk33addressindex(&self) -> Endpoint { + Endpoint::new(self.client.clone(), self.name.clone(), Index::P2PK33AddressIndex) + } +} + +/// Index accessor for metrics with 1 indexes. +pub struct MetricPattern32 { + client: Arc, + name: Arc, + pub by: MetricPattern32By, +} + +impl MetricPattern32 { + pub fn new(client: Arc, name: String) -> Self { + let name: Arc = name.into(); + Self { + client: client.clone(), + name: name.clone(), + by: MetricPattern32By { + client, + name, + _marker: std::marker::PhantomData, + } + } + } + + /// Get the metric name. + pub fn name(&self) -> &str { + &self.name + } +} + +impl AnyMetricPattern for MetricPattern32 { + fn name(&self) -> &str { + &self.name + } + + fn indexes(&self) -> &'static [Index] { + &[ + Index::P2PK33AddressIndex, + ] + } +} + +impl MetricPattern for MetricPattern32 { + fn get(&self, index: Index) -> Option> { + match index { + Index::P2PK33AddressIndex => Some(self.by.by_p2pk33addressindex()), + _ => None, + } + } +} + +/// Container for index endpoint methods. +pub struct MetricPattern33By { + client: Arc, + name: Arc, + _marker: std::marker::PhantomData, +} + +impl MetricPattern33By { + pub fn by_p2pk65addressindex(&self) -> Endpoint { + Endpoint::new(self.client.clone(), self.name.clone(), Index::P2PK65AddressIndex) + } +} + +/// Index accessor for metrics with 1 indexes. 
+pub struct MetricPattern33 { + client: Arc, + name: Arc, + pub by: MetricPattern33By, +} + +impl MetricPattern33 { + pub fn new(client: Arc, name: String) -> Self { + let name: Arc = name.into(); + Self { + client: client.clone(), + name: name.clone(), + by: MetricPattern33By { + client, + name, + _marker: std::marker::PhantomData, + } + } + } + + /// Get the metric name. + pub fn name(&self) -> &str { + &self.name + } +} + +impl AnyMetricPattern for MetricPattern33 { + fn name(&self) -> &str { + &self.name + } + + fn indexes(&self) -> &'static [Index] { + &[ + Index::P2PK65AddressIndex, + ] + } +} + +impl MetricPattern for MetricPattern33 { + fn get(&self, index: Index) -> Option> { + match index { + Index::P2PK65AddressIndex => Some(self.by.by_p2pk65addressindex()), + _ => None, + } + } +} + +/// Container for index endpoint methods. +pub struct MetricPattern34By { + client: Arc, + name: Arc, + _marker: std::marker::PhantomData, +} + +impl MetricPattern34By { + pub fn by_p2pkhaddressindex(&self) -> Endpoint { + Endpoint::new(self.client.clone(), self.name.clone(), Index::P2PKHAddressIndex) + } +} + +/// Index accessor for metrics with 1 indexes. +pub struct MetricPattern34 { + client: Arc, + name: Arc, + pub by: MetricPattern34By, +} + +impl MetricPattern34 { + pub fn new(client: Arc, name: String) -> Self { + let name: Arc = name.into(); + Self { + client: client.clone(), + name: name.clone(), + by: MetricPattern34By { + client, + name, + _marker: std::marker::PhantomData, + } + } + } + + /// Get the metric name. 
+ pub fn name(&self) -> &str { + &self.name + } +} + +impl AnyMetricPattern for MetricPattern34 { + fn name(&self) -> &str { + &self.name + } + + fn indexes(&self) -> &'static [Index] { + &[ + Index::P2PKHAddressIndex, + ] + } +} + +impl MetricPattern for MetricPattern34 { + fn get(&self, index: Index) -> Option> { + match index { + Index::P2PKHAddressIndex => Some(self.by.by_p2pkhaddressindex()), + _ => None, + } + } +} + +/// Container for index endpoint methods. +pub struct MetricPattern35By { + client: Arc, + name: Arc, + _marker: std::marker::PhantomData, +} + +impl MetricPattern35By { + pub fn by_p2shaddressindex(&self) -> Endpoint { + Endpoint::new(self.client.clone(), self.name.clone(), Index::P2SHAddressIndex) + } +} + +/// Index accessor for metrics with 1 indexes. +pub struct MetricPattern35 { + client: Arc, + name: Arc, + pub by: MetricPattern35By, +} + +impl MetricPattern35 { + pub fn new(client: Arc, name: String) -> Self { + let name: Arc = name.into(); + Self { + client: client.clone(), + name: name.clone(), + by: MetricPattern35By { + client, + name, + _marker: std::marker::PhantomData, + } + } + } + + /// Get the metric name. + pub fn name(&self) -> &str { + &self.name + } +} + +impl AnyMetricPattern for MetricPattern35 { + fn name(&self) -> &str { + &self.name + } + + fn indexes(&self) -> &'static [Index] { + &[ + Index::P2SHAddressIndex, + ] + } +} + +impl MetricPattern for MetricPattern35 { + fn get(&self, index: Index) -> Option> { + match index { + Index::P2SHAddressIndex => Some(self.by.by_p2shaddressindex()), + _ => None, + } + } +} + +/// Container for index endpoint methods. +pub struct MetricPattern36By { + client: Arc, + name: Arc, + _marker: std::marker::PhantomData, +} + +impl MetricPattern36By { + pub fn by_p2traddressindex(&self) -> Endpoint { + Endpoint::new(self.client.clone(), self.name.clone(), Index::P2TRAddressIndex) + } +} + +/// Index accessor for metrics with 1 indexes. 
+pub struct MetricPattern36 { + client: Arc, + name: Arc, + pub by: MetricPattern36By, +} + +impl MetricPattern36 { + pub fn new(client: Arc, name: String) -> Self { + let name: Arc = name.into(); + Self { + client: client.clone(), + name: name.clone(), + by: MetricPattern36By { + client, + name, + _marker: std::marker::PhantomData, + } + } + } + + /// Get the metric name. + pub fn name(&self) -> &str { + &self.name + } +} + +impl AnyMetricPattern for MetricPattern36 { + fn name(&self) -> &str { + &self.name + } + + fn indexes(&self) -> &'static [Index] { + &[ + Index::P2TRAddressIndex, + ] + } +} + +impl MetricPattern for MetricPattern36 { + fn get(&self, index: Index) -> Option> { + match index { + Index::P2TRAddressIndex => Some(self.by.by_p2traddressindex()), + _ => None, + } + } +} + +/// Container for index endpoint methods. +pub struct MetricPattern37By { + client: Arc, + name: Arc, + _marker: std::marker::PhantomData, +} + +impl MetricPattern37By { + pub fn by_p2wpkhaddressindex(&self) -> Endpoint { + Endpoint::new(self.client.clone(), self.name.clone(), Index::P2WPKHAddressIndex) + } +} + +/// Index accessor for metrics with 1 indexes. +pub struct MetricPattern37 { + client: Arc, + name: Arc, + pub by: MetricPattern37By, +} + +impl MetricPattern37 { + pub fn new(client: Arc, name: String) -> Self { + let name: Arc = name.into(); + Self { + client: client.clone(), + name: name.clone(), + by: MetricPattern37By { + client, + name, + _marker: std::marker::PhantomData, + } + } + } + + /// Get the metric name. 
+ pub fn name(&self) -> &str { + &self.name + } +} + +impl AnyMetricPattern for MetricPattern37 { + fn name(&self) -> &str { + &self.name + } + + fn indexes(&self) -> &'static [Index] { + &[ + Index::P2WPKHAddressIndex, + ] + } +} + +impl MetricPattern for MetricPattern37 { + fn get(&self, index: Index) -> Option> { + match index { + Index::P2WPKHAddressIndex => Some(self.by.by_p2wpkhaddressindex()), + _ => None, + } + } +} + +/// Container for index endpoint methods. +pub struct MetricPattern38By { + client: Arc, + name: Arc, + _marker: std::marker::PhantomData, +} + +impl MetricPattern38By { + pub fn by_p2wshaddressindex(&self) -> Endpoint { + Endpoint::new(self.client.clone(), self.name.clone(), Index::P2WSHAddressIndex) + } +} + +/// Index accessor for metrics with 1 indexes. +pub struct MetricPattern38 { + client: Arc, + name: Arc, + pub by: MetricPattern38By, +} + +impl MetricPattern38 { + pub fn new(client: Arc, name: String) -> Self { + let name: Arc = name.into(); + Self { + client: client.clone(), + name: name.clone(), + by: MetricPattern38By { + client, + name, + _marker: std::marker::PhantomData, + } + } + } + + /// Get the metric name. + pub fn name(&self) -> &str { + &self.name + } +} + +impl AnyMetricPattern for MetricPattern38 { + fn name(&self) -> &str { + &self.name + } + + fn indexes(&self) -> &'static [Index] { + &[ + Index::P2WSHAddressIndex, + ] + } +} + +impl MetricPattern for MetricPattern38 { + fn get(&self, index: Index) -> Option> { + match index { + Index::P2WSHAddressIndex => Some(self.by.by_p2wshaddressindex()), + _ => None, + } + } +} + +/// Container for index endpoint methods. +pub struct MetricPattern39By { + client: Arc, + name: Arc, + _marker: std::marker::PhantomData, +} + +impl MetricPattern39By { + pub fn by_quarterindex(&self) -> Endpoint { + Endpoint::new(self.client.clone(), self.name.clone(), Index::QuarterIndex) + } +} + +/// Index accessor for metrics with 1 indexes. 
+pub struct MetricPattern39 { + client: Arc, + name: Arc, + pub by: MetricPattern39By, +} + +impl MetricPattern39 { + pub fn new(client: Arc, name: String) -> Self { + let name: Arc = name.into(); + Self { + client: client.clone(), + name: name.clone(), + by: MetricPattern39By { + client, + name, + _marker: std::marker::PhantomData, + } + } + } + + /// Get the metric name. + pub fn name(&self) -> &str { + &self.name + } +} + +impl AnyMetricPattern for MetricPattern39 { + fn name(&self) -> &str { + &self.name + } + + fn indexes(&self) -> &'static [Index] { + &[ + Index::QuarterIndex, + ] + } +} + +impl MetricPattern for MetricPattern39 { + fn get(&self, index: Index) -> Option> { + match index { + Index::QuarterIndex => Some(self.by.by_quarterindex()), + _ => None, + } + } +} + +/// Container for index endpoint methods. +pub struct MetricPattern40By { + client: Arc, + name: Arc, + _marker: std::marker::PhantomData, +} + +impl MetricPattern40By { + pub fn by_semesterindex(&self) -> Endpoint { + Endpoint::new(self.client.clone(), self.name.clone(), Index::SemesterIndex) + } +} + +/// Index accessor for metrics with 1 indexes. +pub struct MetricPattern40 { + client: Arc, + name: Arc, + pub by: MetricPattern40By, +} + +impl MetricPattern40 { + pub fn new(client: Arc, name: String) -> Self { + let name: Arc = name.into(); + Self { + client: client.clone(), + name: name.clone(), + by: MetricPattern40By { + client, + name, + _marker: std::marker::PhantomData, + } + } + } + + /// Get the metric name. + pub fn name(&self) -> &str { + &self.name + } +} + +impl AnyMetricPattern for MetricPattern40 { + fn name(&self) -> &str { + &self.name + } + + fn indexes(&self) -> &'static [Index] { + &[ + Index::SemesterIndex, + ] + } +} + +impl MetricPattern for MetricPattern40 { + fn get(&self, index: Index) -> Option> { + match index { + Index::SemesterIndex => Some(self.by.by_semesterindex()), + _ => None, + } + } +} + +/// Container for index endpoint methods. 
+pub struct MetricPattern41By { + client: Arc, + name: Arc, + _marker: std::marker::PhantomData, +} + +impl MetricPattern41By { + pub fn by_txindex(&self) -> Endpoint { + Endpoint::new(self.client.clone(), self.name.clone(), Index::TxIndex) + } +} + +/// Index accessor for metrics with 1 indexes. +pub struct MetricPattern41 { + client: Arc, + name: Arc, + pub by: MetricPattern41By, +} + +impl MetricPattern41 { + pub fn new(client: Arc, name: String) -> Self { + let name: Arc = name.into(); + Self { + client: client.clone(), + name: name.clone(), + by: MetricPattern41By { + client, + name, + _marker: std::marker::PhantomData, + } + } + } + + /// Get the metric name. + pub fn name(&self) -> &str { + &self.name + } +} + +impl AnyMetricPattern for MetricPattern41 { + fn name(&self) -> &str { + &self.name + } + + fn indexes(&self) -> &'static [Index] { + &[ + Index::TxIndex, + ] + } +} + +impl MetricPattern for MetricPattern41 { + fn get(&self, index: Index) -> Option> { + match index { + Index::TxIndex => Some(self.by.by_txindex()), + _ => None, + } + } +} + +/// Container for index endpoint methods. +pub struct MetricPattern42By { + client: Arc, + name: Arc, + _marker: std::marker::PhantomData, +} + +impl MetricPattern42By { + pub fn by_unknownoutputindex(&self) -> Endpoint { + Endpoint::new(self.client.clone(), self.name.clone(), Index::UnknownOutputIndex) + } +} + +/// Index accessor for metrics with 1 indexes. +pub struct MetricPattern42 { + client: Arc, + name: Arc, + pub by: MetricPattern42By, +} + +impl MetricPattern42 { + pub fn new(client: Arc, name: String) -> Self { + let name: Arc = name.into(); + Self { + client: client.clone(), + name: name.clone(), + by: MetricPattern42By { + client, + name, + _marker: std::marker::PhantomData, + } + } + } + + /// Get the metric name. 
+ pub fn name(&self) -> &str { + &self.name + } +} + +impl AnyMetricPattern for MetricPattern42 { + fn name(&self) -> &str { + &self.name + } + + fn indexes(&self) -> &'static [Index] { + &[ + Index::UnknownOutputIndex, + ] + } +} + +impl MetricPattern for MetricPattern42 { + fn get(&self, index: Index) -> Option> { + match index { + Index::UnknownOutputIndex => Some(self.by.by_unknownoutputindex()), + _ => None, + } + } +} + +/// Container for index endpoint methods. +pub struct MetricPattern43By { + client: Arc, + name: Arc, + _marker: std::marker::PhantomData, +} + +impl MetricPattern43By { + pub fn by_weekindex(&self) -> Endpoint { + Endpoint::new(self.client.clone(), self.name.clone(), Index::WeekIndex) + } +} + +/// Index accessor for metrics with 1 indexes. +pub struct MetricPattern43 { + client: Arc, + name: Arc, + pub by: MetricPattern43By, +} + +impl MetricPattern43 { + pub fn new(client: Arc, name: String) -> Self { + let name: Arc = name.into(); + Self { + client: client.clone(), + name: name.clone(), + by: MetricPattern43By { + client, + name, + _marker: std::marker::PhantomData, + } + } + } + + /// Get the metric name. + pub fn name(&self) -> &str { + &self.name + } +} + +impl AnyMetricPattern for MetricPattern43 { + fn name(&self) -> &str { + &self.name + } + + fn indexes(&self) -> &'static [Index] { + &[ + Index::WeekIndex, + ] + } +} + +impl MetricPattern for MetricPattern43 { + fn get(&self, index: Index) -> Option> { + match index { + Index::WeekIndex => Some(self.by.by_weekindex()), + _ => None, + } + } +} + +/// Container for index endpoint methods. +pub struct MetricPattern44By { + client: Arc, + name: Arc, + _marker: std::marker::PhantomData, +} + +impl MetricPattern44By { + pub fn by_yearindex(&self) -> Endpoint { + Endpoint::new(self.client.clone(), self.name.clone(), Index::YearIndex) + } +} + +/// Index accessor for metrics with 1 indexes. 
+pub struct MetricPattern44 { + client: Arc, + name: Arc, + pub by: MetricPattern44By, +} + +impl MetricPattern44 { + pub fn new(client: Arc, name: String) -> Self { + let name: Arc = name.into(); + Self { + client: client.clone(), + name: name.clone(), + by: MetricPattern44By { + client, + name, + _marker: std::marker::PhantomData, + } + } + } + + /// Get the metric name. + pub fn name(&self) -> &str { + &self.name + } +} + +impl AnyMetricPattern for MetricPattern44 { + fn name(&self) -> &str { + &self.name + } + + fn indexes(&self) -> &'static [Index] { + &[ + Index::YearIndex, + ] + } +} + +impl MetricPattern for MetricPattern44 { + fn get(&self, index: Index) -> Option> { + match index { + Index::YearIndex => Some(self.by.by_yearindex()), + _ => None, + } + } +} + +/// Container for index endpoint methods. +pub struct MetricPattern45By { + client: Arc, + name: Arc, + _marker: std::marker::PhantomData, +} + +impl MetricPattern45By { + pub fn by_loadedaddressindex(&self) -> Endpoint { + Endpoint::new(self.client.clone(), self.name.clone(), Index::LoadedAddressIndex) + } +} + +/// Index accessor for metrics with 1 indexes. +pub struct MetricPattern45 { + client: Arc, + name: Arc, + pub by: MetricPattern45By, +} + +impl MetricPattern45 { + pub fn new(client: Arc, name: String) -> Self { + let name: Arc = name.into(); + Self { + client: client.clone(), + name: name.clone(), + by: MetricPattern45By { + client, + name, + _marker: std::marker::PhantomData, + } + } + } + + /// Get the metric name. + pub fn name(&self) -> &str { + &self.name + } +} + +impl AnyMetricPattern for MetricPattern45 { + fn name(&self) -> &str { + &self.name + } + + fn indexes(&self) -> &'static [Index] { + &[ + Index::LoadedAddressIndex, + ] + } +} + +impl MetricPattern for MetricPattern45 { + fn get(&self, index: Index) -> Option> { + match index { + Index::LoadedAddressIndex => Some(self.by.by_loadedaddressindex()), + _ => None, + } + } +} + +/// Container for index endpoint methods. 
+pub struct MetricPattern46By { + client: Arc, + name: Arc, + _marker: std::marker::PhantomData, +} + +impl MetricPattern46By { + pub fn by_emptyaddressindex(&self) -> Endpoint { + Endpoint::new(self.client.clone(), self.name.clone(), Index::EmptyAddressIndex) + } +} + +/// Index accessor for metrics with 1 indexes. +pub struct MetricPattern46 { + client: Arc, + name: Arc, + pub by: MetricPattern46By, +} + +impl MetricPattern46 { + pub fn new(client: Arc, name: String) -> Self { + let name: Arc = name.into(); + Self { + client: client.clone(), + name: name.clone(), + by: MetricPattern46By { + client, + name, + _marker: std::marker::PhantomData, + } + } + } + + /// Get the metric name. + pub fn name(&self) -> &str { + &self.name + } +} + +impl AnyMetricPattern for MetricPattern46 { + fn name(&self) -> &str { + &self.name + } + + fn indexes(&self) -> &'static [Index] { + &[ + Index::EmptyAddressIndex, + ] + } +} + +impl MetricPattern for MetricPattern46 { + fn get(&self, index: Index) -> Option> { + match index { + Index::EmptyAddressIndex => Some(self.by.by_emptyaddressindex()), + _ => None, } } } @@ -609,457 +3250,472 @@ impl Indexes30 { /// Pattern struct for repeated tree structure. 
pub struct RealizedPattern3 { - pub adjusted_sopr: Indexes5, - pub adjusted_sopr_30d_ema: Indexes5, - pub adjusted_sopr_7d_ema: Indexes5, - pub adjusted_value_created: Indexes3, - pub adjusted_value_destroyed: Indexes3, + pub adjusted_sopr: MetricPattern21, + pub adjusted_sopr_30d_ema: MetricPattern21, + pub adjusted_sopr_7d_ema: MetricPattern21, + pub adjusted_value_created: MetricPattern1, + pub adjusted_value_destroyed: MetricPattern1, + pub mvrv: MetricPattern4, pub neg_realized_loss: BlockCountPattern, pub net_realized_pnl: BlockCountPattern, - pub net_realized_pnl_cumulative_30d_delta: Indexes, - pub net_realized_pnl_cumulative_30d_delta_rel_to_market_cap: Indexes, - pub net_realized_pnl_cumulative_30d_delta_rel_to_realized_cap: Indexes, - pub net_realized_pnl_rel_to_realized_cap: Indexes2, - pub realized_cap: Indexes3, - pub realized_cap_30d_delta: Indexes, - pub realized_cap_rel_to_own_market_cap: Indexes3, + pub net_realized_pnl_cumulative_30d_delta: MetricPattern4, + pub net_realized_pnl_cumulative_30d_delta_rel_to_market_cap: MetricPattern4, + pub net_realized_pnl_cumulative_30d_delta_rel_to_realized_cap: MetricPattern4, + pub net_realized_pnl_rel_to_realized_cap: MetricPattern25, + pub realized_cap: MetricPattern1, + pub realized_cap_30d_delta: MetricPattern4, + pub realized_cap_rel_to_own_market_cap: MetricPattern1, pub realized_loss: BlockCountPattern, - pub realized_loss_rel_to_realized_cap: Indexes2, - pub realized_price: Indexes3, + pub realized_loss_rel_to_realized_cap: MetricPattern25, + pub realized_price: MetricPattern1, pub realized_price_extra: ActivePriceRatioPattern, pub realized_profit: BlockCountPattern, - pub realized_profit_rel_to_realized_cap: Indexes2, - pub realized_profit_to_loss_ratio: Indexes5, - pub realized_value: Indexes3, - pub sell_side_risk_ratio: Indexes5, - pub sell_side_risk_ratio_30d_ema: Indexes5, - pub sell_side_risk_ratio_7d_ema: Indexes5, - pub sopr: Indexes5, - pub sopr_30d_ema: Indexes5, - pub sopr_7d_ema: 
Indexes5, - pub total_realized_pnl: BitcoinPattern2, - pub value_created: Indexes3, - pub value_destroyed: Indexes3, + pub realized_profit_rel_to_realized_cap: MetricPattern25, + pub realized_profit_to_loss_ratio: MetricPattern21, + pub realized_value: MetricPattern1, + pub sell_side_risk_ratio: MetricPattern21, + pub sell_side_risk_ratio_30d_ema: MetricPattern21, + pub sell_side_risk_ratio_7d_ema: MetricPattern21, + pub sopr: MetricPattern21, + pub sopr_30d_ema: MetricPattern21, + pub sopr_7d_ema: MetricPattern21, + pub total_realized_pnl: TotalRealizedPnlPattern, + pub value_created: MetricPattern1, + pub value_destroyed: MetricPattern1, } impl RealizedPattern3 { - pub fn new(client: Arc, base_path: &str) -> Self { + /// Create a new pattern node with accumulated metric name. + pub fn new(client: Arc, acc: String) -> Self { Self { - adjusted_sopr: Indexes5::new(client.clone(), &format!("{base_path}_adjusted_sopr")), - adjusted_sopr_30d_ema: Indexes5::new(client.clone(), &format!("{base_path}_adjusted_sopr_30d_ema")), - adjusted_sopr_7d_ema: Indexes5::new(client.clone(), &format!("{base_path}_adjusted_sopr_7d_ema")), - adjusted_value_created: Indexes3::new(client.clone(), &format!("{base_path}_adjusted_value_created")), - adjusted_value_destroyed: Indexes3::new(client.clone(), &format!("{base_path}_adjusted_value_destroyed")), - neg_realized_loss: BlockCountPattern::new(client.clone(), &format!("{base_path}_neg_realized_loss")), - net_realized_pnl: BlockCountPattern::new(client.clone(), &format!("{base_path}_net_realized_pnl")), - net_realized_pnl_cumulative_30d_delta: Indexes::new(client.clone(), &format!("{base_path}_net_realized_pnl_cumulative_30d_delta")), - net_realized_pnl_cumulative_30d_delta_rel_to_market_cap: Indexes::new(client.clone(), &format!("{base_path}_net_realized_pnl_cumulative_30d_delta_rel_to_market_cap")), - net_realized_pnl_cumulative_30d_delta_rel_to_realized_cap: Indexes::new(client.clone(), 
&format!("{base_path}_net_realized_pnl_cumulative_30d_delta_rel_to_realized_cap")), - net_realized_pnl_rel_to_realized_cap: Indexes2::new(client.clone(), &format!("{base_path}_net_realized_pnl_rel_to_realized_cap")), - realized_cap: Indexes3::new(client.clone(), &format!("{base_path}_realized_cap")), - realized_cap_30d_delta: Indexes::new(client.clone(), &format!("{base_path}_realized_cap_30d_delta")), - realized_cap_rel_to_own_market_cap: Indexes3::new(client.clone(), &format!("{base_path}_realized_cap_rel_to_own_market_cap")), - realized_loss: BlockCountPattern::new(client.clone(), &format!("{base_path}_realized_loss")), - realized_loss_rel_to_realized_cap: Indexes2::new(client.clone(), &format!("{base_path}_realized_loss_rel_to_realized_cap")), - realized_price: Indexes3::new(client.clone(), &format!("{base_path}_realized_price")), - realized_price_extra: ActivePriceRatioPattern::new(client.clone(), &format!("{base_path}_realized_price_extra")), - realized_profit: BlockCountPattern::new(client.clone(), &format!("{base_path}_realized_profit")), - realized_profit_rel_to_realized_cap: Indexes2::new(client.clone(), &format!("{base_path}_realized_profit_rel_to_realized_cap")), - realized_profit_to_loss_ratio: Indexes5::new(client.clone(), &format!("{base_path}_realized_profit_to_loss_ratio")), - realized_value: Indexes3::new(client.clone(), &format!("{base_path}_realized_value")), - sell_side_risk_ratio: Indexes5::new(client.clone(), &format!("{base_path}_sell_side_risk_ratio")), - sell_side_risk_ratio_30d_ema: Indexes5::new(client.clone(), &format!("{base_path}_sell_side_risk_ratio_30d_ema")), - sell_side_risk_ratio_7d_ema: Indexes5::new(client.clone(), &format!("{base_path}_sell_side_risk_ratio_7d_ema")), - sopr: Indexes5::new(client.clone(), &format!("{base_path}_sopr")), - sopr_30d_ema: Indexes5::new(client.clone(), &format!("{base_path}_sopr_30d_ema")), - sopr_7d_ema: Indexes5::new(client.clone(), &format!("{base_path}_sopr_7d_ema")), - total_realized_pnl: 
BitcoinPattern2::new(client.clone(), &format!("{base_path}_total_realized_pnl")), - value_created: Indexes3::new(client.clone(), &format!("{base_path}_value_created")), - value_destroyed: Indexes3::new(client.clone(), &format!("{base_path}_value_destroyed")), + adjusted_sopr: MetricPattern21::new(client.clone(), _m(&acc, "adjusted_sopr")), + adjusted_sopr_30d_ema: MetricPattern21::new(client.clone(), _m(&acc, "adjusted_sopr_30d_ema")), + adjusted_sopr_7d_ema: MetricPattern21::new(client.clone(), _m(&acc, "adjusted_sopr_7d_ema")), + adjusted_value_created: MetricPattern1::new(client.clone(), _m(&acc, "adjusted_value_created")), + adjusted_value_destroyed: MetricPattern1::new(client.clone(), _m(&acc, "adjusted_value_destroyed")), + mvrv: MetricPattern4::new(client.clone(), _m(&acc, "mvrv")), + neg_realized_loss: BlockCountPattern::new(client.clone(), _m(&acc, "neg_realized_loss")), + net_realized_pnl: BlockCountPattern::new(client.clone(), _m(&acc, "net_realized_pnl")), + net_realized_pnl_cumulative_30d_delta: MetricPattern4::new(client.clone(), _m(&acc, "net_realized_pnl_cumulative_30d_delta")), + net_realized_pnl_cumulative_30d_delta_rel_to_market_cap: MetricPattern4::new(client.clone(), _m(&acc, "net_realized_pnl_cumulative_30d_delta_rel_to_market_cap")), + net_realized_pnl_cumulative_30d_delta_rel_to_realized_cap: MetricPattern4::new(client.clone(), _m(&acc, "net_realized_pnl_cumulative_30d_delta_rel_to_realized_cap")), + net_realized_pnl_rel_to_realized_cap: MetricPattern25::new(client.clone(), _m(&acc, "net_realized_pnl_rel_to_realized_cap")), + realized_cap: MetricPattern1::new(client.clone(), _m(&acc, "realized_cap")), + realized_cap_30d_delta: MetricPattern4::new(client.clone(), _m(&acc, "realized_cap_30d_delta")), + realized_cap_rel_to_own_market_cap: MetricPattern1::new(client.clone(), _m(&acc, "realized_cap_rel_to_own_market_cap")), + realized_loss: BlockCountPattern::new(client.clone(), _m(&acc, "realized_loss")), + realized_loss_rel_to_realized_cap: 
MetricPattern25::new(client.clone(), _m(&acc, "realized_loss_rel_to_realized_cap")), + realized_price: MetricPattern1::new(client.clone(), _m(&acc, "realized_price")), + realized_price_extra: ActivePriceRatioPattern::new(client.clone(), _m(&acc, "realized_price_ratio")), + realized_profit: BlockCountPattern::new(client.clone(), _m(&acc, "realized_profit")), + realized_profit_rel_to_realized_cap: MetricPattern25::new(client.clone(), _m(&acc, "realized_profit_rel_to_realized_cap")), + realized_profit_to_loss_ratio: MetricPattern21::new(client.clone(), _m(&acc, "realized_profit_to_loss_ratio")), + realized_value: MetricPattern1::new(client.clone(), _m(&acc, "realized_value")), + sell_side_risk_ratio: MetricPattern21::new(client.clone(), _m(&acc, "sell_side_risk_ratio")), + sell_side_risk_ratio_30d_ema: MetricPattern21::new(client.clone(), _m(&acc, "sell_side_risk_ratio_30d_ema")), + sell_side_risk_ratio_7d_ema: MetricPattern21::new(client.clone(), _m(&acc, "sell_side_risk_ratio_7d_ema")), + sopr: MetricPattern21::new(client.clone(), _m(&acc, "sopr")), + sopr_30d_ema: MetricPattern21::new(client.clone(), _m(&acc, "sopr_30d_ema")), + sopr_7d_ema: MetricPattern21::new(client.clone(), _m(&acc, "sopr_7d_ema")), + total_realized_pnl: TotalRealizedPnlPattern::new(client.clone(), _m(&acc, "total_realized_pnl")), + value_created: MetricPattern1::new(client.clone(), _m(&acc, "value_created")), + value_destroyed: MetricPattern1::new(client.clone(), _m(&acc, "value_destroyed")), } } } /// Pattern struct for repeated tree structure. 
pub struct Ratio1ySdPattern { - pub _0sd_usd: Indexes, - pub m0_5sd: Indexes, - pub m0_5sd_usd: Indexes, - pub m1_5sd: Indexes, - pub m1_5sd_usd: Indexes, - pub m1sd: Indexes, - pub m1sd_usd: Indexes, - pub m2_5sd: Indexes, - pub m2_5sd_usd: Indexes, - pub m2sd: Indexes, - pub m2sd_usd: Indexes, - pub m3sd: Indexes, - pub m3sd_usd: Indexes, - pub p0_5sd: Indexes, - pub p0_5sd_usd: Indexes, - pub p1_5sd: Indexes, - pub p1_5sd_usd: Indexes, - pub p1sd: Indexes, - pub p1sd_usd: Indexes, - pub p2_5sd: Indexes, - pub p2_5sd_usd: Indexes, - pub p2sd: Indexes, - pub p2sd_usd: Indexes, - pub p3sd: Indexes, - pub p3sd_usd: Indexes, - pub sd: Indexes, - pub sma: Indexes, - pub zscore: Indexes, + pub _0sd_usd: MetricPattern4, + pub m0_5sd: MetricPattern4, + pub m0_5sd_usd: MetricPattern4, + pub m1_5sd: MetricPattern4, + pub m1_5sd_usd: MetricPattern4, + pub m1sd: MetricPattern4, + pub m1sd_usd: MetricPattern4, + pub m2_5sd: MetricPattern4, + pub m2_5sd_usd: MetricPattern4, + pub m2sd: MetricPattern4, + pub m2sd_usd: MetricPattern4, + pub m3sd: MetricPattern4, + pub m3sd_usd: MetricPattern4, + pub p0_5sd: MetricPattern4, + pub p0_5sd_usd: MetricPattern4, + pub p1_5sd: MetricPattern4, + pub p1_5sd_usd: MetricPattern4, + pub p1sd: MetricPattern4, + pub p1sd_usd: MetricPattern4, + pub p2_5sd: MetricPattern4, + pub p2_5sd_usd: MetricPattern4, + pub p2sd: MetricPattern4, + pub p2sd_usd: MetricPattern4, + pub p3sd: MetricPattern4, + pub p3sd_usd: MetricPattern4, + pub sd: MetricPattern4, + pub sma: MetricPattern4, + pub zscore: MetricPattern4, } impl Ratio1ySdPattern { - pub fn new(client: Arc, base_path: &str) -> Self { + /// Create a new pattern node with accumulated metric name. 
+ pub fn new(client: Arc, acc: String) -> Self { Self { - _0sd_usd: Indexes::new(client.clone(), &format!("{base_path}__0sd_usd")), - m0_5sd: Indexes::new(client.clone(), &format!("{base_path}_m0_5sd")), - m0_5sd_usd: Indexes::new(client.clone(), &format!("{base_path}_m0_5sd_usd")), - m1_5sd: Indexes::new(client.clone(), &format!("{base_path}_m1_5sd")), - m1_5sd_usd: Indexes::new(client.clone(), &format!("{base_path}_m1_5sd_usd")), - m1sd: Indexes::new(client.clone(), &format!("{base_path}_m1sd")), - m1sd_usd: Indexes::new(client.clone(), &format!("{base_path}_m1sd_usd")), - m2_5sd: Indexes::new(client.clone(), &format!("{base_path}_m2_5sd")), - m2_5sd_usd: Indexes::new(client.clone(), &format!("{base_path}_m2_5sd_usd")), - m2sd: Indexes::new(client.clone(), &format!("{base_path}_m2sd")), - m2sd_usd: Indexes::new(client.clone(), &format!("{base_path}_m2sd_usd")), - m3sd: Indexes::new(client.clone(), &format!("{base_path}_m3sd")), - m3sd_usd: Indexes::new(client.clone(), &format!("{base_path}_m3sd_usd")), - p0_5sd: Indexes::new(client.clone(), &format!("{base_path}_p0_5sd")), - p0_5sd_usd: Indexes::new(client.clone(), &format!("{base_path}_p0_5sd_usd")), - p1_5sd: Indexes::new(client.clone(), &format!("{base_path}_p1_5sd")), - p1_5sd_usd: Indexes::new(client.clone(), &format!("{base_path}_p1_5sd_usd")), - p1sd: Indexes::new(client.clone(), &format!("{base_path}_p1sd")), - p1sd_usd: Indexes::new(client.clone(), &format!("{base_path}_p1sd_usd")), - p2_5sd: Indexes::new(client.clone(), &format!("{base_path}_p2_5sd")), - p2_5sd_usd: Indexes::new(client.clone(), &format!("{base_path}_p2_5sd_usd")), - p2sd: Indexes::new(client.clone(), &format!("{base_path}_p2sd")), - p2sd_usd: Indexes::new(client.clone(), &format!("{base_path}_p2sd_usd")), - p3sd: Indexes::new(client.clone(), &format!("{base_path}_p3sd")), - p3sd_usd: Indexes::new(client.clone(), &format!("{base_path}_p3sd_usd")), - sd: Indexes::new(client.clone(), &format!("{base_path}_sd")), - sma: 
Indexes::new(client.clone(), &format!("{base_path}_sma")), - zscore: Indexes::new(client.clone(), &format!("{base_path}_zscore")), + _0sd_usd: MetricPattern4::new(client.clone(), _m(&acc, "0sd_usd")), + m0_5sd: MetricPattern4::new(client.clone(), _m(&acc, "m0_5sd")), + m0_5sd_usd: MetricPattern4::new(client.clone(), _m(&acc, "m0_5sd_usd")), + m1_5sd: MetricPattern4::new(client.clone(), _m(&acc, "m1_5sd")), + m1_5sd_usd: MetricPattern4::new(client.clone(), _m(&acc, "m1_5sd_usd")), + m1sd: MetricPattern4::new(client.clone(), _m(&acc, "m1sd")), + m1sd_usd: MetricPattern4::new(client.clone(), _m(&acc, "m1sd_usd")), + m2_5sd: MetricPattern4::new(client.clone(), _m(&acc, "m2_5sd")), + m2_5sd_usd: MetricPattern4::new(client.clone(), _m(&acc, "m2_5sd_usd")), + m2sd: MetricPattern4::new(client.clone(), _m(&acc, "m2sd")), + m2sd_usd: MetricPattern4::new(client.clone(), _m(&acc, "m2sd_usd")), + m3sd: MetricPattern4::new(client.clone(), _m(&acc, "m3sd")), + m3sd_usd: MetricPattern4::new(client.clone(), _m(&acc, "m3sd_usd")), + p0_5sd: MetricPattern4::new(client.clone(), _m(&acc, "p0_5sd")), + p0_5sd_usd: MetricPattern4::new(client.clone(), _m(&acc, "p0_5sd_usd")), + p1_5sd: MetricPattern4::new(client.clone(), _m(&acc, "p1_5sd")), + p1_5sd_usd: MetricPattern4::new(client.clone(), _m(&acc, "p1_5sd_usd")), + p1sd: MetricPattern4::new(client.clone(), _m(&acc, "p1sd")), + p1sd_usd: MetricPattern4::new(client.clone(), _m(&acc, "p1sd_usd")), + p2_5sd: MetricPattern4::new(client.clone(), _m(&acc, "p2_5sd")), + p2_5sd_usd: MetricPattern4::new(client.clone(), _m(&acc, "p2_5sd_usd")), + p2sd: MetricPattern4::new(client.clone(), _m(&acc, "p2sd")), + p2sd_usd: MetricPattern4::new(client.clone(), _m(&acc, "p2sd_usd")), + p3sd: MetricPattern4::new(client.clone(), _m(&acc, "p3sd")), + p3sd_usd: MetricPattern4::new(client.clone(), _m(&acc, "p3sd_usd")), + sd: MetricPattern4::new(client.clone(), _m(&acc, "sd")), + sma: MetricPattern4::new(client.clone(), _m(&acc, "sma")), + zscore: 
MetricPattern4::new(client.clone(), _m(&acc, "zscore")), } } } /// Pattern struct for repeated tree structure. pub struct RealizedPattern2 { + pub mvrv: MetricPattern4, pub neg_realized_loss: BlockCountPattern, pub net_realized_pnl: BlockCountPattern, - pub net_realized_pnl_cumulative_30d_delta: Indexes, - pub net_realized_pnl_cumulative_30d_delta_rel_to_market_cap: Indexes, - pub net_realized_pnl_cumulative_30d_delta_rel_to_realized_cap: Indexes, - pub net_realized_pnl_rel_to_realized_cap: Indexes2, - pub realized_cap: Indexes3, - pub realized_cap_30d_delta: Indexes, - pub realized_cap_rel_to_own_market_cap: Indexes3, + pub net_realized_pnl_cumulative_30d_delta: MetricPattern4, + pub net_realized_pnl_cumulative_30d_delta_rel_to_market_cap: MetricPattern4, + pub net_realized_pnl_cumulative_30d_delta_rel_to_realized_cap: MetricPattern4, + pub net_realized_pnl_rel_to_realized_cap: MetricPattern25, + pub realized_cap: MetricPattern1, + pub realized_cap_30d_delta: MetricPattern4, + pub realized_cap_rel_to_own_market_cap: MetricPattern1, pub realized_loss: BlockCountPattern, - pub realized_loss_rel_to_realized_cap: Indexes2, - pub realized_price: Indexes3, + pub realized_loss_rel_to_realized_cap: MetricPattern25, + pub realized_price: MetricPattern1, pub realized_price_extra: ActivePriceRatioPattern, pub realized_profit: BlockCountPattern, - pub realized_profit_rel_to_realized_cap: Indexes2, - pub realized_profit_to_loss_ratio: Indexes5, - pub realized_value: Indexes3, - pub sell_side_risk_ratio: Indexes5, - pub sell_side_risk_ratio_30d_ema: Indexes5, - pub sell_side_risk_ratio_7d_ema: Indexes5, - pub sopr: Indexes5, - pub sopr_30d_ema: Indexes5, - pub sopr_7d_ema: Indexes5, - pub total_realized_pnl: BitcoinPattern2, - pub value_created: Indexes3, - pub value_destroyed: Indexes3, + pub realized_profit_rel_to_realized_cap: MetricPattern25, + pub realized_profit_to_loss_ratio: MetricPattern21, + pub realized_value: MetricPattern1, + pub sell_side_risk_ratio: 
MetricPattern21, + pub sell_side_risk_ratio_30d_ema: MetricPattern21, + pub sell_side_risk_ratio_7d_ema: MetricPattern21, + pub sopr: MetricPattern21, + pub sopr_30d_ema: MetricPattern21, + pub sopr_7d_ema: MetricPattern21, + pub total_realized_pnl: TotalRealizedPnlPattern, + pub value_created: MetricPattern1, + pub value_destroyed: MetricPattern1, } impl RealizedPattern2 { - pub fn new(client: Arc, base_path: &str) -> Self { + /// Create a new pattern node with accumulated metric name. + pub fn new(client: Arc, acc: String) -> Self { Self { - neg_realized_loss: BlockCountPattern::new(client.clone(), &format!("{base_path}_neg_realized_loss")), - net_realized_pnl: BlockCountPattern::new(client.clone(), &format!("{base_path}_net_realized_pnl")), - net_realized_pnl_cumulative_30d_delta: Indexes::new(client.clone(), &format!("{base_path}_net_realized_pnl_cumulative_30d_delta")), - net_realized_pnl_cumulative_30d_delta_rel_to_market_cap: Indexes::new(client.clone(), &format!("{base_path}_net_realized_pnl_cumulative_30d_delta_rel_to_market_cap")), - net_realized_pnl_cumulative_30d_delta_rel_to_realized_cap: Indexes::new(client.clone(), &format!("{base_path}_net_realized_pnl_cumulative_30d_delta_rel_to_realized_cap")), - net_realized_pnl_rel_to_realized_cap: Indexes2::new(client.clone(), &format!("{base_path}_net_realized_pnl_rel_to_realized_cap")), - realized_cap: Indexes3::new(client.clone(), &format!("{base_path}_realized_cap")), - realized_cap_30d_delta: Indexes::new(client.clone(), &format!("{base_path}_realized_cap_30d_delta")), - realized_cap_rel_to_own_market_cap: Indexes3::new(client.clone(), &format!("{base_path}_realized_cap_rel_to_own_market_cap")), - realized_loss: BlockCountPattern::new(client.clone(), &format!("{base_path}_realized_loss")), - realized_loss_rel_to_realized_cap: Indexes2::new(client.clone(), &format!("{base_path}_realized_loss_rel_to_realized_cap")), - realized_price: Indexes3::new(client.clone(), &format!("{base_path}_realized_price")), - 
realized_price_extra: ActivePriceRatioPattern::new(client.clone(), &format!("{base_path}_realized_price_extra")), - realized_profit: BlockCountPattern::new(client.clone(), &format!("{base_path}_realized_profit")), - realized_profit_rel_to_realized_cap: Indexes2::new(client.clone(), &format!("{base_path}_realized_profit_rel_to_realized_cap")), - realized_profit_to_loss_ratio: Indexes5::new(client.clone(), &format!("{base_path}_realized_profit_to_loss_ratio")), - realized_value: Indexes3::new(client.clone(), &format!("{base_path}_realized_value")), - sell_side_risk_ratio: Indexes5::new(client.clone(), &format!("{base_path}_sell_side_risk_ratio")), - sell_side_risk_ratio_30d_ema: Indexes5::new(client.clone(), &format!("{base_path}_sell_side_risk_ratio_30d_ema")), - sell_side_risk_ratio_7d_ema: Indexes5::new(client.clone(), &format!("{base_path}_sell_side_risk_ratio_7d_ema")), - sopr: Indexes5::new(client.clone(), &format!("{base_path}_sopr")), - sopr_30d_ema: Indexes5::new(client.clone(), &format!("{base_path}_sopr_30d_ema")), - sopr_7d_ema: Indexes5::new(client.clone(), &format!("{base_path}_sopr_7d_ema")), - total_realized_pnl: BitcoinPattern2::new(client.clone(), &format!("{base_path}_total_realized_pnl")), - value_created: Indexes3::new(client.clone(), &format!("{base_path}_value_created")), - value_destroyed: Indexes3::new(client.clone(), &format!("{base_path}_value_destroyed")), + mvrv: MetricPattern4::new(client.clone(), _m(&acc, "mvrv")), + neg_realized_loss: BlockCountPattern::new(client.clone(), _m(&acc, "neg_realized_loss")), + net_realized_pnl: BlockCountPattern::new(client.clone(), _m(&acc, "net_realized_pnl")), + net_realized_pnl_cumulative_30d_delta: MetricPattern4::new(client.clone(), _m(&acc, "net_realized_pnl_cumulative_30d_delta")), + net_realized_pnl_cumulative_30d_delta_rel_to_market_cap: MetricPattern4::new(client.clone(), _m(&acc, "net_realized_pnl_cumulative_30d_delta_rel_to_market_cap")), + 
net_realized_pnl_cumulative_30d_delta_rel_to_realized_cap: MetricPattern4::new(client.clone(), _m(&acc, "net_realized_pnl_cumulative_30d_delta_rel_to_realized_cap")), + net_realized_pnl_rel_to_realized_cap: MetricPattern25::new(client.clone(), _m(&acc, "net_realized_pnl_rel_to_realized_cap")), + realized_cap: MetricPattern1::new(client.clone(), _m(&acc, "realized_cap")), + realized_cap_30d_delta: MetricPattern4::new(client.clone(), _m(&acc, "realized_cap_30d_delta")), + realized_cap_rel_to_own_market_cap: MetricPattern1::new(client.clone(), _m(&acc, "realized_cap_rel_to_own_market_cap")), + realized_loss: BlockCountPattern::new(client.clone(), _m(&acc, "realized_loss")), + realized_loss_rel_to_realized_cap: MetricPattern25::new(client.clone(), _m(&acc, "realized_loss_rel_to_realized_cap")), + realized_price: MetricPattern1::new(client.clone(), _m(&acc, "realized_price")), + realized_price_extra: ActivePriceRatioPattern::new(client.clone(), _m(&acc, "realized_price_ratio")), + realized_profit: BlockCountPattern::new(client.clone(), _m(&acc, "realized_profit")), + realized_profit_rel_to_realized_cap: MetricPattern25::new(client.clone(), _m(&acc, "realized_profit_rel_to_realized_cap")), + realized_profit_to_loss_ratio: MetricPattern21::new(client.clone(), _m(&acc, "realized_profit_to_loss_ratio")), + realized_value: MetricPattern1::new(client.clone(), _m(&acc, "realized_value")), + sell_side_risk_ratio: MetricPattern21::new(client.clone(), _m(&acc, "sell_side_risk_ratio")), + sell_side_risk_ratio_30d_ema: MetricPattern21::new(client.clone(), _m(&acc, "sell_side_risk_ratio_30d_ema")), + sell_side_risk_ratio_7d_ema: MetricPattern21::new(client.clone(), _m(&acc, "sell_side_risk_ratio_7d_ema")), + sopr: MetricPattern21::new(client.clone(), _m(&acc, "sopr")), + sopr_30d_ema: MetricPattern21::new(client.clone(), _m(&acc, "sopr_30d_ema")), + sopr_7d_ema: MetricPattern21::new(client.clone(), _m(&acc, "sopr_7d_ema")), + total_realized_pnl: 
TotalRealizedPnlPattern::new(client.clone(), _m(&acc, "total_realized_pnl")), + value_created: MetricPattern1::new(client.clone(), _m(&acc, "value_created")), + value_destroyed: MetricPattern1::new(client.clone(), _m(&acc, "value_destroyed")), } } } /// Pattern struct for repeated tree structure. pub struct RealizedPattern { + pub mvrv: MetricPattern4, pub neg_realized_loss: BlockCountPattern, pub net_realized_pnl: BlockCountPattern, - pub net_realized_pnl_cumulative_30d_delta: Indexes, - pub net_realized_pnl_cumulative_30d_delta_rel_to_market_cap: Indexes, - pub net_realized_pnl_cumulative_30d_delta_rel_to_realized_cap: Indexes, - pub net_realized_pnl_rel_to_realized_cap: Indexes2, - pub realized_cap: Indexes3, - pub realized_cap_30d_delta: Indexes, + pub net_realized_pnl_cumulative_30d_delta: MetricPattern4, + pub net_realized_pnl_cumulative_30d_delta_rel_to_market_cap: MetricPattern4, + pub net_realized_pnl_cumulative_30d_delta_rel_to_realized_cap: MetricPattern4, + pub net_realized_pnl_rel_to_realized_cap: MetricPattern25, + pub realized_cap: MetricPattern1, + pub realized_cap_30d_delta: MetricPattern4, pub realized_loss: BlockCountPattern, - pub realized_loss_rel_to_realized_cap: Indexes2, - pub realized_price: Indexes3, + pub realized_loss_rel_to_realized_cap: MetricPattern25, + pub realized_price: MetricPattern1, pub realized_price_extra: RealizedPriceExtraPattern, pub realized_profit: BlockCountPattern, - pub realized_profit_rel_to_realized_cap: Indexes2, - pub realized_value: Indexes3, - pub sell_side_risk_ratio: Indexes5, - pub sell_side_risk_ratio_30d_ema: Indexes5, - pub sell_side_risk_ratio_7d_ema: Indexes5, - pub sopr: Indexes5, - pub sopr_30d_ema: Indexes5, - pub sopr_7d_ema: Indexes5, - pub total_realized_pnl: BitcoinPattern2, - pub value_created: Indexes3, - pub value_destroyed: Indexes3, + pub realized_profit_rel_to_realized_cap: MetricPattern25, + pub realized_value: MetricPattern1, + pub sell_side_risk_ratio: MetricPattern21, + pub 
sell_side_risk_ratio_30d_ema: MetricPattern21, + pub sell_side_risk_ratio_7d_ema: MetricPattern21, + pub sopr: MetricPattern21, + pub sopr_30d_ema: MetricPattern21, + pub sopr_7d_ema: MetricPattern21, + pub total_realized_pnl: TotalRealizedPnlPattern, + pub value_created: MetricPattern1, + pub value_destroyed: MetricPattern1, } impl RealizedPattern { - pub fn new(client: Arc, base_path: &str) -> Self { + /// Create a new pattern node with accumulated metric name. + pub fn new(client: Arc, acc: String) -> Self { Self { - neg_realized_loss: BlockCountPattern::new(client.clone(), &format!("{base_path}_neg_realized_loss")), - net_realized_pnl: BlockCountPattern::new(client.clone(), &format!("{base_path}_net_realized_pnl")), - net_realized_pnl_cumulative_30d_delta: Indexes::new(client.clone(), &format!("{base_path}_net_realized_pnl_cumulative_30d_delta")), - net_realized_pnl_cumulative_30d_delta_rel_to_market_cap: Indexes::new(client.clone(), &format!("{base_path}_net_realized_pnl_cumulative_30d_delta_rel_to_market_cap")), - net_realized_pnl_cumulative_30d_delta_rel_to_realized_cap: Indexes::new(client.clone(), &format!("{base_path}_net_realized_pnl_cumulative_30d_delta_rel_to_realized_cap")), - net_realized_pnl_rel_to_realized_cap: Indexes2::new(client.clone(), &format!("{base_path}_net_realized_pnl_rel_to_realized_cap")), - realized_cap: Indexes3::new(client.clone(), &format!("{base_path}_realized_cap")), - realized_cap_30d_delta: Indexes::new(client.clone(), &format!("{base_path}_realized_cap_30d_delta")), - realized_loss: BlockCountPattern::new(client.clone(), &format!("{base_path}_realized_loss")), - realized_loss_rel_to_realized_cap: Indexes2::new(client.clone(), &format!("{base_path}_realized_loss_rel_to_realized_cap")), - realized_price: Indexes3::new(client.clone(), &format!("{base_path}_realized_price")), - realized_price_extra: RealizedPriceExtraPattern::new(client.clone(), &format!("{base_path}_realized_price_extra")), - realized_profit: 
BlockCountPattern::new(client.clone(), &format!("{base_path}_realized_profit")), - realized_profit_rel_to_realized_cap: Indexes2::new(client.clone(), &format!("{base_path}_realized_profit_rel_to_realized_cap")), - realized_value: Indexes3::new(client.clone(), &format!("{base_path}_realized_value")), - sell_side_risk_ratio: Indexes5::new(client.clone(), &format!("{base_path}_sell_side_risk_ratio")), - sell_side_risk_ratio_30d_ema: Indexes5::new(client.clone(), &format!("{base_path}_sell_side_risk_ratio_30d_ema")), - sell_side_risk_ratio_7d_ema: Indexes5::new(client.clone(), &format!("{base_path}_sell_side_risk_ratio_7d_ema")), - sopr: Indexes5::new(client.clone(), &format!("{base_path}_sopr")), - sopr_30d_ema: Indexes5::new(client.clone(), &format!("{base_path}_sopr_30d_ema")), - sopr_7d_ema: Indexes5::new(client.clone(), &format!("{base_path}_sopr_7d_ema")), - total_realized_pnl: BitcoinPattern2::new(client.clone(), &format!("{base_path}_total_realized_pnl")), - value_created: Indexes3::new(client.clone(), &format!("{base_path}_value_created")), - value_destroyed: Indexes3::new(client.clone(), &format!("{base_path}_value_destroyed")), + mvrv: MetricPattern4::new(client.clone(), _m(&acc, "mvrv")), + neg_realized_loss: BlockCountPattern::new(client.clone(), _m(&acc, "neg_realized_loss")), + net_realized_pnl: BlockCountPattern::new(client.clone(), _m(&acc, "net_realized_pnl")), + net_realized_pnl_cumulative_30d_delta: MetricPattern4::new(client.clone(), _m(&acc, "net_realized_pnl_cumulative_30d_delta")), + net_realized_pnl_cumulative_30d_delta_rel_to_market_cap: MetricPattern4::new(client.clone(), _m(&acc, "net_realized_pnl_cumulative_30d_delta_rel_to_market_cap")), + net_realized_pnl_cumulative_30d_delta_rel_to_realized_cap: MetricPattern4::new(client.clone(), _m(&acc, "net_realized_pnl_cumulative_30d_delta_rel_to_realized_cap")), + net_realized_pnl_rel_to_realized_cap: MetricPattern25::new(client.clone(), _m(&acc, "net_realized_pnl_rel_to_realized_cap")), + 
realized_cap: MetricPattern1::new(client.clone(), _m(&acc, "realized_cap")), + realized_cap_30d_delta: MetricPattern4::new(client.clone(), _m(&acc, "realized_cap_30d_delta")), + realized_loss: BlockCountPattern::new(client.clone(), _m(&acc, "realized_loss")), + realized_loss_rel_to_realized_cap: MetricPattern25::new(client.clone(), _m(&acc, "realized_loss_rel_to_realized_cap")), + realized_price: MetricPattern1::new(client.clone(), _m(&acc, "realized_price")), + realized_price_extra: RealizedPriceExtraPattern::new(client.clone(), _m(&acc, "realized_price")), + realized_profit: BlockCountPattern::new(client.clone(), _m(&acc, "realized_profit")), + realized_profit_rel_to_realized_cap: MetricPattern25::new(client.clone(), _m(&acc, "realized_profit_rel_to_realized_cap")), + realized_value: MetricPattern1::new(client.clone(), _m(&acc, "realized_value")), + sell_side_risk_ratio: MetricPattern21::new(client.clone(), _m(&acc, "sell_side_risk_ratio")), + sell_side_risk_ratio_30d_ema: MetricPattern21::new(client.clone(), _m(&acc, "sell_side_risk_ratio_30d_ema")), + sell_side_risk_ratio_7d_ema: MetricPattern21::new(client.clone(), _m(&acc, "sell_side_risk_ratio_7d_ema")), + sopr: MetricPattern21::new(client.clone(), _m(&acc, "sopr")), + sopr_30d_ema: MetricPattern21::new(client.clone(), _m(&acc, "sopr_30d_ema")), + sopr_7d_ema: MetricPattern21::new(client.clone(), _m(&acc, "sopr_7d_ema")), + total_realized_pnl: TotalRealizedPnlPattern::new(client.clone(), _m(&acc, "total_realized_pnl")), + value_created: MetricPattern1::new(client.clone(), _m(&acc, "value_created")), + value_destroyed: MetricPattern1::new(client.clone(), _m(&acc, "value_destroyed")), } } } /// Pattern struct for repeated tree structure. 
-pub struct Price13dEmaPattern { - pub price: Indexes, - pub ratio: Indexes, - pub ratio_1m_sma: Indexes, - pub ratio_1w_sma: Indexes, +pub struct Price111dSmaPattern { + pub price: MetricPattern4, + pub ratio: MetricPattern4, + pub ratio_1m_sma: MetricPattern4, + pub ratio_1w_sma: MetricPattern4, pub ratio_1y_sd: Ratio1ySdPattern, pub ratio_2y_sd: Ratio1ySdPattern, pub ratio_4y_sd: Ratio1ySdPattern, - pub ratio_pct1: Indexes, - pub ratio_pct1_usd: Indexes, - pub ratio_pct2: Indexes, - pub ratio_pct2_usd: Indexes, - pub ratio_pct5: Indexes, - pub ratio_pct5_usd: Indexes, - pub ratio_pct95: Indexes, - pub ratio_pct95_usd: Indexes, - pub ratio_pct98: Indexes, - pub ratio_pct98_usd: Indexes, - pub ratio_pct99: Indexes, - pub ratio_pct99_usd: Indexes, + pub ratio_pct1: MetricPattern4, + pub ratio_pct1_usd: MetricPattern4, + pub ratio_pct2: MetricPattern4, + pub ratio_pct2_usd: MetricPattern4, + pub ratio_pct5: MetricPattern4, + pub ratio_pct5_usd: MetricPattern4, + pub ratio_pct95: MetricPattern4, + pub ratio_pct95_usd: MetricPattern4, + pub ratio_pct98: MetricPattern4, + pub ratio_pct98_usd: MetricPattern4, + pub ratio_pct99: MetricPattern4, + pub ratio_pct99_usd: MetricPattern4, pub ratio_sd: Ratio1ySdPattern, } -impl Price13dEmaPattern { +impl Price111dSmaPattern { /// Create a new pattern node with accumulated metric name. 
- pub fn new(client: Arc, acc: &str) -> Self { + pub fn new(client: Arc, acc: String) -> Self { Self { - price: Indexes::new(client.clone(), &acc.to_string()), - ratio: Indexes::new(client.clone(), &format!("{acc}_ratio")), - ratio_1m_sma: Indexes::new(client.clone(), &format!("{acc}_ratio_1m_sma")), - ratio_1w_sma: Indexes::new(client.clone(), &format!("{acc}_ratio_1w_sma")), - ratio_1y_sd: Ratio1ySdPattern::new(client.clone(), &format!("{acc}_ratio_1y_sd")), - ratio_2y_sd: Ratio1ySdPattern::new(client.clone(), &format!("{acc}_ratio_2y_sd")), - ratio_4y_sd: Ratio1ySdPattern::new(client.clone(), &format!("{acc}_ratio_4y_sd")), - ratio_pct1: Indexes::new(client.clone(), &format!("{acc}_ratio_pct1")), - ratio_pct1_usd: Indexes::new(client.clone(), &format!("{acc}_ratio_pct1_usd")), - ratio_pct2: Indexes::new(client.clone(), &format!("{acc}_ratio_pct2")), - ratio_pct2_usd: Indexes::new(client.clone(), &format!("{acc}_ratio_pct2_usd")), - ratio_pct5: Indexes::new(client.clone(), &format!("{acc}_ratio_pct5")), - ratio_pct5_usd: Indexes::new(client.clone(), &format!("{acc}_ratio_pct5_usd")), - ratio_pct95: Indexes::new(client.clone(), &format!("{acc}_ratio_pct95")), - ratio_pct95_usd: Indexes::new(client.clone(), &format!("{acc}_ratio_pct95_usd")), - ratio_pct98: Indexes::new(client.clone(), &format!("{acc}_ratio_pct98")), - ratio_pct98_usd: Indexes::new(client.clone(), &format!("{acc}_ratio_pct98_usd")), - ratio_pct99: Indexes::new(client.clone(), &format!("{acc}_ratio_pct99")), - ratio_pct99_usd: Indexes::new(client.clone(), &format!("{acc}_ratio_pct99_usd")), - ratio_sd: Ratio1ySdPattern::new(client.clone(), &format!("{acc}_ratio_sd")), + price: MetricPattern4::new(client.clone(), acc.clone()), + ratio: MetricPattern4::new(client.clone(), _m(&acc, "ratio")), + ratio_1m_sma: MetricPattern4::new(client.clone(), _m(&acc, "ratio_1m_sma")), + ratio_1w_sma: MetricPattern4::new(client.clone(), _m(&acc, "ratio_1w_sma")), + ratio_1y_sd: Ratio1ySdPattern::new(client.clone(), 
_m(&acc, "ratio_1y")), + ratio_2y_sd: Ratio1ySdPattern::new(client.clone(), _m(&acc, "ratio_2y")), + ratio_4y_sd: Ratio1ySdPattern::new(client.clone(), _m(&acc, "ratio_4y")), + ratio_pct1: MetricPattern4::new(client.clone(), _m(&acc, "ratio_pct1")), + ratio_pct1_usd: MetricPattern4::new(client.clone(), _m(&acc, "ratio_pct1_usd")), + ratio_pct2: MetricPattern4::new(client.clone(), _m(&acc, "ratio_pct2")), + ratio_pct2_usd: MetricPattern4::new(client.clone(), _m(&acc, "ratio_pct2_usd")), + ratio_pct5: MetricPattern4::new(client.clone(), _m(&acc, "ratio_pct5")), + ratio_pct5_usd: MetricPattern4::new(client.clone(), _m(&acc, "ratio_pct5_usd")), + ratio_pct95: MetricPattern4::new(client.clone(), _m(&acc, "ratio_pct95")), + ratio_pct95_usd: MetricPattern4::new(client.clone(), _m(&acc, "ratio_pct95_usd")), + ratio_pct98: MetricPattern4::new(client.clone(), _m(&acc, "ratio_pct98")), + ratio_pct98_usd: MetricPattern4::new(client.clone(), _m(&acc, "ratio_pct98_usd")), + ratio_pct99: MetricPattern4::new(client.clone(), _m(&acc, "ratio_pct99")), + ratio_pct99_usd: MetricPattern4::new(client.clone(), _m(&acc, "ratio_pct99_usd")), + ratio_sd: Ratio1ySdPattern::new(client.clone(), _m(&acc, "ratio")), + } + } +} + +/// Pattern struct for repeated tree structure. 
+pub struct PercentilesPattern { + pub cost_basis_pct05: MetricPattern4, + pub cost_basis_pct10: MetricPattern4, + pub cost_basis_pct15: MetricPattern4, + pub cost_basis_pct20: MetricPattern4, + pub cost_basis_pct25: MetricPattern4, + pub cost_basis_pct30: MetricPattern4, + pub cost_basis_pct35: MetricPattern4, + pub cost_basis_pct40: MetricPattern4, + pub cost_basis_pct45: MetricPattern4, + pub cost_basis_pct50: MetricPattern4, + pub cost_basis_pct55: MetricPattern4, + pub cost_basis_pct60: MetricPattern4, + pub cost_basis_pct65: MetricPattern4, + pub cost_basis_pct70: MetricPattern4, + pub cost_basis_pct75: MetricPattern4, + pub cost_basis_pct80: MetricPattern4, + pub cost_basis_pct85: MetricPattern4, + pub cost_basis_pct90: MetricPattern4, + pub cost_basis_pct95: MetricPattern4, +} + +impl PercentilesPattern { + /// Create a new pattern node with accumulated metric name. + pub fn new(client: Arc, acc: String) -> Self { + Self { + cost_basis_pct05: MetricPattern4::new(client.clone(), _m(&acc, "pct05")), + cost_basis_pct10: MetricPattern4::new(client.clone(), _m(&acc, "pct10")), + cost_basis_pct15: MetricPattern4::new(client.clone(), _m(&acc, "pct15")), + cost_basis_pct20: MetricPattern4::new(client.clone(), _m(&acc, "pct20")), + cost_basis_pct25: MetricPattern4::new(client.clone(), _m(&acc, "pct25")), + cost_basis_pct30: MetricPattern4::new(client.clone(), _m(&acc, "pct30")), + cost_basis_pct35: MetricPattern4::new(client.clone(), _m(&acc, "pct35")), + cost_basis_pct40: MetricPattern4::new(client.clone(), _m(&acc, "pct40")), + cost_basis_pct45: MetricPattern4::new(client.clone(), _m(&acc, "pct45")), + cost_basis_pct50: MetricPattern4::new(client.clone(), _m(&acc, "pct50")), + cost_basis_pct55: MetricPattern4::new(client.clone(), _m(&acc, "pct55")), + cost_basis_pct60: MetricPattern4::new(client.clone(), _m(&acc, "pct60")), + cost_basis_pct65: MetricPattern4::new(client.clone(), _m(&acc, "pct65")), + cost_basis_pct70: MetricPattern4::new(client.clone(), _m(&acc, 
"pct70")), + cost_basis_pct75: MetricPattern4::new(client.clone(), _m(&acc, "pct75")), + cost_basis_pct80: MetricPattern4::new(client.clone(), _m(&acc, "pct80")), + cost_basis_pct85: MetricPattern4::new(client.clone(), _m(&acc, "pct85")), + cost_basis_pct90: MetricPattern4::new(client.clone(), _m(&acc, "pct90")), + cost_basis_pct95: MetricPattern4::new(client.clone(), _m(&acc, "pct95")), } } } /// Pattern struct for repeated tree structure. pub struct ActivePriceRatioPattern { - pub ratio: Indexes, - pub ratio_1m_sma: Indexes, - pub ratio_1w_sma: Indexes, + pub ratio: MetricPattern4, + pub ratio_1m_sma: MetricPattern4, + pub ratio_1w_sma: MetricPattern4, pub ratio_1y_sd: Ratio1ySdPattern, pub ratio_2y_sd: Ratio1ySdPattern, pub ratio_4y_sd: Ratio1ySdPattern, - pub ratio_pct1: Indexes, - pub ratio_pct1_usd: Indexes, - pub ratio_pct2: Indexes, - pub ratio_pct2_usd: Indexes, - pub ratio_pct5: Indexes, - pub ratio_pct5_usd: Indexes, - pub ratio_pct95: Indexes, - pub ratio_pct95_usd: Indexes, - pub ratio_pct98: Indexes, - pub ratio_pct98_usd: Indexes, - pub ratio_pct99: Indexes, - pub ratio_pct99_usd: Indexes, + pub ratio_pct1: MetricPattern4, + pub ratio_pct1_usd: MetricPattern4, + pub ratio_pct2: MetricPattern4, + pub ratio_pct2_usd: MetricPattern4, + pub ratio_pct5: MetricPattern4, + pub ratio_pct5_usd: MetricPattern4, + pub ratio_pct95: MetricPattern4, + pub ratio_pct95_usd: MetricPattern4, + pub ratio_pct98: MetricPattern4, + pub ratio_pct98_usd: MetricPattern4, + pub ratio_pct99: MetricPattern4, + pub ratio_pct99_usd: MetricPattern4, pub ratio_sd: Ratio1ySdPattern, } impl ActivePriceRatioPattern { - pub fn new(client: Arc, base_path: &str) -> Self { + /// Create a new pattern node with accumulated metric name. 
+ pub fn new(client: Arc, acc: String) -> Self { Self { - ratio: Indexes::new(client.clone(), &format!("{base_path}_ratio")), - ratio_1m_sma: Indexes::new(client.clone(), &format!("{base_path}_ratio_1m_sma")), - ratio_1w_sma: Indexes::new(client.clone(), &format!("{base_path}_ratio_1w_sma")), - ratio_1y_sd: Ratio1ySdPattern::new(client.clone(), &format!("{base_path}_ratio_1y_sd")), - ratio_2y_sd: Ratio1ySdPattern::new(client.clone(), &format!("{base_path}_ratio_2y_sd")), - ratio_4y_sd: Ratio1ySdPattern::new(client.clone(), &format!("{base_path}_ratio_4y_sd")), - ratio_pct1: Indexes::new(client.clone(), &format!("{base_path}_ratio_pct1")), - ratio_pct1_usd: Indexes::new(client.clone(), &format!("{base_path}_ratio_pct1_usd")), - ratio_pct2: Indexes::new(client.clone(), &format!("{base_path}_ratio_pct2")), - ratio_pct2_usd: Indexes::new(client.clone(), &format!("{base_path}_ratio_pct2_usd")), - ratio_pct5: Indexes::new(client.clone(), &format!("{base_path}_ratio_pct5")), - ratio_pct5_usd: Indexes::new(client.clone(), &format!("{base_path}_ratio_pct5_usd")), - ratio_pct95: Indexes::new(client.clone(), &format!("{base_path}_ratio_pct95")), - ratio_pct95_usd: Indexes::new(client.clone(), &format!("{base_path}_ratio_pct95_usd")), - ratio_pct98: Indexes::new(client.clone(), &format!("{base_path}_ratio_pct98")), - ratio_pct98_usd: Indexes::new(client.clone(), &format!("{base_path}_ratio_pct98_usd")), - ratio_pct99: Indexes::new(client.clone(), &format!("{base_path}_ratio_pct99")), - ratio_pct99_usd: Indexes::new(client.clone(), &format!("{base_path}_ratio_pct99_usd")), - ratio_sd: Ratio1ySdPattern::new(client.clone(), &format!("{base_path}_ratio_sd")), - } - } -} - -/// Pattern struct for repeated tree structure. 
-pub struct PricePercentilesPattern { - pub pct05: Indexes, - pub pct10: Indexes, - pub pct15: Indexes, - pub pct20: Indexes, - pub pct25: Indexes, - pub pct30: Indexes, - pub pct35: Indexes, - pub pct40: Indexes, - pub pct45: Indexes, - pub pct50: Indexes, - pub pct55: Indexes, - pub pct60: Indexes, - pub pct65: Indexes, - pub pct70: Indexes, - pub pct75: Indexes, - pub pct80: Indexes, - pub pct85: Indexes, - pub pct90: Indexes, - pub pct95: Indexes, -} - -impl PricePercentilesPattern { - pub fn new(client: Arc, base_path: &str) -> Self { - Self { - pct05: Indexes::new(client.clone(), &format!("{base_path}_pct05")), - pct10: Indexes::new(client.clone(), &format!("{base_path}_pct10")), - pct15: Indexes::new(client.clone(), &format!("{base_path}_pct15")), - pct20: Indexes::new(client.clone(), &format!("{base_path}_pct20")), - pct25: Indexes::new(client.clone(), &format!("{base_path}_pct25")), - pct30: Indexes::new(client.clone(), &format!("{base_path}_pct30")), - pct35: Indexes::new(client.clone(), &format!("{base_path}_pct35")), - pct40: Indexes::new(client.clone(), &format!("{base_path}_pct40")), - pct45: Indexes::new(client.clone(), &format!("{base_path}_pct45")), - pct50: Indexes::new(client.clone(), &format!("{base_path}_pct50")), - pct55: Indexes::new(client.clone(), &format!("{base_path}_pct55")), - pct60: Indexes::new(client.clone(), &format!("{base_path}_pct60")), - pct65: Indexes::new(client.clone(), &format!("{base_path}_pct65")), - pct70: Indexes::new(client.clone(), &format!("{base_path}_pct70")), - pct75: Indexes::new(client.clone(), &format!("{base_path}_pct75")), - pct80: Indexes::new(client.clone(), &format!("{base_path}_pct80")), - pct85: Indexes::new(client.clone(), &format!("{base_path}_pct85")), - pct90: Indexes::new(client.clone(), &format!("{base_path}_pct90")), - pct95: Indexes::new(client.clone(), &format!("{base_path}_pct95")), + ratio: MetricPattern4::new(client.clone(), acc.clone()), + ratio_1m_sma: MetricPattern4::new(client.clone(), 
_m(&acc, "1m_sma")), + ratio_1w_sma: MetricPattern4::new(client.clone(), _m(&acc, "1w_sma")), + ratio_1y_sd: Ratio1ySdPattern::new(client.clone(), _m(&acc, "1y")), + ratio_2y_sd: Ratio1ySdPattern::new(client.clone(), _m(&acc, "2y")), + ratio_4y_sd: Ratio1ySdPattern::new(client.clone(), _m(&acc, "4y")), + ratio_pct1: MetricPattern4::new(client.clone(), _m(&acc, "pct1")), + ratio_pct1_usd: MetricPattern4::new(client.clone(), _m(&acc, "pct1_usd")), + ratio_pct2: MetricPattern4::new(client.clone(), _m(&acc, "pct2")), + ratio_pct2_usd: MetricPattern4::new(client.clone(), _m(&acc, "pct2_usd")), + ratio_pct5: MetricPattern4::new(client.clone(), _m(&acc, "pct5")), + ratio_pct5_usd: MetricPattern4::new(client.clone(), _m(&acc, "pct5_usd")), + ratio_pct95: MetricPattern4::new(client.clone(), _m(&acc, "pct95")), + ratio_pct95_usd: MetricPattern4::new(client.clone(), _m(&acc, "pct95_usd")), + ratio_pct98: MetricPattern4::new(client.clone(), _m(&acc, "pct98")), + ratio_pct98_usd: MetricPattern4::new(client.clone(), _m(&acc, "pct98_usd")), + ratio_pct99: MetricPattern4::new(client.clone(), _m(&acc, "pct99")), + ratio_pct99_usd: MetricPattern4::new(client.clone(), _m(&acc, "pct99_usd")), + ratio_sd: Ratio1ySdPattern::new(client.clone(), acc.clone()), } } } /// Pattern struct for repeated tree structure. 
pub struct RelativePattern2 { - pub neg_unrealized_loss_rel_to_market_cap: Indexes27, - pub neg_unrealized_loss_rel_to_own_market_cap: Indexes27, - pub neg_unrealized_loss_rel_to_own_total_unrealized_pnl: Indexes27, - pub net_unrealized_pnl_rel_to_market_cap: Indexes26, - pub net_unrealized_pnl_rel_to_own_market_cap: Indexes26, - pub net_unrealized_pnl_rel_to_own_total_unrealized_pnl: Indexes26, - pub supply_in_loss_rel_to_circulating_supply: Indexes27, - pub supply_in_loss_rel_to_own_supply: Indexes27, - pub supply_in_profit_rel_to_circulating_supply: Indexes27, - pub supply_in_profit_rel_to_own_supply: Indexes27, - pub supply_rel_to_circulating_supply: Indexes, - pub unrealized_loss_rel_to_market_cap: Indexes27, - pub unrealized_loss_rel_to_own_market_cap: Indexes27, - pub unrealized_loss_rel_to_own_total_unrealized_pnl: Indexes27, - pub unrealized_profit_rel_to_market_cap: Indexes27, - pub unrealized_profit_rel_to_own_market_cap: Indexes27, - pub unrealized_profit_rel_to_own_total_unrealized_pnl: Indexes27, + pub neg_unrealized_loss_rel_to_market_cap: MetricPattern5, + pub neg_unrealized_loss_rel_to_own_market_cap: MetricPattern5, + pub neg_unrealized_loss_rel_to_own_total_unrealized_pnl: MetricPattern5, + pub net_unrealized_pnl_rel_to_market_cap: MetricPattern3, + pub net_unrealized_pnl_rel_to_own_market_cap: MetricPattern3, + pub net_unrealized_pnl_rel_to_own_total_unrealized_pnl: MetricPattern3, + pub nupl: MetricPattern4, + pub supply_in_loss_rel_to_circulating_supply: MetricPattern5, + pub supply_in_loss_rel_to_own_supply: MetricPattern5, + pub supply_in_profit_rel_to_circulating_supply: MetricPattern5, + pub supply_in_profit_rel_to_own_supply: MetricPattern5, + pub supply_rel_to_circulating_supply: MetricPattern4, + pub unrealized_loss_rel_to_market_cap: MetricPattern5, + pub unrealized_loss_rel_to_own_market_cap: MetricPattern5, + pub unrealized_loss_rel_to_own_total_unrealized_pnl: MetricPattern5, + pub unrealized_profit_rel_to_market_cap: 
MetricPattern5, + pub unrealized_profit_rel_to_own_market_cap: MetricPattern5, + pub unrealized_profit_rel_to_own_total_unrealized_pnl: MetricPattern5, } impl RelativePattern2 { - pub fn new(client: Arc, base_path: &str) -> Self { + /// Create a new pattern node with accumulated metric name. + pub fn new(client: Arc, acc: String) -> Self { Self { - neg_unrealized_loss_rel_to_market_cap: Indexes27::new(client.clone(), &format!("{base_path}_neg_unrealized_loss_rel_to_market_cap")), - neg_unrealized_loss_rel_to_own_market_cap: Indexes27::new(client.clone(), &format!("{base_path}_neg_unrealized_loss_rel_to_own_market_cap")), - neg_unrealized_loss_rel_to_own_total_unrealized_pnl: Indexes27::new(client.clone(), &format!("{base_path}_neg_unrealized_loss_rel_to_own_total_unrealized_pnl")), - net_unrealized_pnl_rel_to_market_cap: Indexes26::new(client.clone(), &format!("{base_path}_net_unrealized_pnl_rel_to_market_cap")), - net_unrealized_pnl_rel_to_own_market_cap: Indexes26::new(client.clone(), &format!("{base_path}_net_unrealized_pnl_rel_to_own_market_cap")), - net_unrealized_pnl_rel_to_own_total_unrealized_pnl: Indexes26::new(client.clone(), &format!("{base_path}_net_unrealized_pnl_rel_to_own_total_unrealized_pnl")), - supply_in_loss_rel_to_circulating_supply: Indexes27::new(client.clone(), &format!("{base_path}_supply_in_loss_rel_to_circulating_supply")), - supply_in_loss_rel_to_own_supply: Indexes27::new(client.clone(), &format!("{base_path}_supply_in_loss_rel_to_own_supply")), - supply_in_profit_rel_to_circulating_supply: Indexes27::new(client.clone(), &format!("{base_path}_supply_in_profit_rel_to_circulating_supply")), - supply_in_profit_rel_to_own_supply: Indexes27::new(client.clone(), &format!("{base_path}_supply_in_profit_rel_to_own_supply")), - supply_rel_to_circulating_supply: Indexes::new(client.clone(), &format!("{base_path}_supply_rel_to_circulating_supply")), - unrealized_loss_rel_to_market_cap: Indexes27::new(client.clone(), 
&format!("{base_path}_unrealized_loss_rel_to_market_cap")), - unrealized_loss_rel_to_own_market_cap: Indexes27::new(client.clone(), &format!("{base_path}_unrealized_loss_rel_to_own_market_cap")), - unrealized_loss_rel_to_own_total_unrealized_pnl: Indexes27::new(client.clone(), &format!("{base_path}_unrealized_loss_rel_to_own_total_unrealized_pnl")), - unrealized_profit_rel_to_market_cap: Indexes27::new(client.clone(), &format!("{base_path}_unrealized_profit_rel_to_market_cap")), - unrealized_profit_rel_to_own_market_cap: Indexes27::new(client.clone(), &format!("{base_path}_unrealized_profit_rel_to_own_market_cap")), - unrealized_profit_rel_to_own_total_unrealized_pnl: Indexes27::new(client.clone(), &format!("{base_path}_unrealized_profit_rel_to_own_total_unrealized_pnl")), + neg_unrealized_loss_rel_to_market_cap: MetricPattern5::new(client.clone(), _m(&acc, "neg_unrealized_loss_rel_to_market_cap")), + neg_unrealized_loss_rel_to_own_market_cap: MetricPattern5::new(client.clone(), _m(&acc, "neg_unrealized_loss_rel_to_own_market_cap")), + neg_unrealized_loss_rel_to_own_total_unrealized_pnl: MetricPattern5::new(client.clone(), _m(&acc, "neg_unrealized_loss_rel_to_own_total_unrealized_pnl")), + net_unrealized_pnl_rel_to_market_cap: MetricPattern3::new(client.clone(), _m(&acc, "net_unrealized_pnl_rel_to_market_cap")), + net_unrealized_pnl_rel_to_own_market_cap: MetricPattern3::new(client.clone(), _m(&acc, "net_unrealized_pnl_rel_to_own_market_cap")), + net_unrealized_pnl_rel_to_own_total_unrealized_pnl: MetricPattern3::new(client.clone(), _m(&acc, "net_unrealized_pnl_rel_to_own_total_unrealized_pnl")), + nupl: MetricPattern4::new(client.clone(), _m(&acc, "nupl")), + supply_in_loss_rel_to_circulating_supply: MetricPattern5::new(client.clone(), _m(&acc, "supply_in_loss_rel_to_circulating_supply")), + supply_in_loss_rel_to_own_supply: MetricPattern5::new(client.clone(), _m(&acc, "supply_in_loss_rel_to_own_supply")), + supply_in_profit_rel_to_circulating_supply: 
MetricPattern5::new(client.clone(), _m(&acc, "supply_in_profit_rel_to_circulating_supply")), + supply_in_profit_rel_to_own_supply: MetricPattern5::new(client.clone(), _m(&acc, "supply_in_profit_rel_to_own_supply")), + supply_rel_to_circulating_supply: MetricPattern4::new(client.clone(), _m(&acc, "supply_rel_to_circulating_supply")), + unrealized_loss_rel_to_market_cap: MetricPattern5::new(client.clone(), _m(&acc, "unrealized_loss_rel_to_market_cap")), + unrealized_loss_rel_to_own_market_cap: MetricPattern5::new(client.clone(), _m(&acc, "unrealized_loss_rel_to_own_market_cap")), + unrealized_loss_rel_to_own_total_unrealized_pnl: MetricPattern5::new(client.clone(), _m(&acc, "unrealized_loss_rel_to_own_total_unrealized_pnl")), + unrealized_profit_rel_to_market_cap: MetricPattern5::new(client.clone(), _m(&acc, "unrealized_profit_rel_to_market_cap")), + unrealized_profit_rel_to_own_market_cap: MetricPattern5::new(client.clone(), _m(&acc, "unrealized_profit_rel_to_own_market_cap")), + unrealized_profit_rel_to_own_total_unrealized_pnl: MetricPattern5::new(client.clone(), _m(&acc, "unrealized_profit_rel_to_own_total_unrealized_pnl")), } } } @@ -1067,466 +3723,475 @@ impl RelativePattern2 { /// Pattern struct for repeated tree structure. 
pub struct AXbtPattern { pub _1d_dominance: BlockCountPattern, - pub _1m_blocks_mined: Indexes, - pub _1m_dominance: Indexes, - pub _1w_blocks_mined: Indexes, - pub _1w_dominance: Indexes, - pub _1y_blocks_mined: Indexes, - pub _1y_dominance: Indexes, + pub _1m_blocks_mined: MetricPattern4, + pub _1m_dominance: MetricPattern4, + pub _1w_blocks_mined: MetricPattern4, + pub _1w_dominance: MetricPattern4, + pub _1y_blocks_mined: MetricPattern4, + pub _1y_dominance: MetricPattern4, pub blocks_mined: BlockCountPattern, pub coinbase: UnclaimedRewardsPattern, - pub days_since_block: Indexes, + pub days_since_block: MetricPattern4, pub dominance: BlockCountPattern, - pub fee: FeePattern2, - pub subsidy: FeePattern2, + pub fee: SentPattern, + pub subsidy: SentPattern, } impl AXbtPattern { - pub fn new(client: Arc, base_path: &str) -> Self { + /// Create a new pattern node with accumulated metric name. + pub fn new(client: Arc, acc: String) -> Self { Self { - _1d_dominance: BlockCountPattern::new(client.clone(), &format!("{base_path}_1d_dominance")), - _1m_blocks_mined: Indexes::new(client.clone(), &format!("{base_path}_1m_blocks_mined")), - _1m_dominance: Indexes::new(client.clone(), &format!("{base_path}_1m_dominance")), - _1w_blocks_mined: Indexes::new(client.clone(), &format!("{base_path}_1w_blocks_mined")), - _1w_dominance: Indexes::new(client.clone(), &format!("{base_path}_1w_dominance")), - _1y_blocks_mined: Indexes::new(client.clone(), &format!("{base_path}_1y_blocks_mined")), - _1y_dominance: Indexes::new(client.clone(), &format!("{base_path}_1y_dominance")), - blocks_mined: BlockCountPattern::new(client.clone(), &format!("{base_path}_blocks_mined")), - coinbase: UnclaimedRewardsPattern::new(client.clone(), &format!("{base_path}_coinbase")), - days_since_block: Indexes::new(client.clone(), &format!("{base_path}_days_since_block")), - dominance: BlockCountPattern::new(client.clone(), &format!("{base_path}_dominance")), - fee: FeePattern2::new(client.clone(), 
&format!("{base_path}_fee")), - subsidy: FeePattern2::new(client.clone(), &format!("{base_path}_subsidy")), + _1d_dominance: BlockCountPattern::new(client.clone(), _m(&acc, "1d_dominance")), + _1m_blocks_mined: MetricPattern4::new(client.clone(), _m(&acc, "1m_blocks_mined")), + _1m_dominance: MetricPattern4::new(client.clone(), _m(&acc, "1m_dominance")), + _1w_blocks_mined: MetricPattern4::new(client.clone(), _m(&acc, "1w_blocks_mined")), + _1w_dominance: MetricPattern4::new(client.clone(), _m(&acc, "1w_dominance")), + _1y_blocks_mined: MetricPattern4::new(client.clone(), _m(&acc, "1y_blocks_mined")), + _1y_dominance: MetricPattern4::new(client.clone(), _m(&acc, "1y_dominance")), + blocks_mined: BlockCountPattern::new(client.clone(), _m(&acc, "blocks_mined")), + coinbase: UnclaimedRewardsPattern::new(client.clone(), _m(&acc, "coinbase")), + days_since_block: MetricPattern4::new(client.clone(), _m(&acc, "days_since_block")), + dominance: BlockCountPattern::new(client.clone(), _m(&acc, "dominance")), + fee: SentPattern::new(client.clone(), _m(&acc, "fee")), + subsidy: SentPattern::new(client.clone(), _m(&acc, "subsidy")), } } } /// Pattern struct for repeated tree structure. pub struct BitcoinPattern { - pub average: Indexes4, - pub base: Indexes2, - pub cumulative: Indexes3, - pub max: Indexes4, - pub median: Indexes5, - pub min: Indexes4, - pub pct10: Indexes5, - pub pct25: Indexes5, - pub pct75: Indexes5, - pub pct90: Indexes5, - pub sum: Indexes4, + pub average: MetricPattern2, + pub base: MetricPattern25, + pub cumulative: MetricPattern1, + pub max: MetricPattern2, + pub median: MetricPattern21, + pub min: MetricPattern2, + pub pct10: MetricPattern21, + pub pct25: MetricPattern21, + pub pct75: MetricPattern21, + pub pct90: MetricPattern21, + pub sum: MetricPattern2, } impl BitcoinPattern { - pub fn new(client: Arc, base_path: &str) -> Self { + /// Create a new pattern node with accumulated metric name. 
+ pub fn new(client: Arc, acc: String) -> Self { Self { - average: Indexes4::new(client.clone(), &format!("{base_path}_average")), - base: Indexes2::new(client.clone(), &format!("{base_path}_base")), - cumulative: Indexes3::new(client.clone(), &format!("{base_path}_cumulative")), - max: Indexes4::new(client.clone(), &format!("{base_path}_max")), - median: Indexes5::new(client.clone(), &format!("{base_path}_median")), - min: Indexes4::new(client.clone(), &format!("{base_path}_min")), - pct10: Indexes5::new(client.clone(), &format!("{base_path}_pct10")), - pct25: Indexes5::new(client.clone(), &format!("{base_path}_pct25")), - pct75: Indexes5::new(client.clone(), &format!("{base_path}_pct75")), - pct90: Indexes5::new(client.clone(), &format!("{base_path}_pct90")), - sum: Indexes4::new(client.clone(), &format!("{base_path}_sum")), - } - } -} - -/// Pattern struct for repeated tree structure. -pub struct BlockSizePattern { - pub average: Indexes4, - pub cumulative: Indexes3, - pub max: Indexes4, - pub median: Indexes5, - pub min: Indexes4, - pub pct10: Indexes5, - pub pct25: Indexes5, - pub pct75: Indexes5, - pub pct90: Indexes5, - pub sum: Indexes4, -} - -impl BlockSizePattern { - pub fn new(client: Arc, base_path: &str) -> Self { - Self { - average: Indexes4::new(client.clone(), &format!("{base_path}_average")), - cumulative: Indexes3::new(client.clone(), &format!("{base_path}_cumulative")), - max: Indexes4::new(client.clone(), &format!("{base_path}_max")), - median: Indexes5::new(client.clone(), &format!("{base_path}_median")), - min: Indexes4::new(client.clone(), &format!("{base_path}_min")), - pct10: Indexes5::new(client.clone(), &format!("{base_path}_pct10")), - pct25: Indexes5::new(client.clone(), &format!("{base_path}_pct25")), - pct75: Indexes5::new(client.clone(), &format!("{base_path}_pct75")), - pct90: Indexes5::new(client.clone(), &format!("{base_path}_pct90")), - sum: Indexes4::new(client.clone(), &format!("{base_path}_sum")), - } - } -} - -/// Pattern 
struct for repeated tree structure. -pub struct UnrealizedPattern { - pub neg_unrealized_loss: Indexes26, - pub net_unrealized_pnl: Indexes26, - pub supply_in_loss: SupplyPattern, - pub supply_in_loss_value: SupplyValuePattern, - pub supply_in_profit: SupplyPattern, - pub supply_in_profit_value: SupplyValuePattern, - pub total_unrealized_pnl: Indexes26, - pub unrealized_loss: Indexes26, - pub unrealized_profit: Indexes26, -} - -impl UnrealizedPattern { - pub fn new(client: Arc, base_path: &str) -> Self { - Self { - neg_unrealized_loss: Indexes26::new(client.clone(), &format!("{base_path}_neg_unrealized_loss")), - net_unrealized_pnl: Indexes26::new(client.clone(), &format!("{base_path}_net_unrealized_pnl")), - supply_in_loss: SupplyPattern::new(client.clone(), &format!("{base_path}_supply_in_loss")), - supply_in_loss_value: SupplyValuePattern::new(client.clone(), &format!("{base_path}_supply_in_loss_value")), - supply_in_profit: SupplyPattern::new(client.clone(), &format!("{base_path}_supply_in_profit")), - supply_in_profit_value: SupplyValuePattern::new(client.clone(), &format!("{base_path}_supply_in_profit_value")), - total_unrealized_pnl: Indexes26::new(client.clone(), &format!("{base_path}_total_unrealized_pnl")), - unrealized_loss: Indexes26::new(client.clone(), &format!("{base_path}_unrealized_loss")), - unrealized_profit: Indexes26::new(client.clone(), &format!("{base_path}_unrealized_profit")), + average: MetricPattern2::new(client.clone(), _m(&acc, "avg")), + base: MetricPattern25::new(client.clone(), acc.clone()), + cumulative: MetricPattern1::new(client.clone(), _m(&acc, "cumulative")), + max: MetricPattern2::new(client.clone(), _m(&acc, "max")), + median: MetricPattern21::new(client.clone(), _m(&acc, "median")), + min: MetricPattern2::new(client.clone(), _m(&acc, "min")), + pct10: MetricPattern21::new(client.clone(), _m(&acc, "pct10")), + pct25: MetricPattern21::new(client.clone(), _m(&acc, "pct25")), + pct75: MetricPattern21::new(client.clone(), 
_m(&acc, "pct75")), + pct90: MetricPattern21::new(client.clone(), _m(&acc, "pct90")), + sum: MetricPattern2::new(client.clone(), _m(&acc, "sum")), } } } /// Pattern struct for repeated tree structure. pub struct RelativePattern { - pub neg_unrealized_loss_rel_to_market_cap: Indexes27, - pub net_unrealized_pnl_rel_to_market_cap: Indexes26, - pub supply_in_loss_rel_to_circulating_supply: Indexes27, - pub supply_in_loss_rel_to_own_supply: Indexes27, - pub supply_in_profit_rel_to_circulating_supply: Indexes27, - pub supply_in_profit_rel_to_own_supply: Indexes27, - pub supply_rel_to_circulating_supply: Indexes, - pub unrealized_loss_rel_to_market_cap: Indexes27, - pub unrealized_profit_rel_to_market_cap: Indexes27, + pub neg_unrealized_loss_rel_to_market_cap: MetricPattern5, + pub net_unrealized_pnl_rel_to_market_cap: MetricPattern3, + pub nupl: MetricPattern4, + pub supply_in_loss_rel_to_circulating_supply: MetricPattern5, + pub supply_in_loss_rel_to_own_supply: MetricPattern5, + pub supply_in_profit_rel_to_circulating_supply: MetricPattern5, + pub supply_in_profit_rel_to_own_supply: MetricPattern5, + pub supply_rel_to_circulating_supply: MetricPattern4, + pub unrealized_loss_rel_to_market_cap: MetricPattern5, + pub unrealized_profit_rel_to_market_cap: MetricPattern5, } impl RelativePattern { - pub fn new(client: Arc, base_path: &str) -> Self { + /// Create a new pattern node with accumulated metric name. 
+ pub fn new(client: Arc, acc: String) -> Self { Self { - neg_unrealized_loss_rel_to_market_cap: Indexes27::new(client.clone(), &format!("{base_path}_neg_unrealized_loss_rel_to_market_cap")), - net_unrealized_pnl_rel_to_market_cap: Indexes26::new(client.clone(), &format!("{base_path}_net_unrealized_pnl_rel_to_market_cap")), - supply_in_loss_rel_to_circulating_supply: Indexes27::new(client.clone(), &format!("{base_path}_supply_in_loss_rel_to_circulating_supply")), - supply_in_loss_rel_to_own_supply: Indexes27::new(client.clone(), &format!("{base_path}_supply_in_loss_rel_to_own_supply")), - supply_in_profit_rel_to_circulating_supply: Indexes27::new(client.clone(), &format!("{base_path}_supply_in_profit_rel_to_circulating_supply")), - supply_in_profit_rel_to_own_supply: Indexes27::new(client.clone(), &format!("{base_path}_supply_in_profit_rel_to_own_supply")), - supply_rel_to_circulating_supply: Indexes::new(client.clone(), &format!("{base_path}_supply_rel_to_circulating_supply")), - unrealized_loss_rel_to_market_cap: Indexes27::new(client.clone(), &format!("{base_path}_unrealized_loss_rel_to_market_cap")), - unrealized_profit_rel_to_market_cap: Indexes27::new(client.clone(), &format!("{base_path}_unrealized_profit_rel_to_market_cap")), + neg_unrealized_loss_rel_to_market_cap: MetricPattern5::new(client.clone(), _m(&acc, "neg_unrealized_loss_rel_to_market_cap")), + net_unrealized_pnl_rel_to_market_cap: MetricPattern3::new(client.clone(), _m(&acc, "net_unrealized_pnl_rel_to_market_cap")), + nupl: MetricPattern4::new(client.clone(), _m(&acc, "nupl")), + supply_in_loss_rel_to_circulating_supply: MetricPattern5::new(client.clone(), _m(&acc, "supply_in_loss_rel_to_circulating_supply")), + supply_in_loss_rel_to_own_supply: MetricPattern5::new(client.clone(), _m(&acc, "supply_in_loss_rel_to_own_supply")), + supply_in_profit_rel_to_circulating_supply: MetricPattern5::new(client.clone(), _m(&acc, "supply_in_profit_rel_to_circulating_supply")), + 
supply_in_profit_rel_to_own_supply: MetricPattern5::new(client.clone(), _m(&acc, "supply_in_profit_rel_to_own_supply")), + supply_rel_to_circulating_supply: MetricPattern4::new(client.clone(), _m(&acc, "supply_rel_to_circulating_supply")), + unrealized_loss_rel_to_market_cap: MetricPattern5::new(client.clone(), _m(&acc, "unrealized_loss_rel_to_market_cap")), + unrealized_profit_rel_to_market_cap: MetricPattern5::new(client.clone(), _m(&acc, "unrealized_profit_rel_to_market_cap")), } } } /// Pattern struct for repeated tree structure. -pub struct AddresstypeToHeightToAddrCountPattern { - pub p2a: Indexes14, - pub p2pk33: Indexes15, - pub p2pk65: Indexes16, - pub p2pkh: Indexes17, - pub p2sh: Indexes18, - pub p2tr: Indexes19, - pub p2wpkh: Indexes20, - pub p2wsh: Indexes21, +pub struct BlockSizePattern { + pub average: MetricPattern1, + pub cumulative: MetricPattern1, + pub max: MetricPattern1, + pub median: MetricPattern25, + pub min: MetricPattern1, + pub pct10: MetricPattern25, + pub pct25: MetricPattern25, + pub pct75: MetricPattern25, + pub pct90: MetricPattern25, + pub sum: MetricPattern1, } -impl AddresstypeToHeightToAddrCountPattern { - pub fn new(client: Arc, base_path: &str) -> Self { +impl BlockSizePattern { + /// Create a new pattern node with accumulated metric name. 
+ pub fn new(client: Arc, acc: String) -> Self { Self { - p2a: Indexes14::new(client.clone(), &format!("{base_path}_p2a")), - p2pk33: Indexes15::new(client.clone(), &format!("{base_path}_p2pk33")), - p2pk65: Indexes16::new(client.clone(), &format!("{base_path}_p2pk65")), - p2pkh: Indexes17::new(client.clone(), &format!("{base_path}_p2pkh")), - p2sh: Indexes18::new(client.clone(), &format!("{base_path}_p2sh")), - p2tr: Indexes19::new(client.clone(), &format!("{base_path}_p2tr")), - p2wpkh: Indexes20::new(client.clone(), &format!("{base_path}_p2wpkh")), - p2wsh: Indexes21::new(client.clone(), &format!("{base_path}_p2wsh")), + average: MetricPattern1::new(client.clone(), _m(&acc, "avg")), + cumulative: MetricPattern1::new(client.clone(), _m(&acc, "cumulative")), + max: MetricPattern1::new(client.clone(), _m(&acc, "max")), + median: MetricPattern25::new(client.clone(), _m(&acc, "median")), + min: MetricPattern1::new(client.clone(), _m(&acc, "min")), + pct10: MetricPattern25::new(client.clone(), _m(&acc, "pct10")), + pct25: MetricPattern25::new(client.clone(), _m(&acc, "pct25")), + pct75: MetricPattern25::new(client.clone(), _m(&acc, "pct75")), + pct90: MetricPattern25::new(client.clone(), _m(&acc, "pct90")), + sum: MetricPattern1::new(client.clone(), _m(&acc, "sum")), + } + } +} + +/// Pattern struct for repeated tree structure. +pub struct UnrealizedPattern { + pub neg_unrealized_loss: MetricPattern3, + pub net_unrealized_pnl: MetricPattern3, + pub supply_in_loss: SupplyPattern2, + pub supply_in_loss_value: SupplyValuePattern, + pub supply_in_profit: SupplyPattern2, + pub supply_in_profit_value: SupplyValuePattern, + pub total_unrealized_pnl: MetricPattern3, + pub unrealized_loss: MetricPattern3, + pub unrealized_profit: MetricPattern3, +} + +impl UnrealizedPattern { + /// Create a new pattern node with accumulated metric name. 
+ pub fn new(client: Arc, acc: String) -> Self { + Self { + neg_unrealized_loss: MetricPattern3::new(client.clone(), _m(&acc, "neg_unrealized_loss")), + net_unrealized_pnl: MetricPattern3::new(client.clone(), _m(&acc, "net_unrealized_pnl")), + supply_in_loss: SupplyPattern2::new(client.clone(), _m(&acc, "supply_in_loss")), + supply_in_loss_value: SupplyValuePattern::new(client.clone(), _m(&acc, "supply_in_loss")), + supply_in_profit: SupplyPattern2::new(client.clone(), _m(&acc, "supply_in_profit")), + supply_in_profit_value: SupplyValuePattern::new(client.clone(), _m(&acc, "supply_in_profit")), + total_unrealized_pnl: MetricPattern3::new(client.clone(), _m(&acc, "total_unrealized_pnl")), + unrealized_loss: MetricPattern3::new(client.clone(), _m(&acc, "unrealized_loss")), + unrealized_profit: MetricPattern3::new(client.clone(), _m(&acc, "unrealized_profit")), } } } /// Pattern struct for repeated tree structure. pub struct Constant0Pattern { - pub dateindex: Indexes5, - pub decadeindex: Indexes7, - pub height: Indexes2, - pub monthindex: Indexes8, - pub quarterindex: Indexes9, - pub semesterindex: Indexes10, - pub weekindex: Indexes11, - pub yearindex: Indexes12, + pub dateindex: MetricPattern21, + pub decadeindex: MetricPattern22, + pub height: MetricPattern25, + pub monthindex: MetricPattern27, + pub quarterindex: MetricPattern39, + pub semesterindex: MetricPattern40, + pub weekindex: MetricPattern43, + pub yearindex: MetricPattern44, } impl Constant0Pattern { /// Create a new pattern node with accumulated metric name. 
- pub fn new(client: Arc, acc: &str) -> Self { + pub fn new(client: Arc, acc: String) -> Self { Self { - dateindex: Indexes5::new(client.clone(), &acc.to_string()), - decadeindex: Indexes7::new(client.clone(), &acc.to_string()), - height: Indexes2::new(client.clone(), &acc.to_string()), - monthindex: Indexes8::new(client.clone(), &acc.to_string()), - quarterindex: Indexes9::new(client.clone(), &acc.to_string()), - semesterindex: Indexes10::new(client.clone(), &acc.to_string()), - weekindex: Indexes11::new(client.clone(), &acc.to_string()), - yearindex: Indexes12::new(client.clone(), &acc.to_string()), + dateindex: MetricPattern21::new(client.clone(), acc.clone()), + decadeindex: MetricPattern22::new(client.clone(), acc.clone()), + height: MetricPattern25::new(client.clone(), acc.clone()), + monthindex: MetricPattern27::new(client.clone(), acc.clone()), + quarterindex: MetricPattern39::new(client.clone(), acc.clone()), + semesterindex: MetricPattern40::new(client.clone(), acc.clone()), + weekindex: MetricPattern43::new(client.clone(), acc.clone()), + yearindex: MetricPattern44::new(client.clone(), acc.clone()), + } + } +} + +/// Pattern struct for repeated tree structure. +pub struct AddresstypeToHeightToAddrCountPattern { + pub p2a: MetricPattern25, + pub p2pk33: MetricPattern25, + pub p2pk65: MetricPattern25, + pub p2pkh: MetricPattern25, + pub p2sh: MetricPattern25, + pub p2tr: MetricPattern25, + pub p2wpkh: MetricPattern25, + pub p2wsh: MetricPattern25, +} + +impl AddresstypeToHeightToAddrCountPattern { + /// Create a new pattern node with accumulated metric name. 
+ pub fn new(client: Arc, acc: String) -> Self { + Self { + p2a: MetricPattern25::new(client.clone(), if acc.is_empty() { "p2a".to_string() } else { format!("p2a_{acc}") }), + p2pk33: MetricPattern25::new(client.clone(), if acc.is_empty() { "p2pk33".to_string() } else { format!("p2pk33_{acc}") }), + p2pk65: MetricPattern25::new(client.clone(), if acc.is_empty() { "p2pk65".to_string() } else { format!("p2pk65_{acc}") }), + p2pkh: MetricPattern25::new(client.clone(), if acc.is_empty() { "p2pkh".to_string() } else { format!("p2pkh_{acc}") }), + p2sh: MetricPattern25::new(client.clone(), if acc.is_empty() { "p2sh".to_string() } else { format!("p2sh_{acc}") }), + p2tr: MetricPattern25::new(client.clone(), if acc.is_empty() { "p2tr".to_string() } else { format!("p2tr_{acc}") }), + p2wpkh: MetricPattern25::new(client.clone(), if acc.is_empty() { "p2wpkh".to_string() } else { format!("p2wpkh_{acc}") }), + p2wsh: MetricPattern25::new(client.clone(), if acc.is_empty() { "p2wsh".to_string() } else { format!("p2wsh_{acc}") }), } } } /// Pattern struct for repeated tree structure. pub struct BlockIntervalPattern { - pub average: Indexes4, - pub max: Indexes4, - pub median: Indexes5, - pub min: Indexes4, - pub pct10: Indexes5, - pub pct25: Indexes5, - pub pct75: Indexes5, - pub pct90: Indexes5, + pub average: MetricPattern1, + pub max: MetricPattern1, + pub median: MetricPattern25, + pub min: MetricPattern1, + pub pct10: MetricPattern25, + pub pct25: MetricPattern25, + pub pct75: MetricPattern25, + pub pct90: MetricPattern25, } impl BlockIntervalPattern { /// Create a new pattern node with accumulated metric name. 
- pub fn new(client: Arc, acc: &str) -> Self { + pub fn new(client: Arc, acc: String) -> Self { Self { - average: Indexes4::new(client.clone(), &format!("{acc}_avg")), - max: Indexes4::new(client.clone(), &format!("{acc}_max")), - median: Indexes5::new(client.clone(), &format!("{acc}_median")), - min: Indexes4::new(client.clone(), &format!("{acc}_min")), - pct10: Indexes5::new(client.clone(), &format!("{acc}_pct10")), - pct25: Indexes5::new(client.clone(), &format!("{acc}_pct25")), - pct75: Indexes5::new(client.clone(), &format!("{acc}_pct75")), - pct90: Indexes5::new(client.clone(), &format!("{acc}_pct90")), + average: MetricPattern1::new(client.clone(), _m(&acc, "avg")), + max: MetricPattern1::new(client.clone(), _m(&acc, "max")), + median: MetricPattern25::new(client.clone(), _m(&acc, "median")), + min: MetricPattern1::new(client.clone(), _m(&acc, "min")), + pct10: MetricPattern25::new(client.clone(), _m(&acc, "pct10")), + pct25: MetricPattern25::new(client.clone(), _m(&acc, "pct25")), + pct75: MetricPattern25::new(client.clone(), _m(&acc, "pct75")), + pct90: MetricPattern25::new(client.clone(), _m(&acc, "pct90")), } } } /// Pattern struct for repeated tree structure. pub struct _0satsPattern { - pub activity: ActivityPattern, - pub addr_count: Indexes3, - pub price_paid: PricePaidPattern, + pub activity: ActivityPattern2, + pub addr_count: MetricPattern1, + pub cost_basis: CostBasisPattern, pub realized: RealizedPattern, pub relative: RelativePattern, - pub supply: SupplyPattern2, + pub supply: SupplyPattern3, pub unrealized: UnrealizedPattern, } impl _0satsPattern { - pub fn new(client: Arc, base_path: &str) -> Self { + /// Create a new pattern node with accumulated metric name. 
+ pub fn new(client: Arc, acc: String) -> Self { Self { - activity: ActivityPattern::new(client.clone(), &format!("{base_path}_activity")), - addr_count: Indexes3::new(client.clone(), &format!("{base_path}_addr_count")), - price_paid: PricePaidPattern::new(client.clone(), &format!("{base_path}_price_paid")), - realized: RealizedPattern::new(client.clone(), &format!("{base_path}_realized")), - relative: RelativePattern::new(client.clone(), &format!("{base_path}_relative")), - supply: SupplyPattern2::new(client.clone(), &format!("{base_path}_supply")), - unrealized: UnrealizedPattern::new(client.clone(), &format!("{base_path}_unrealized")), + activity: ActivityPattern2::new(client.clone(), acc.clone()), + addr_count: MetricPattern1::new(client.clone(), _m(&acc, "addr_count")), + cost_basis: CostBasisPattern::new(client.clone(), acc.clone()), + realized: RealizedPattern::new(client.clone(), acc.clone()), + relative: RelativePattern::new(client.clone(), acc.clone()), + supply: SupplyPattern3::new(client.clone(), acc.clone()), + unrealized: UnrealizedPattern::new(client.clone(), acc.clone()), } } } /// Pattern struct for repeated tree structure. pub struct _0satsPattern2 { - pub activity: ActivityPattern, - pub price_paid: PricePaidPattern, + pub activity: ActivityPattern2, + pub cost_basis: CostBasisPattern, pub realized: RealizedPattern, pub relative: RelativePattern, - pub supply: SupplyPattern2, + pub supply: SupplyPattern3, pub unrealized: UnrealizedPattern, } impl _0satsPattern2 { - pub fn new(client: Arc, base_path: &str) -> Self { + /// Create a new pattern node with accumulated metric name. 
+ pub fn new(client: Arc, acc: String) -> Self { Self { - activity: ActivityPattern::new(client.clone(), &format!("{base_path}_activity")), - price_paid: PricePaidPattern::new(client.clone(), &format!("{base_path}_price_paid")), - realized: RealizedPattern::new(client.clone(), &format!("{base_path}_realized")), - relative: RelativePattern::new(client.clone(), &format!("{base_path}_relative")), - supply: SupplyPattern2::new(client.clone(), &format!("{base_path}_supply")), - unrealized: UnrealizedPattern::new(client.clone(), &format!("{base_path}_unrealized")), - } - } -} - -/// Pattern struct for repeated tree structure. -pub struct UpTo1dPattern { - pub activity: ActivityPattern, - pub price_paid: PricePaidPattern2, - pub realized: RealizedPattern3, - pub relative: RelativePattern2, - pub supply: SupplyPattern2, - pub unrealized: UnrealizedPattern, -} - -impl UpTo1dPattern { - pub fn new(client: Arc, base_path: &str) -> Self { - Self { - activity: ActivityPattern::new(client.clone(), &format!("{base_path}_activity")), - price_paid: PricePaidPattern2::new(client.clone(), &format!("{base_path}_price_paid")), - realized: RealizedPattern3::new(client.clone(), &format!("{base_path}_realized")), - relative: RelativePattern2::new(client.clone(), &format!("{base_path}_relative")), - supply: SupplyPattern2::new(client.clone(), &format!("{base_path}_supply")), - unrealized: UnrealizedPattern::new(client.clone(), &format!("{base_path}_unrealized")), + activity: ActivityPattern2::new(client.clone(), acc.clone()), + cost_basis: CostBasisPattern::new(client.clone(), acc.clone()), + realized: RealizedPattern::new(client.clone(), acc.clone()), + relative: RelativePattern::new(client.clone(), acc.clone()), + supply: SupplyPattern3::new(client.clone(), acc.clone()), + unrealized: UnrealizedPattern::new(client.clone(), acc.clone()), } } } /// Pattern struct for repeated tree structure. 
pub struct _10yTo12yPattern { - pub activity: ActivityPattern, - pub price_paid: PricePaidPattern2, + pub activity: ActivityPattern2, + pub cost_basis: CostBasisPattern2, pub realized: RealizedPattern2, pub relative: RelativePattern2, - pub supply: SupplyPattern2, + pub supply: SupplyPattern3, pub unrealized: UnrealizedPattern, } impl _10yTo12yPattern { - pub fn new(client: Arc, base_path: &str) -> Self { + /// Create a new pattern node with accumulated metric name. + pub fn new(client: Arc, acc: String) -> Self { Self { - activity: ActivityPattern::new(client.clone(), &format!("{base_path}_activity")), - price_paid: PricePaidPattern2::new(client.clone(), &format!("{base_path}_price_paid")), - realized: RealizedPattern2::new(client.clone(), &format!("{base_path}_realized")), - relative: RelativePattern2::new(client.clone(), &format!("{base_path}_relative")), - supply: SupplyPattern2::new(client.clone(), &format!("{base_path}_supply")), - unrealized: UnrealizedPattern::new(client.clone(), &format!("{base_path}_unrealized")), + activity: ActivityPattern2::new(client.clone(), acc.clone()), + cost_basis: CostBasisPattern2::new(client.clone(), acc.clone()), + realized: RealizedPattern2::new(client.clone(), acc.clone()), + relative: RelativePattern2::new(client.clone(), acc.clone()), + supply: SupplyPattern3::new(client.clone(), acc.clone()), + unrealized: UnrealizedPattern::new(client.clone(), acc.clone()), } } } /// Pattern struct for repeated tree structure. 
-pub struct ActivityPattern { - pub coinblocks_destroyed: BlockCountPattern, - pub coindays_destroyed: BlockCountPattern, - pub satblocks_destroyed: Indexes2, - pub satdays_destroyed: Indexes2, - pub sent: FeePattern2, +pub struct UpTo1dPattern { + pub activity: ActivityPattern2, + pub cost_basis: CostBasisPattern2, + pub realized: RealizedPattern3, + pub relative: RelativePattern2, + pub supply: SupplyPattern3, + pub unrealized: UnrealizedPattern, } -impl ActivityPattern { - pub fn new(client: Arc, base_path: &str) -> Self { +impl UpTo1dPattern { + /// Create a new pattern node with accumulated metric name. + pub fn new(client: Arc, acc: String) -> Self { Self { - coinblocks_destroyed: BlockCountPattern::new(client.clone(), &format!("{base_path}_coinblocks_destroyed")), - coindays_destroyed: BlockCountPattern::new(client.clone(), &format!("{base_path}_coindays_destroyed")), - satblocks_destroyed: Indexes2::new(client.clone(), &format!("{base_path}_satblocks_destroyed")), - satdays_destroyed: Indexes2::new(client.clone(), &format!("{base_path}_satdays_destroyed")), - sent: FeePattern2::new(client.clone(), &format!("{base_path}_sent")), + activity: ActivityPattern2::new(client.clone(), acc.clone()), + cost_basis: CostBasisPattern2::new(client.clone(), acc.clone()), + realized: RealizedPattern3::new(client.clone(), acc.clone()), + relative: RelativePattern2::new(client.clone(), acc.clone()), + supply: SupplyPattern3::new(client.clone(), acc.clone()), + unrealized: UnrealizedPattern::new(client.clone(), acc.clone()), + } + } +} + +/// Pattern struct for repeated tree structure. +pub struct SegwitAdoptionPattern { + pub average: MetricPattern2, + pub base: MetricPattern25, + pub cumulative: MetricPattern1, + pub max: MetricPattern2, + pub min: MetricPattern2, + pub sum: MetricPattern2, +} + +impl SegwitAdoptionPattern { + /// Create a new pattern node with accumulated metric name. 
+ pub fn new(client: Arc, acc: String) -> Self { + Self { + average: MetricPattern2::new(client.clone(), _m(&acc, "avg")), + base: MetricPattern25::new(client.clone(), acc.clone()), + cumulative: MetricPattern1::new(client.clone(), _m(&acc, "cumulative")), + max: MetricPattern2::new(client.clone(), _m(&acc, "max")), + min: MetricPattern2::new(client.clone(), _m(&acc, "min")), + sum: MetricPattern2::new(client.clone(), _m(&acc, "sum")), + } + } +} + +/// Pattern struct for repeated tree structure. +pub struct ActivityPattern2 { + pub coinblocks_destroyed: BlockCountPattern, + pub coindays_destroyed: BlockCountPattern, + pub satblocks_destroyed: MetricPattern25, + pub satdays_destroyed: MetricPattern25, + pub sent: SentPattern, +} + +impl ActivityPattern2 { + /// Create a new pattern node with accumulated metric name. + pub fn new(client: Arc, acc: String) -> Self { + Self { + coinblocks_destroyed: BlockCountPattern::new(client.clone(), _m(&acc, "coinblocks_destroyed")), + coindays_destroyed: BlockCountPattern::new(client.clone(), _m(&acc, "coindays_destroyed")), + satblocks_destroyed: MetricPattern25::new(client.clone(), _m(&acc, "satblocks_destroyed")), + satdays_destroyed: MetricPattern25::new(client.clone(), _m(&acc, "satdays_destroyed")), + sent: SentPattern::new(client.clone(), _m(&acc, "sent")), + } + } +} + +/// Pattern struct for repeated tree structure. +pub struct SupplyPattern3 { + pub supply: SupplyPattern2, + pub supply_half: ActiveSupplyPattern, + pub supply_half_value: ActiveSupplyPattern, + pub supply_value: SupplyValuePattern, + pub utxo_count: MetricPattern1, +} + +impl SupplyPattern3 { + /// Create a new pattern node with accumulated metric name. 
+ pub fn new(client: Arc, acc: String) -> Self { + Self { + supply: SupplyPattern2::new(client.clone(), _m(&acc, "supply")), + supply_half: ActiveSupplyPattern::new(client.clone(), _m(&acc, "supply_half")), + supply_half_value: ActiveSupplyPattern::new(client.clone(), _m(&acc, "supply_half")), + supply_value: SupplyValuePattern::new(client.clone(), _m(&acc, "supply")), + utxo_count: MetricPattern1::new(client.clone(), _m(&acc, "utxo_count")), + } + } +} + +/// Pattern struct for repeated tree structure. +pub struct SentPattern { + pub base: MetricPattern25, + pub bitcoin: BlockCountPattern, + pub dollars: BlockCountPattern, + pub sats: SatsPattern, +} + +impl SentPattern { + /// Create a new pattern node with accumulated metric name. + pub fn new(client: Arc, acc: String) -> Self { + Self { + base: MetricPattern25::new(client.clone(), acc.clone()), + bitcoin: BlockCountPattern::new(client.clone(), _m(&acc, "btc")), + dollars: BlockCountPattern::new(client.clone(), _m(&acc, "usd")), + sats: SatsPattern::new(client.clone(), acc.clone()), + } + } +} + +/// Pattern struct for repeated tree structure. +pub struct OpreturnPattern { + pub base: MetricPattern25, + pub bitcoin: BitcoinPattern2, + pub dollars: BitcoinPattern2, + pub sats: SatsPattern4, +} + +impl OpreturnPattern { + /// Create a new pattern node with accumulated metric name. + pub fn new(client: Arc, acc: String) -> Self { + Self { + base: MetricPattern25::new(client.clone(), acc.clone()), + bitcoin: BitcoinPattern2::new(client.clone(), _m(&acc, "btc")), + dollars: BitcoinPattern2::new(client.clone(), _m(&acc, "usd")), + sats: SatsPattern4::new(client.clone(), acc.clone()), } } } /// Pattern struct for repeated tree structure. 
pub struct SupplyPattern2 { - pub supply: SupplyPattern, - pub supply_half: ActiveSupplyPattern, - pub supply_half_value: ActiveSupplyPattern, - pub supply_value: SupplyValuePattern, - pub utxo_count: Indexes3, + pub base: MetricPattern25, + pub bitcoin: MetricPattern4, + pub dollars: MetricPattern4, + pub sats: MetricPattern4, } impl SupplyPattern2 { - pub fn new(client: Arc, base_path: &str) -> Self { + /// Create a new pattern node with accumulated metric name. + pub fn new(client: Arc, acc: String) -> Self { Self { - supply: SupplyPattern::new(client.clone(), &format!("{base_path}_supply")), - supply_half: ActiveSupplyPattern::new(client.clone(), &format!("{base_path}_supply_half")), - supply_half_value: ActiveSupplyPattern::new(client.clone(), &format!("{base_path}_supply_half_value")), - supply_value: SupplyValuePattern::new(client.clone(), &format!("{base_path}_supply_value")), - utxo_count: Indexes3::new(client.clone(), &format!("{base_path}_utxo_count")), - } - } -} - -/// Pattern struct for repeated tree structure. -pub struct SupplyPattern { - pub base: Indexes2, - pub bitcoin: Indexes, - pub dollars: Indexes, - pub sats: Indexes, -} - -impl SupplyPattern { - pub fn new(client: Arc, base_path: &str) -> Self { - Self { - base: Indexes2::new(client.clone(), &format!("{base_path}_base")), - bitcoin: Indexes::new(client.clone(), &format!("{base_path}_bitcoin")), - dollars: Indexes::new(client.clone(), &format!("{base_path}_dollars")), - sats: Indexes::new(client.clone(), &format!("{base_path}_sats")), - } - } -} - -/// Pattern struct for repeated tree structure. 
-pub struct FeePattern2 { - pub base: Indexes2, - pub bitcoin: BlockCountPattern, - pub dollars: BlockCountPattern, - pub sats: SatsPattern, -} - -impl FeePattern2 { - pub fn new(client: Arc, base_path: &str) -> Self { - Self { - base: Indexes2::new(client.clone(), &format!("{base_path}_base")), - bitcoin: BlockCountPattern::new(client.clone(), &format!("{base_path}_bitcoin")), - dollars: BlockCountPattern::new(client.clone(), &format!("{base_path}_dollars")), - sats: SatsPattern::new(client.clone(), &format!("{base_path}_sats")), - } - } -} - -/// Pattern struct for repeated tree structure. -pub struct CoinbasePattern { - pub bitcoin: BitcoinPattern, - pub dollars: BitcoinPattern, - pub sats: BitcoinPattern, -} - -impl CoinbasePattern { - pub fn new(client: Arc, base_path: &str) -> Self { - Self { - bitcoin: BitcoinPattern::new(client.clone(), &format!("{base_path}_bitcoin")), - dollars: BitcoinPattern::new(client.clone(), &format!("{base_path}_dollars")), - sats: BitcoinPattern::new(client.clone(), &format!("{base_path}_sats")), - } - } -} - -/// Pattern struct for repeated tree structure. -pub struct ActiveSupplyPattern { - pub bitcoin: Indexes3, - pub dollars: Indexes3, - pub sats: Indexes3, -} - -impl ActiveSupplyPattern { - pub fn new(client: Arc, base_path: &str) -> Self { - Self { - bitcoin: Indexes3::new(client.clone(), &format!("{base_path}_bitcoin")), - dollars: Indexes3::new(client.clone(), &format!("{base_path}_dollars")), - sats: Indexes3::new(client.clone(), &format!("{base_path}_sats")), - } - } -} - -/// Pattern struct for repeated tree structure. 
-pub struct PricePaidPattern2 { - pub max_price_paid: Indexes3, - pub min_price_paid: Indexes3, - pub price_percentiles: PricePercentilesPattern, -} - -impl PricePaidPattern2 { - pub fn new(client: Arc, base_path: &str) -> Self { - Self { - max_price_paid: Indexes3::new(client.clone(), &format!("{base_path}_max_price_paid")), - min_price_paid: Indexes3::new(client.clone(), &format!("{base_path}_min_price_paid")), - price_percentiles: PricePercentilesPattern::new(client.clone(), &format!("{base_path}_price_percentiles")), + base: MetricPattern25::new(client.clone(), acc.clone()), + bitcoin: MetricPattern4::new(client.clone(), _m(&acc, "btc")), + dollars: MetricPattern4::new(client.clone(), _m(&acc, "usd")), + sats: MetricPattern4::new(client.clone(), acc.clone()), } } } @@ -1539,117 +4204,212 @@ pub struct UnclaimedRewardsPattern { } impl UnclaimedRewardsPattern { - pub fn new(client: Arc, base_path: &str) -> Self { + /// Create a new pattern node with accumulated metric name. + pub fn new(client: Arc, acc: String) -> Self { Self { - bitcoin: BlockCountPattern::new(client.clone(), &format!("{base_path}_bitcoin")), - dollars: BlockCountPattern::new(client.clone(), &format!("{base_path}_dollars")), - sats: BlockCountPattern::new(client.clone(), &format!("{base_path}_sats")), + bitcoin: BlockCountPattern::new(client.clone(), _m(&acc, "btc")), + dollars: BlockCountPattern::new(client.clone(), _m(&acc, "usd")), + sats: BlockCountPattern::new(client.clone(), acc.clone()), + } + } +} + +/// Pattern struct for repeated tree structure. +pub struct CoinbasePattern { + pub bitcoin: BitcoinPattern, + pub dollars: BitcoinPattern, + pub sats: BitcoinPattern, +} + +impl CoinbasePattern { + /// Create a new pattern node with accumulated metric name. 
+ pub fn new(client: Arc, acc: String) -> Self { + Self { + bitcoin: BitcoinPattern::new(client.clone(), _m(&acc, "btc")), + dollars: BitcoinPattern::new(client.clone(), _m(&acc, "usd")), + sats: BitcoinPattern::new(client.clone(), acc.clone()), + } + } +} + +/// Pattern struct for repeated tree structure. +pub struct ActiveSupplyPattern { + pub bitcoin: MetricPattern1, + pub dollars: MetricPattern1, + pub sats: MetricPattern1, +} + +impl ActiveSupplyPattern { + /// Create a new pattern node with accumulated metric name. + pub fn new(client: Arc, acc: String) -> Self { + Self { + bitcoin: MetricPattern1::new(client.clone(), _m(&acc, "btc")), + dollars: MetricPattern1::new(client.clone(), _m(&acc, "usd")), + sats: MetricPattern1::new(client.clone(), acc.clone()), + } + } +} + +/// Pattern struct for repeated tree structure. +pub struct CostBasisPattern2 { + pub max_cost_basis: MetricPattern1, + pub min_cost_basis: MetricPattern1, + pub percentiles: PercentilesPattern, +} + +impl CostBasisPattern2 { + /// Create a new pattern node with accumulated metric name. + pub fn new(client: Arc, acc: String) -> Self { + Self { + max_cost_basis: MetricPattern1::new(client.clone(), _m(&acc, "max_cost_basis")), + min_cost_basis: MetricPattern1::new(client.clone(), _m(&acc, "min_cost_basis")), + percentiles: PercentilesPattern::new(client.clone(), _m(&acc, "cost_basis")), } } } /// Pattern struct for repeated tree structure. pub struct BlockCountPattern { - pub base: Indexes2, - pub cumulative: Indexes3, - pub sum: Indexes4, + pub base: MetricPattern25, + pub cumulative: MetricPattern1, + pub sum: MetricPattern2, } impl BlockCountPattern { - pub fn new(client: Arc, base_path: &str) -> Self { - Self { - base: Indexes2::new(client.clone(), &format!("{base_path}_base")), - cumulative: Indexes3::new(client.clone(), &format!("{base_path}_cumulative")), - sum: Indexes4::new(client.clone(), &format!("{base_path}_sum")), - } - } -} - -/// Pattern struct for repeated tree structure. 
-pub struct SupplyValuePattern { - pub bitcoin: Indexes2, - pub dollars: Indexes2, -} - -impl SupplyValuePattern { - pub fn new(client: Arc, base_path: &str) -> Self { - Self { - bitcoin: Indexes2::new(client.clone(), &format!("{base_path}_bitcoin")), - dollars: Indexes2::new(client.clone(), &format!("{base_path}_dollars")), - } - } -} - -/// Pattern struct for repeated tree structure. -pub struct SatsPattern { - pub cumulative: Indexes3, - pub sum: Indexes4, -} - -impl SatsPattern { - pub fn new(client: Arc, base_path: &str) -> Self { - Self { - cumulative: Indexes3::new(client.clone(), &format!("{base_path}_cumulative")), - sum: Indexes4::new(client.clone(), &format!("{base_path}_sum")), - } - } -} - -/// Pattern struct for repeated tree structure. -pub struct _1dReturns1mSdPattern { - pub sd: Indexes, - pub sma: Indexes, -} - -impl _1dReturns1mSdPattern { /// Create a new pattern node with accumulated metric name. - pub fn new(client: Arc, acc: &str) -> Self { + pub fn new(client: Arc, acc: String) -> Self { Self { - sd: Indexes::new(client.clone(), &format!("{acc}_sd")), - sma: Indexes::new(client.clone(), &format!("{acc}_sma")), - } - } -} - -/// Pattern struct for repeated tree structure. -pub struct PricePaidPattern { - pub max_price_paid: Indexes3, - pub min_price_paid: Indexes3, -} - -impl PricePaidPattern { - pub fn new(client: Arc, base_path: &str) -> Self { - Self { - max_price_paid: Indexes3::new(client.clone(), &format!("{base_path}_max_price_paid")), - min_price_paid: Indexes3::new(client.clone(), &format!("{base_path}_min_price_paid")), + base: MetricPattern25::new(client.clone(), acc.clone()), + cumulative: MetricPattern1::new(client.clone(), _m(&acc, "cumulative")), + sum: MetricPattern2::new(client.clone(), _m(&acc, "sum")), } } } /// Pattern struct for repeated tree structure. 
pub struct BitcoinPattern2 { - pub base: Indexes2, - pub sum: Indexes4, + pub base: MetricPattern25, + pub cumulative: MetricPattern1, + pub last: MetricPattern2, } impl BitcoinPattern2 { - pub fn new(client: Arc, base_path: &str) -> Self { + /// Create a new pattern node with accumulated metric name. + pub fn new(client: Arc, acc: String) -> Self { Self { - base: Indexes2::new(client.clone(), &format!("{base_path}_base")), - sum: Indexes4::new(client.clone(), &format!("{base_path}_sum")), + base: MetricPattern25::new(client.clone(), acc.clone()), + cumulative: MetricPattern1::new(client.clone(), _m(&acc, "cumulative")), + last: MetricPattern2::new(client.clone(), acc.clone()), + } + } +} + +/// Pattern struct for repeated tree structure. +pub struct SatsPattern4 { + pub cumulative: MetricPattern1, + pub last: MetricPattern2, +} + +impl SatsPattern4 { + /// Create a new pattern node with accumulated metric name. + pub fn new(client: Arc, acc: String) -> Self { + Self { + cumulative: MetricPattern1::new(client.clone(), _m(&acc, "cumulative")), + last: MetricPattern2::new(client.clone(), acc.clone()), + } + } +} + +/// Pattern struct for repeated tree structure. +pub struct CostBasisPattern { + pub max_cost_basis: MetricPattern1, + pub min_cost_basis: MetricPattern1, +} + +impl CostBasisPattern { + /// Create a new pattern node with accumulated metric name. + pub fn new(client: Arc, acc: String) -> Self { + Self { + max_cost_basis: MetricPattern1::new(client.clone(), _m(&acc, "max_cost_basis")), + min_cost_basis: MetricPattern1::new(client.clone(), _m(&acc, "min_cost_basis")), + } + } +} + +/// Pattern struct for repeated tree structure. +pub struct SatsPattern { + pub cumulative: MetricPattern1, + pub sum: MetricPattern2, +} + +impl SatsPattern { + /// Create a new pattern node with accumulated metric name. 
+ pub fn new(client: Arc, acc: String) -> Self { + Self { + cumulative: MetricPattern1::new(client.clone(), _m(&acc, "cumulative")), + sum: MetricPattern2::new(client.clone(), acc.clone()), + } + } +} + +/// Pattern struct for repeated tree structure. +pub struct _1dReturns1mSdPattern { + pub sd: MetricPattern4, + pub sma: MetricPattern4, +} + +impl _1dReturns1mSdPattern { + /// Create a new pattern node with accumulated metric name. + pub fn new(client: Arc, acc: String) -> Self { + Self { + sd: MetricPattern4::new(client.clone(), _m(&acc, "sd")), + sma: MetricPattern4::new(client.clone(), _m(&acc, "sma")), + } + } +} + +/// Pattern struct for repeated tree structure. +pub struct SupplyValuePattern { + pub bitcoin: MetricPattern25, + pub dollars: MetricPattern25, +} + +impl SupplyValuePattern { + /// Create a new pattern node with accumulated metric name. + pub fn new(client: Arc, acc: String) -> Self { + Self { + bitcoin: MetricPattern25::new(client.clone(), _m(&acc, "btc")), + dollars: MetricPattern25::new(client.clone(), _m(&acc, "usd")), + } + } +} + +/// Pattern struct for repeated tree structure. +pub struct TotalRealizedPnlPattern { + pub base: MetricPattern25, + pub sum: MetricPattern2, +} + +impl TotalRealizedPnlPattern { + /// Create a new pattern node with accumulated metric name. + pub fn new(client: Arc, acc: String) -> Self { + Self { + base: MetricPattern25::new(client.clone(), acc.clone()), + sum: MetricPattern2::new(client.clone(), _m(&acc, "sum")), } } } /// Pattern struct for repeated tree structure. pub struct RealizedPriceExtraPattern { - pub ratio: Indexes, + pub ratio: MetricPattern4, } impl RealizedPriceExtraPattern { - pub fn new(client: Arc, base_path: &str) -> Self { + /// Create a new pattern node with accumulated metric name. 
+ pub fn new(client: Arc, acc: String) -> Self { Self { - ratio: Indexes::new(client.clone(), &format!("{base_path}_ratio")), + ratio: MetricPattern4::new(client.clone(), _m(&acc, "ratio")), } } } @@ -1663,460 +4423,416 @@ pub struct CatalogTree { } impl CatalogTree { - pub fn new(client: Arc, base_path: &str) -> Self { + pub fn new(client: Arc, base_path: String) -> Self { Self { - computed: CatalogTree_Computed::new(client.clone(), &format!("{base_path}_computed")), - indexed: CatalogTree_Indexed::new(client.clone(), &format!("{base_path}_indexed")), + computed: CatalogTree_Computed::new(client.clone(), format!("{base_path}_computed")), + indexed: CatalogTree_Indexed::new(client.clone(), format!("{base_path}_indexed")), } } } /// Catalog tree node. pub struct CatalogTree_Computed { - pub blks: CatalogTree_Computed_Blks, - pub chain: CatalogTree_Computed_Chain, + pub blocks: CatalogTree_Computed_Blocks, pub cointime: CatalogTree_Computed_Cointime, pub constants: CatalogTree_Computed_Constants, - pub fetched: CatalogTree_Computed_Fetched, + pub distribution: CatalogTree_Computed_Distribution, pub indexes: CatalogTree_Computed_Indexes, + pub inputs: CatalogTree_Computed_Inputs, pub market: CatalogTree_Computed_Market, + pub outputs: CatalogTree_Computed_Outputs, pub pools: CatalogTree_Computed_Pools, + pub positions: CatalogTree_Computed_Positions, pub price: CatalogTree_Computed_Price, - pub stateful: CatalogTree_Computed_Stateful, - pub txins: CatalogTree_Computed_Txins, - pub txouts: CatalogTree_Computed_Txouts, + pub scripts: CatalogTree_Computed_Scripts, + pub supply: CatalogTree_Computed_Supply, + pub transactions: CatalogTree_Computed_Transactions, } impl CatalogTree_Computed { - pub fn new(client: Arc, base_path: &str) -> Self { + pub fn new(client: Arc, base_path: String) -> Self { Self { - blks: CatalogTree_Computed_Blks::new(client.clone(), &format!("{base_path}_blks")), - chain: CatalogTree_Computed_Chain::new(client.clone(), 
&format!("{base_path}_chain")), - cointime: CatalogTree_Computed_Cointime::new(client.clone(), &format!("{base_path}_cointime")), - constants: CatalogTree_Computed_Constants::new(client.clone(), &format!("{base_path}_constants")), - fetched: CatalogTree_Computed_Fetched::new(client.clone(), &format!("{base_path}_fetched")), - indexes: CatalogTree_Computed_Indexes::new(client.clone(), &format!("{base_path}_indexes")), - market: CatalogTree_Computed_Market::new(client.clone(), &format!("{base_path}_market")), - pools: CatalogTree_Computed_Pools::new(client.clone(), &format!("{base_path}_pools")), - price: CatalogTree_Computed_Price::new(client.clone(), &format!("{base_path}_price")), - stateful: CatalogTree_Computed_Stateful::new(client.clone(), &format!("{base_path}_stateful")), - txins: CatalogTree_Computed_Txins::new(client.clone(), &format!("{base_path}_txins")), - txouts: CatalogTree_Computed_Txouts::new(client.clone(), &format!("{base_path}_txouts")), + blocks: CatalogTree_Computed_Blocks::new(client.clone(), format!("{base_path}_blocks")), + cointime: CatalogTree_Computed_Cointime::new(client.clone(), format!("{base_path}_cointime")), + constants: CatalogTree_Computed_Constants::new(client.clone(), format!("{base_path}_constants")), + distribution: CatalogTree_Computed_Distribution::new(client.clone(), format!("{base_path}_distribution")), + indexes: CatalogTree_Computed_Indexes::new(client.clone(), format!("{base_path}_indexes")), + inputs: CatalogTree_Computed_Inputs::new(client.clone(), format!("{base_path}_inputs")), + market: CatalogTree_Computed_Market::new(client.clone(), format!("{base_path}_market")), + outputs: CatalogTree_Computed_Outputs::new(client.clone(), format!("{base_path}_outputs")), + pools: CatalogTree_Computed_Pools::new(client.clone(), format!("{base_path}_pools")), + positions: CatalogTree_Computed_Positions::new(client.clone(), format!("{base_path}_positions")), + price: CatalogTree_Computed_Price::new(client.clone(), 
format!("{base_path}_price")), + scripts: CatalogTree_Computed_Scripts::new(client.clone(), format!("{base_path}_scripts")), + supply: CatalogTree_Computed_Supply::new(client.clone(), format!("{base_path}_supply")), + transactions: CatalogTree_Computed_Transactions::new(client.clone(), format!("{base_path}_transactions")), } } } /// Catalog tree node. -pub struct CatalogTree_Computed_Blks { - pub position: MetricNode, +pub struct CatalogTree_Computed_Blocks { + pub count: CatalogTree_Computed_Blocks_Count, + pub difficulty: CatalogTree_Computed_Blocks_Difficulty, + pub halving: CatalogTree_Computed_Blocks_Halving, + pub interval: CatalogTree_Computed_Blocks_Interval, + pub mining: CatalogTree_Computed_Blocks_Mining, + pub rewards: CatalogTree_Computed_Blocks_Rewards, + pub size: CatalogTree_Computed_Blocks_Size, + pub time: CatalogTree_Computed_Blocks_Time, + pub weight: CatalogTree_Computed_Blocks_Weight, } -impl CatalogTree_Computed_Blks { - pub fn new(client: Arc, base_path: &str) -> Self { +impl CatalogTree_Computed_Blocks { + pub fn new(client: Arc, base_path: String) -> Self { Self { - position: MetricNode::new(client.clone(), format!("{base_path}_position")), + count: CatalogTree_Computed_Blocks_Count::new(client.clone(), format!("{base_path}_count")), + difficulty: CatalogTree_Computed_Blocks_Difficulty::new(client.clone(), format!("{base_path}_difficulty")), + halving: CatalogTree_Computed_Blocks_Halving::new(client.clone(), format!("{base_path}_halving")), + interval: CatalogTree_Computed_Blocks_Interval::new(client.clone(), format!("{base_path}_interval")), + mining: CatalogTree_Computed_Blocks_Mining::new(client.clone(), format!("{base_path}_mining")), + rewards: CatalogTree_Computed_Blocks_Rewards::new(client.clone(), format!("{base_path}_rewards")), + size: CatalogTree_Computed_Blocks_Size::new(client.clone(), format!("{base_path}_size")), + time: CatalogTree_Computed_Blocks_Time::new(client.clone(), format!("{base_path}_time")), + weight: 
CatalogTree_Computed_Blocks_Weight::new(client.clone(), format!("{base_path}_weight")), } } } /// Catalog tree node. -pub struct CatalogTree_Computed_Chain { - pub block: CatalogTree_Computed_Chain_Block, - pub coinbase: CatalogTree_Computed_Chain_Coinbase, - pub epoch: CatalogTree_Computed_Chain_Epoch, - pub mining: CatalogTree_Computed_Chain_Mining, - pub output_type: CatalogTree_Computed_Chain_OutputType, - pub transaction: CatalogTree_Computed_Chain_Transaction, - pub volume: CatalogTree_Computed_Chain_Volume, -} - -impl CatalogTree_Computed_Chain { - pub fn new(client: Arc, base_path: &str) -> Self { - Self { - block: CatalogTree_Computed_Chain_Block::new(client.clone(), &format!("{base_path}_block")), - coinbase: CatalogTree_Computed_Chain_Coinbase::new(client.clone(), &format!("{base_path}_coinbase")), - epoch: CatalogTree_Computed_Chain_Epoch::new(client.clone(), &format!("{base_path}_epoch")), - mining: CatalogTree_Computed_Chain_Mining::new(client.clone(), &format!("{base_path}_mining")), - output_type: CatalogTree_Computed_Chain_OutputType::new(client.clone(), &format!("{base_path}_output_type")), - transaction: CatalogTree_Computed_Chain_Transaction::new(client.clone(), &format!("{base_path}_transaction")), - volume: CatalogTree_Computed_Chain_Volume::new(client.clone(), &format!("{base_path}_volume")), - } - } -} - -/// Catalog tree node. 
-pub struct CatalogTree_Computed_Chain_Block { - pub _1m_block_count: Indexes, - pub _1w_block_count: Indexes, - pub _1y_block_count: Indexes, - pub _24h_block_count: Indexes2, +pub struct CatalogTree_Computed_Blocks_Count { + pub _1m_block_count: MetricPattern4, + pub _1w_block_count: MetricPattern4, + pub _1y_block_count: MetricPattern4, + pub _24h_block_count: MetricPattern25, pub block_count: BlockCountPattern, - pub block_count_target: Indexes, - pub block_interval: BlockIntervalPattern, - pub block_size: BlockSizePattern, - pub block_vbytes: BlockSizePattern, - pub block_weight: BlockSizePattern, - pub interval: Indexes2, - pub timestamp: Indexes4, - pub vbytes: Indexes2, + pub block_count_target: MetricPattern4, } -impl CatalogTree_Computed_Chain_Block { - pub fn new(client: Arc, base_path: &str) -> Self { +impl CatalogTree_Computed_Blocks_Count { + pub fn new(client: Arc, base_path: String) -> Self { Self { - _1m_block_count: Indexes::new(client.clone(), &format!("{base_path}_1m_block_count")), - _1w_block_count: Indexes::new(client.clone(), &format!("{base_path}_1w_block_count")), - _1y_block_count: Indexes::new(client.clone(), &format!("{base_path}_1y_block_count")), - _24h_block_count: Indexes2::new(client.clone(), &format!("{base_path}_24h_block_count")), - block_count: BlockCountPattern::new(client.clone(), &format!("{base_path}_block_count")), - block_count_target: Indexes::new(client.clone(), &format!("{base_path}_block_count_target")), - block_interval: BlockIntervalPattern::new(client.clone(), "block_interval"), - block_size: BlockSizePattern::new(client.clone(), &format!("{base_path}_block_size")), - block_vbytes: BlockSizePattern::new(client.clone(), &format!("{base_path}_block_vbytes")), - block_weight: BlockSizePattern::new(client.clone(), &format!("{base_path}_block_weight")), - interval: Indexes2::new(client.clone(), &format!("{base_path}_interval")), - timestamp: Indexes4::new(client.clone(), &format!("{base_path}_timestamp")), - vbytes: 
Indexes2::new(client.clone(), &format!("{base_path}_vbytes")), + _1m_block_count: MetricPattern4::new(client.clone(), format!("{base_path}_1m_block_count")), + _1w_block_count: MetricPattern4::new(client.clone(), format!("{base_path}_1w_block_count")), + _1y_block_count: MetricPattern4::new(client.clone(), format!("{base_path}_1y_block_count")), + _24h_block_count: MetricPattern25::new(client.clone(), format!("{base_path}_24h_block_count")), + block_count: BlockCountPattern::new(client.clone(), "block_count".to_string()), + block_count_target: MetricPattern4::new(client.clone(), format!("{base_path}_block_count_target")), } } } /// Catalog tree node. -pub struct CatalogTree_Computed_Chain_Coinbase { - pub _24h_coinbase_sum: Indexes2, - pub _24h_coinbase_usd_sum: Indexes2, +pub struct CatalogTree_Computed_Blocks_Difficulty { + pub blocks_before_next_difficulty_adjustment: MetricPattern1, + pub days_before_next_difficulty_adjustment: MetricPattern1, + pub difficultyepoch: MetricPattern4, +} + +impl CatalogTree_Computed_Blocks_Difficulty { + pub fn new(client: Arc, base_path: String) -> Self { + Self { + blocks_before_next_difficulty_adjustment: MetricPattern1::new(client.clone(), format!("{base_path}_blocks_before_next_difficulty_adjustment")), + days_before_next_difficulty_adjustment: MetricPattern1::new(client.clone(), format!("{base_path}_days_before_next_difficulty_adjustment")), + difficultyepoch: MetricPattern4::new(client.clone(), format!("{base_path}_difficultyepoch")), + } + } +} + +/// Catalog tree node. 
+pub struct CatalogTree_Computed_Blocks_Halving { + pub blocks_before_next_halving: MetricPattern1, + pub days_before_next_halving: MetricPattern1, + pub halvingepoch: MetricPattern4, +} + +impl CatalogTree_Computed_Blocks_Halving { + pub fn new(client: Arc, base_path: String) -> Self { + Self { + blocks_before_next_halving: MetricPattern1::new(client.clone(), format!("{base_path}_blocks_before_next_halving")), + days_before_next_halving: MetricPattern1::new(client.clone(), format!("{base_path}_days_before_next_halving")), + halvingepoch: MetricPattern4::new(client.clone(), format!("{base_path}_halvingepoch")), + } + } +} + +/// Catalog tree node. +pub struct CatalogTree_Computed_Blocks_Interval { + pub block_interval: BlockIntervalPattern, + pub interval: MetricPattern25, +} + +impl CatalogTree_Computed_Blocks_Interval { + pub fn new(client: Arc, base_path: String) -> Self { + Self { + block_interval: BlockIntervalPattern::new(client.clone(), "block_interval".to_string()), + interval: MetricPattern25::new(client.clone(), format!("{base_path}_interval")), + } + } +} + +/// Catalog tree node. 
+pub struct CatalogTree_Computed_Blocks_Mining { + pub difficulty: MetricPattern2, + pub difficulty_adjustment: MetricPattern1, + pub difficulty_as_hash: MetricPattern1, + pub hash_price_phs: MetricPattern1, + pub hash_price_phs_min: MetricPattern1, + pub hash_price_rebound: MetricPattern1, + pub hash_price_ths: MetricPattern1, + pub hash_price_ths_min: MetricPattern1, + pub hash_rate: MetricPattern1, + pub hash_rate_1m_sma: MetricPattern4, + pub hash_rate_1w_sma: MetricPattern4, + pub hash_rate_1y_sma: MetricPattern4, + pub hash_rate_2m_sma: MetricPattern4, + pub hash_value_phs: MetricPattern1, + pub hash_value_phs_min: MetricPattern1, + pub hash_value_rebound: MetricPattern1, + pub hash_value_ths: MetricPattern1, + pub hash_value_ths_min: MetricPattern1, +} + +impl CatalogTree_Computed_Blocks_Mining { + pub fn new(client: Arc, base_path: String) -> Self { + Self { + difficulty: MetricPattern2::new(client.clone(), format!("{base_path}_difficulty")), + difficulty_adjustment: MetricPattern1::new(client.clone(), format!("{base_path}_difficulty_adjustment")), + difficulty_as_hash: MetricPattern1::new(client.clone(), format!("{base_path}_difficulty_as_hash")), + hash_price_phs: MetricPattern1::new(client.clone(), format!("{base_path}_hash_price_phs")), + hash_price_phs_min: MetricPattern1::new(client.clone(), format!("{base_path}_hash_price_phs_min")), + hash_price_rebound: MetricPattern1::new(client.clone(), format!("{base_path}_hash_price_rebound")), + hash_price_ths: MetricPattern1::new(client.clone(), format!("{base_path}_hash_price_ths")), + hash_price_ths_min: MetricPattern1::new(client.clone(), format!("{base_path}_hash_price_ths_min")), + hash_rate: MetricPattern1::new(client.clone(), format!("{base_path}_hash_rate")), + hash_rate_1m_sma: MetricPattern4::new(client.clone(), format!("{base_path}_hash_rate_1m_sma")), + hash_rate_1w_sma: MetricPattern4::new(client.clone(), format!("{base_path}_hash_rate_1w_sma")), + hash_rate_1y_sma: 
MetricPattern4::new(client.clone(), format!("{base_path}_hash_rate_1y_sma")), + hash_rate_2m_sma: MetricPattern4::new(client.clone(), format!("{base_path}_hash_rate_2m_sma")), + hash_value_phs: MetricPattern1::new(client.clone(), format!("{base_path}_hash_value_phs")), + hash_value_phs_min: MetricPattern1::new(client.clone(), format!("{base_path}_hash_value_phs_min")), + hash_value_rebound: MetricPattern1::new(client.clone(), format!("{base_path}_hash_value_rebound")), + hash_value_ths: MetricPattern1::new(client.clone(), format!("{base_path}_hash_value_ths")), + hash_value_ths_min: MetricPattern1::new(client.clone(), format!("{base_path}_hash_value_ths_min")), + } + } +} + +/// Catalog tree node. +pub struct CatalogTree_Computed_Blocks_Rewards { + pub _24h_coinbase_sum: MetricPattern25, + pub _24h_coinbase_usd_sum: MetricPattern25, pub coinbase: CoinbasePattern, - pub fee_dominance: Indexes5, - pub inflation_rate: Indexes, - pub puell_multiple: Indexes, + pub fee_dominance: MetricPattern21, pub subsidy: CoinbasePattern, - pub subsidy_dominance: Indexes5, - pub subsidy_usd_1y_sma: Indexes, + pub subsidy_dominance: MetricPattern21, + pub subsidy_usd_1y_sma: MetricPattern4, pub unclaimed_rewards: UnclaimedRewardsPattern, } -impl CatalogTree_Computed_Chain_Coinbase { - pub fn new(client: Arc, base_path: &str) -> Self { +impl CatalogTree_Computed_Blocks_Rewards { + pub fn new(client: Arc, base_path: String) -> Self { Self { - _24h_coinbase_sum: Indexes2::new(client.clone(), &format!("{base_path}_24h_coinbase_sum")), - _24h_coinbase_usd_sum: Indexes2::new(client.clone(), &format!("{base_path}_24h_coinbase_usd_sum")), - coinbase: CoinbasePattern::new(client.clone(), &format!("{base_path}_coinbase")), - fee_dominance: Indexes5::new(client.clone(), &format!("{base_path}_fee_dominance")), - inflation_rate: Indexes::new(client.clone(), &format!("{base_path}_inflation_rate")), - puell_multiple: Indexes::new(client.clone(), &format!("{base_path}_puell_multiple")), - subsidy: 
CoinbasePattern::new(client.clone(), &format!("{base_path}_subsidy")), - subsidy_dominance: Indexes5::new(client.clone(), &format!("{base_path}_subsidy_dominance")), - subsidy_usd_1y_sma: Indexes::new(client.clone(), &format!("{base_path}_subsidy_usd_1y_sma")), - unclaimed_rewards: UnclaimedRewardsPattern::new(client.clone(), &format!("{base_path}_unclaimed_rewards")), + _24h_coinbase_sum: MetricPattern25::new(client.clone(), format!("{base_path}_24h_coinbase_sum")), + _24h_coinbase_usd_sum: MetricPattern25::new(client.clone(), format!("{base_path}_24h_coinbase_usd_sum")), + coinbase: CoinbasePattern::new(client.clone(), "coinbase".to_string()), + fee_dominance: MetricPattern21::new(client.clone(), format!("{base_path}_fee_dominance")), + subsidy: CoinbasePattern::new(client.clone(), "subsidy".to_string()), + subsidy_dominance: MetricPattern21::new(client.clone(), format!("{base_path}_subsidy_dominance")), + subsidy_usd_1y_sma: MetricPattern4::new(client.clone(), format!("{base_path}_subsidy_usd_1y_sma")), + unclaimed_rewards: UnclaimedRewardsPattern::new(client.clone(), "unclaimed_rewards".to_string()), } } } /// Catalog tree node. 
-pub struct CatalogTree_Computed_Chain_Epoch { - pub blocks_before_next_difficulty_adjustment: Indexes3, - pub blocks_before_next_halving: Indexes3, - pub days_before_next_difficulty_adjustment: Indexes3, - pub days_before_next_halving: Indexes3, - pub difficultyepoch: Indexes, - pub halvingepoch: Indexes, +pub struct CatalogTree_Computed_Blocks_Size { + pub block_size: BlockSizePattern, + pub block_vbytes: BlockSizePattern, + pub vbytes: MetricPattern25, } -impl CatalogTree_Computed_Chain_Epoch { - pub fn new(client: Arc, base_path: &str) -> Self { +impl CatalogTree_Computed_Blocks_Size { + pub fn new(client: Arc, base_path: String) -> Self { Self { - blocks_before_next_difficulty_adjustment: Indexes3::new(client.clone(), &format!("{base_path}_blocks_before_next_difficulty_adjustment")), - blocks_before_next_halving: Indexes3::new(client.clone(), &format!("{base_path}_blocks_before_next_halving")), - days_before_next_difficulty_adjustment: Indexes3::new(client.clone(), &format!("{base_path}_days_before_next_difficulty_adjustment")), - days_before_next_halving: Indexes3::new(client.clone(), &format!("{base_path}_days_before_next_halving")), - difficultyepoch: Indexes::new(client.clone(), &format!("{base_path}_difficultyepoch")), - halvingepoch: Indexes::new(client.clone(), &format!("{base_path}_halvingepoch")), + block_size: BlockSizePattern::new(client.clone(), "block_size".to_string()), + block_vbytes: BlockSizePattern::new(client.clone(), "block_vbytes".to_string()), + vbytes: MetricPattern25::new(client.clone(), format!("{base_path}_vbytes")), } } } /// Catalog tree node. 
-pub struct CatalogTree_Computed_Chain_Mining { - pub difficulty: Indexes4, - pub difficulty_adjustment: Indexes3, - pub difficulty_as_hash: Indexes3, - pub hash_price_phs: Indexes3, - pub hash_price_phs_min: Indexes3, - pub hash_price_rebound: Indexes3, - pub hash_price_ths: Indexes3, - pub hash_price_ths_min: Indexes3, - pub hash_rate: Indexes3, - pub hash_rate_1m_sma: Indexes, - pub hash_rate_1w_sma: Indexes, - pub hash_rate_1y_sma: Indexes, - pub hash_rate_2m_sma: Indexes, - pub hash_value_phs: Indexes3, - pub hash_value_phs_min: Indexes3, - pub hash_value_rebound: Indexes3, - pub hash_value_ths: Indexes3, - pub hash_value_ths_min: Indexes3, +pub struct CatalogTree_Computed_Blocks_Time { + pub date: MetricPattern25, + pub date_fixed: MetricPattern25, + pub timestamp: MetricPattern2, + pub timestamp_fixed: MetricPattern25, } -impl CatalogTree_Computed_Chain_Mining { - pub fn new(client: Arc, base_path: &str) -> Self { +impl CatalogTree_Computed_Blocks_Time { + pub fn new(client: Arc, base_path: String) -> Self { Self { - difficulty: Indexes4::new(client.clone(), &format!("{base_path}_difficulty")), - difficulty_adjustment: Indexes3::new(client.clone(), &format!("{base_path}_difficulty_adjustment")), - difficulty_as_hash: Indexes3::new(client.clone(), &format!("{base_path}_difficulty_as_hash")), - hash_price_phs: Indexes3::new(client.clone(), &format!("{base_path}_hash_price_phs")), - hash_price_phs_min: Indexes3::new(client.clone(), &format!("{base_path}_hash_price_phs_min")), - hash_price_rebound: Indexes3::new(client.clone(), &format!("{base_path}_hash_price_rebound")), - hash_price_ths: Indexes3::new(client.clone(), &format!("{base_path}_hash_price_ths")), - hash_price_ths_min: Indexes3::new(client.clone(), &format!("{base_path}_hash_price_ths_min")), - hash_rate: Indexes3::new(client.clone(), &format!("{base_path}_hash_rate")), - hash_rate_1m_sma: Indexes::new(client.clone(), &format!("{base_path}_hash_rate_1m_sma")), - hash_rate_1w_sma: 
Indexes::new(client.clone(), &format!("{base_path}_hash_rate_1w_sma")), - hash_rate_1y_sma: Indexes::new(client.clone(), &format!("{base_path}_hash_rate_1y_sma")), - hash_rate_2m_sma: Indexes::new(client.clone(), &format!("{base_path}_hash_rate_2m_sma")), - hash_value_phs: Indexes3::new(client.clone(), &format!("{base_path}_hash_value_phs")), - hash_value_phs_min: Indexes3::new(client.clone(), &format!("{base_path}_hash_value_phs_min")), - hash_value_rebound: Indexes3::new(client.clone(), &format!("{base_path}_hash_value_rebound")), - hash_value_ths: Indexes3::new(client.clone(), &format!("{base_path}_hash_value_ths")), - hash_value_ths_min: Indexes3::new(client.clone(), &format!("{base_path}_hash_value_ths_min")), + date: MetricPattern25::new(client.clone(), format!("{base_path}_date")), + date_fixed: MetricPattern25::new(client.clone(), format!("{base_path}_date_fixed")), + timestamp: MetricPattern2::new(client.clone(), format!("{base_path}_timestamp")), + timestamp_fixed: MetricPattern25::new(client.clone(), format!("{base_path}_timestamp_fixed")), } } } /// Catalog tree node. 
-pub struct CatalogTree_Computed_Chain_OutputType { - pub emptyoutput_count: BitcoinPattern, - pub exact_utxo_count: Indexes3, - pub opreturn_count: BitcoinPattern, - pub p2a_count: BitcoinPattern, - pub p2ms_count: BitcoinPattern, - pub p2pk33_count: BitcoinPattern, - pub p2pk65_count: BitcoinPattern, - pub p2pkh_count: BitcoinPattern, - pub p2sh_count: BitcoinPattern, - pub p2tr_count: BitcoinPattern, - pub p2wpkh_count: BitcoinPattern, - pub p2wsh_count: BitcoinPattern, - pub unknownoutput_count: BitcoinPattern, +pub struct CatalogTree_Computed_Blocks_Weight { + pub block_fullness: BitcoinPattern, + pub block_weight: BlockSizePattern, } -impl CatalogTree_Computed_Chain_OutputType { - pub fn new(client: Arc, base_path: &str) -> Self { +impl CatalogTree_Computed_Blocks_Weight { + pub fn new(client: Arc, base_path: String) -> Self { Self { - emptyoutput_count: BitcoinPattern::new(client.clone(), &format!("{base_path}_emptyoutput_count")), - exact_utxo_count: Indexes3::new(client.clone(), &format!("{base_path}_exact_utxo_count")), - opreturn_count: BitcoinPattern::new(client.clone(), &format!("{base_path}_opreturn_count")), - p2a_count: BitcoinPattern::new(client.clone(), &format!("{base_path}_p2a_count")), - p2ms_count: BitcoinPattern::new(client.clone(), &format!("{base_path}_p2ms_count")), - p2pk33_count: BitcoinPattern::new(client.clone(), &format!("{base_path}_p2pk33_count")), - p2pk65_count: BitcoinPattern::new(client.clone(), &format!("{base_path}_p2pk65_count")), - p2pkh_count: BitcoinPattern::new(client.clone(), &format!("{base_path}_p2pkh_count")), - p2sh_count: BitcoinPattern::new(client.clone(), &format!("{base_path}_p2sh_count")), - p2tr_count: BitcoinPattern::new(client.clone(), &format!("{base_path}_p2tr_count")), - p2wpkh_count: BitcoinPattern::new(client.clone(), &format!("{base_path}_p2wpkh_count")), - p2wsh_count: BitcoinPattern::new(client.clone(), &format!("{base_path}_p2wsh_count")), - unknownoutput_count: BitcoinPattern::new(client.clone(), 
&format!("{base_path}_unknownoutput_count")), - } - } -} - -/// Catalog tree node. -pub struct CatalogTree_Computed_Chain_Transaction { - pub fee: CatalogTree_Computed_Chain_Transaction_Fee, - pub fee_rate: CatalogTree_Computed_Chain_Transaction_FeeRate, - pub input_count: BlockSizePattern, - pub input_value: Indexes6, - pub is_coinbase: Indexes6, - pub output_count: BlockSizePattern, - pub output_value: Indexes6, - pub tx_count: BitcoinPattern, - pub tx_v1: BlockCountPattern, - pub tx_v2: BlockCountPattern, - pub tx_v3: BlockCountPattern, - pub tx_vsize: BlockIntervalPattern, - pub tx_weight: BlockIntervalPattern, - pub vsize: Indexes6, - pub weight: Indexes6, -} - -impl CatalogTree_Computed_Chain_Transaction { - pub fn new(client: Arc, base_path: &str) -> Self { - Self { - fee: CatalogTree_Computed_Chain_Transaction_Fee::new(client.clone(), &format!("{base_path}_fee")), - fee_rate: CatalogTree_Computed_Chain_Transaction_FeeRate::new(client.clone(), &format!("{base_path}_fee_rate")), - input_count: BlockSizePattern::new(client.clone(), &format!("{base_path}_input_count")), - input_value: Indexes6::new(client.clone(), &format!("{base_path}_input_value")), - is_coinbase: Indexes6::new(client.clone(), &format!("{base_path}_is_coinbase")), - output_count: BlockSizePattern::new(client.clone(), &format!("{base_path}_output_count")), - output_value: Indexes6::new(client.clone(), &format!("{base_path}_output_value")), - tx_count: BitcoinPattern::new(client.clone(), &format!("{base_path}_tx_count")), - tx_v1: BlockCountPattern::new(client.clone(), &format!("{base_path}_tx_v1")), - tx_v2: BlockCountPattern::new(client.clone(), &format!("{base_path}_tx_v2")), - tx_v3: BlockCountPattern::new(client.clone(), &format!("{base_path}_tx_v3")), - tx_vsize: BlockIntervalPattern::new(client.clone(), "tx_vsize"), - tx_weight: BlockIntervalPattern::new(client.clone(), "tx_weight"), - vsize: Indexes6::new(client.clone(), &format!("{base_path}_vsize")), - weight: 
Indexes6::new(client.clone(), &format!("{base_path}_weight")), - } - } -} - -/// Catalog tree node. -pub struct CatalogTree_Computed_Chain_Transaction_Fee { - pub base: Indexes6, - pub bitcoin: BlockSizePattern, - pub bitcoin_txindex: Indexes6, - pub dollars: BlockSizePattern, - pub dollars_txindex: Indexes6, - pub sats: BlockSizePattern, -} - -impl CatalogTree_Computed_Chain_Transaction_Fee { - pub fn new(client: Arc, base_path: &str) -> Self { - Self { - base: Indexes6::new(client.clone(), &format!("{base_path}_base")), - bitcoin: BlockSizePattern::new(client.clone(), &format!("{base_path}_bitcoin")), - bitcoin_txindex: Indexes6::new(client.clone(), &format!("{base_path}_bitcoin_txindex")), - dollars: BlockSizePattern::new(client.clone(), &format!("{base_path}_dollars")), - dollars_txindex: Indexes6::new(client.clone(), &format!("{base_path}_dollars_txindex")), - sats: BlockSizePattern::new(client.clone(), &format!("{base_path}_sats")), - } - } -} - -/// Catalog tree node. -pub struct CatalogTree_Computed_Chain_Transaction_FeeRate { - pub average: Indexes3, - pub base: Indexes6, - pub max: Indexes3, - pub median: Indexes2, - pub min: Indexes3, - pub pct10: Indexes2, - pub pct25: Indexes2, - pub pct75: Indexes2, - pub pct90: Indexes2, -} - -impl CatalogTree_Computed_Chain_Transaction_FeeRate { - pub fn new(client: Arc, base_path: &str) -> Self { - Self { - average: Indexes3::new(client.clone(), &format!("{base_path}_average")), - base: Indexes6::new(client.clone(), &format!("{base_path}_base")), - max: Indexes3::new(client.clone(), &format!("{base_path}_max")), - median: Indexes2::new(client.clone(), &format!("{base_path}_median")), - min: Indexes3::new(client.clone(), &format!("{base_path}_min")), - pct10: Indexes2::new(client.clone(), &format!("{base_path}_pct10")), - pct25: Indexes2::new(client.clone(), &format!("{base_path}_pct25")), - pct75: Indexes2::new(client.clone(), &format!("{base_path}_pct75")), - pct90: Indexes2::new(client.clone(), 
&format!("{base_path}_pct90")), - } - } -} - -/// Catalog tree node. -pub struct CatalogTree_Computed_Chain_Volume { - pub annualized_volume: Indexes, - pub annualized_volume_btc: Indexes, - pub annualized_volume_usd: Indexes, - pub inputs_per_sec: Indexes, - pub outputs_per_sec: Indexes, - pub sent_sum: CatalogTree_Computed_Chain_Volume_SentSum, - pub tx_btc_velocity: Indexes, - pub tx_per_sec: Indexes, - pub tx_usd_velocity: Indexes, -} - -impl CatalogTree_Computed_Chain_Volume { - pub fn new(client: Arc, base_path: &str) -> Self { - Self { - annualized_volume: Indexes::new(client.clone(), &format!("{base_path}_annualized_volume")), - annualized_volume_btc: Indexes::new(client.clone(), &format!("{base_path}_annualized_volume_btc")), - annualized_volume_usd: Indexes::new(client.clone(), &format!("{base_path}_annualized_volume_usd")), - inputs_per_sec: Indexes::new(client.clone(), &format!("{base_path}_inputs_per_sec")), - outputs_per_sec: Indexes::new(client.clone(), &format!("{base_path}_outputs_per_sec")), - sent_sum: CatalogTree_Computed_Chain_Volume_SentSum::new(client.clone(), &format!("{base_path}_sent_sum")), - tx_btc_velocity: Indexes::new(client.clone(), &format!("{base_path}_tx_btc_velocity")), - tx_per_sec: Indexes::new(client.clone(), &format!("{base_path}_tx_per_sec")), - tx_usd_velocity: Indexes::new(client.clone(), &format!("{base_path}_tx_usd_velocity")), - } - } -} - -/// Catalog tree node. 
-pub struct CatalogTree_Computed_Chain_Volume_SentSum { - pub bitcoin: BitcoinPattern2, - pub dollars: Indexes3, - pub sats: Indexes3, -} - -impl CatalogTree_Computed_Chain_Volume_SentSum { - pub fn new(client: Arc, base_path: &str) -> Self { - Self { - bitcoin: BitcoinPattern2::new(client.clone(), &format!("{base_path}_bitcoin")), - dollars: Indexes3::new(client.clone(), &format!("{base_path}_dollars")), - sats: Indexes3::new(client.clone(), &format!("{base_path}_sats")), + block_fullness: BitcoinPattern::new(client.clone(), "block_fullness".to_string()), + block_weight: BlockSizePattern::new(client.clone(), "block_weight".to_string()), } } } /// Catalog tree node. pub struct CatalogTree_Computed_Cointime { - pub active_cap: Indexes3, - pub active_price: Indexes3, - pub active_price_ratio: ActivePriceRatioPattern, - pub active_supply: ActiveSupplyPattern, - pub activity_to_vaultedness_ratio: Indexes3, - pub coinblocks_created: BlockCountPattern, - pub coinblocks_stored: BlockCountPattern, - pub cointime_adj_inflation_rate: Indexes, - pub cointime_adj_tx_btc_velocity: Indexes, - pub cointime_adj_tx_usd_velocity: Indexes, - pub cointime_cap: Indexes3, - pub cointime_price: Indexes3, - pub cointime_price_ratio: ActivePriceRatioPattern, - pub cointime_value_created: BlockCountPattern, - pub cointime_value_destroyed: BlockCountPattern, - pub cointime_value_stored: BlockCountPattern, - pub investor_cap: Indexes3, - pub liveliness: Indexes3, - pub thermo_cap: Indexes3, - pub true_market_mean: Indexes3, - pub true_market_mean_ratio: ActivePriceRatioPattern, - pub vaulted_cap: Indexes3, - pub vaulted_price: Indexes3, - pub vaulted_price_ratio: ActivePriceRatioPattern, - pub vaulted_supply: ActiveSupplyPattern, - pub vaultedness: Indexes3, + pub activity: CatalogTree_Computed_Cointime_Activity, + pub adjusted: CatalogTree_Computed_Cointime_Adjusted, + pub cap: CatalogTree_Computed_Cointime_Cap, + pub pricing: CatalogTree_Computed_Cointime_Pricing, + pub supply: 
CatalogTree_Computed_Cointime_Supply, + pub value: CatalogTree_Computed_Cointime_Value, } impl CatalogTree_Computed_Cointime { - pub fn new(client: Arc, base_path: &str) -> Self { + pub fn new(client: Arc, base_path: String) -> Self { Self { - active_cap: Indexes3::new(client.clone(), &format!("{base_path}_active_cap")), - active_price: Indexes3::new(client.clone(), &format!("{base_path}_active_price")), - active_price_ratio: ActivePriceRatioPattern::new(client.clone(), &format!("{base_path}_active_price_ratio")), - active_supply: ActiveSupplyPattern::new(client.clone(), &format!("{base_path}_active_supply")), - activity_to_vaultedness_ratio: Indexes3::new(client.clone(), &format!("{base_path}_activity_to_vaultedness_ratio")), - coinblocks_created: BlockCountPattern::new(client.clone(), &format!("{base_path}_coinblocks_created")), - coinblocks_stored: BlockCountPattern::new(client.clone(), &format!("{base_path}_coinblocks_stored")), - cointime_adj_inflation_rate: Indexes::new(client.clone(), &format!("{base_path}_cointime_adj_inflation_rate")), - cointime_adj_tx_btc_velocity: Indexes::new(client.clone(), &format!("{base_path}_cointime_adj_tx_btc_velocity")), - cointime_adj_tx_usd_velocity: Indexes::new(client.clone(), &format!("{base_path}_cointime_adj_tx_usd_velocity")), - cointime_cap: Indexes3::new(client.clone(), &format!("{base_path}_cointime_cap")), - cointime_price: Indexes3::new(client.clone(), &format!("{base_path}_cointime_price")), - cointime_price_ratio: ActivePriceRatioPattern::new(client.clone(), &format!("{base_path}_cointime_price_ratio")), - cointime_value_created: BlockCountPattern::new(client.clone(), &format!("{base_path}_cointime_value_created")), - cointime_value_destroyed: BlockCountPattern::new(client.clone(), &format!("{base_path}_cointime_value_destroyed")), - cointime_value_stored: BlockCountPattern::new(client.clone(), &format!("{base_path}_cointime_value_stored")), - investor_cap: Indexes3::new(client.clone(), 
&format!("{base_path}_investor_cap")), - liveliness: Indexes3::new(client.clone(), &format!("{base_path}_liveliness")), - thermo_cap: Indexes3::new(client.clone(), &format!("{base_path}_thermo_cap")), - true_market_mean: Indexes3::new(client.clone(), &format!("{base_path}_true_market_mean")), - true_market_mean_ratio: ActivePriceRatioPattern::new(client.clone(), &format!("{base_path}_true_market_mean_ratio")), - vaulted_cap: Indexes3::new(client.clone(), &format!("{base_path}_vaulted_cap")), - vaulted_price: Indexes3::new(client.clone(), &format!("{base_path}_vaulted_price")), - vaulted_price_ratio: ActivePriceRatioPattern::new(client.clone(), &format!("{base_path}_vaulted_price_ratio")), - vaulted_supply: ActiveSupplyPattern::new(client.clone(), &format!("{base_path}_vaulted_supply")), - vaultedness: Indexes3::new(client.clone(), &format!("{base_path}_vaultedness")), + activity: CatalogTree_Computed_Cointime_Activity::new(client.clone(), format!("{base_path}_activity")), + adjusted: CatalogTree_Computed_Cointime_Adjusted::new(client.clone(), format!("{base_path}_adjusted")), + cap: CatalogTree_Computed_Cointime_Cap::new(client.clone(), format!("{base_path}_cap")), + pricing: CatalogTree_Computed_Cointime_Pricing::new(client.clone(), format!("{base_path}_pricing")), + supply: CatalogTree_Computed_Cointime_Supply::new(client.clone(), format!("{base_path}_supply")), + value: CatalogTree_Computed_Cointime_Value::new(client.clone(), format!("{base_path}_value")), + } + } +} + +/// Catalog tree node. 
+pub struct CatalogTree_Computed_Cointime_Activity { + pub activity_to_vaultedness_ratio: MetricPattern1, + pub coinblocks_created: BlockCountPattern, + pub coinblocks_stored: BlockCountPattern, + pub liveliness: MetricPattern1, + pub vaultedness: MetricPattern1, +} + +impl CatalogTree_Computed_Cointime_Activity { + pub fn new(client: Arc, base_path: String) -> Self { + Self { + activity_to_vaultedness_ratio: MetricPattern1::new(client.clone(), format!("{base_path}_activity_to_vaultedness_ratio")), + coinblocks_created: BlockCountPattern::new(client.clone(), "coinblocks_created".to_string()), + coinblocks_stored: BlockCountPattern::new(client.clone(), "coinblocks_stored".to_string()), + liveliness: MetricPattern1::new(client.clone(), format!("{base_path}_liveliness")), + vaultedness: MetricPattern1::new(client.clone(), format!("{base_path}_vaultedness")), + } + } +} + +/// Catalog tree node. +pub struct CatalogTree_Computed_Cointime_Adjusted { + pub cointime_adj_inflation_rate: MetricPattern4, + pub cointime_adj_tx_btc_velocity: MetricPattern4, + pub cointime_adj_tx_usd_velocity: MetricPattern4, +} + +impl CatalogTree_Computed_Cointime_Adjusted { + pub fn new(client: Arc, base_path: String) -> Self { + Self { + cointime_adj_inflation_rate: MetricPattern4::new(client.clone(), format!("{base_path}_cointime_adj_inflation_rate")), + cointime_adj_tx_btc_velocity: MetricPattern4::new(client.clone(), format!("{base_path}_cointime_adj_tx_btc_velocity")), + cointime_adj_tx_usd_velocity: MetricPattern4::new(client.clone(), format!("{base_path}_cointime_adj_tx_usd_velocity")), + } + } +} + +/// Catalog tree node. 
+pub struct CatalogTree_Computed_Cointime_Cap { + pub active_cap: MetricPattern1, + pub cointime_cap: MetricPattern1, + pub investor_cap: MetricPattern1, + pub thermo_cap: MetricPattern1, + pub vaulted_cap: MetricPattern1, +} + +impl CatalogTree_Computed_Cointime_Cap { + pub fn new(client: Arc, base_path: String) -> Self { + Self { + active_cap: MetricPattern1::new(client.clone(), format!("{base_path}_active_cap")), + cointime_cap: MetricPattern1::new(client.clone(), format!("{base_path}_cointime_cap")), + investor_cap: MetricPattern1::new(client.clone(), format!("{base_path}_investor_cap")), + thermo_cap: MetricPattern1::new(client.clone(), format!("{base_path}_thermo_cap")), + vaulted_cap: MetricPattern1::new(client.clone(), format!("{base_path}_vaulted_cap")), + } + } +} + +/// Catalog tree node. +pub struct CatalogTree_Computed_Cointime_Pricing { + pub active_price: MetricPattern1, + pub active_price_ratio: ActivePriceRatioPattern, + pub cointime_price: MetricPattern1, + pub cointime_price_ratio: ActivePriceRatioPattern, + pub true_market_mean: MetricPattern1, + pub true_market_mean_ratio: ActivePriceRatioPattern, + pub vaulted_price: MetricPattern1, + pub vaulted_price_ratio: ActivePriceRatioPattern, +} + +impl CatalogTree_Computed_Cointime_Pricing { + pub fn new(client: Arc, base_path: String) -> Self { + Self { + active_price: MetricPattern1::new(client.clone(), format!("{base_path}_active_price")), + active_price_ratio: ActivePriceRatioPattern::new(client.clone(), "active_price_ratio".to_string()), + cointime_price: MetricPattern1::new(client.clone(), format!("{base_path}_cointime_price")), + cointime_price_ratio: ActivePriceRatioPattern::new(client.clone(), "cointime_price_ratio".to_string()), + true_market_mean: MetricPattern1::new(client.clone(), format!("{base_path}_true_market_mean")), + true_market_mean_ratio: ActivePriceRatioPattern::new(client.clone(), "true_market_mean_ratio".to_string()), + vaulted_price: MetricPattern1::new(client.clone(), 
format!("{base_path}_vaulted_price")), + vaulted_price_ratio: ActivePriceRatioPattern::new(client.clone(), "vaulted_price_ratio".to_string()), + } + } +} + +/// Catalog tree node. +pub struct CatalogTree_Computed_Cointime_Supply { + pub active_supply: ActiveSupplyPattern, + pub vaulted_supply: ActiveSupplyPattern, +} + +impl CatalogTree_Computed_Cointime_Supply { + pub fn new(client: Arc, base_path: String) -> Self { + Self { + active_supply: ActiveSupplyPattern::new(client.clone(), "active_supply".to_string()), + vaulted_supply: ActiveSupplyPattern::new(client.clone(), "vaulted_supply".to_string()), + } + } +} + +/// Catalog tree node. +pub struct CatalogTree_Computed_Cointime_Value { + pub cointime_value_created: BlockCountPattern, + pub cointime_value_destroyed: BlockCountPattern, + pub cointime_value_stored: BlockCountPattern, +} + +impl CatalogTree_Computed_Cointime_Value { + pub fn new(client: Arc, base_path: String) -> Self { + Self { + cointime_value_created: BlockCountPattern::new(client.clone(), "cointime_value_created".to_string()), + cointime_value_destroyed: BlockCountPattern::new(client.clone(), "cointime_value_destroyed".to_string()), + cointime_value_stored: BlockCountPattern::new(client.clone(), "cointime_value_stored".to_string()), } } } @@ -2140,35 +4856,661 @@ pub struct CatalogTree_Computed_Constants { } impl CatalogTree_Computed_Constants { - pub fn new(client: Arc, base_path: &str) -> Self { + pub fn new(client: Arc, base_path: String) -> Self { Self { - constant_0: Constant0Pattern::new(client.clone(), "constant_0"), - constant_1: Constant0Pattern::new(client.clone(), "constant_1"), - constant_100: Constant0Pattern::new(client.clone(), "constant_100"), - constant_2: Constant0Pattern::new(client.clone(), "constant_2"), - constant_3: Constant0Pattern::new(client.clone(), "constant_3"), - constant_38_2: Constant0Pattern::new(client.clone(), "constant_38_2"), - constant_4: Constant0Pattern::new(client.clone(), "constant_4"), - constant_50: 
Constant0Pattern::new(client.clone(), "constant_50"), - constant_600: Constant0Pattern::new(client.clone(), "constant_600"), - constant_61_8: Constant0Pattern::new(client.clone(), "constant_61_8"), - constant_minus_1: Constant0Pattern::new(client.clone(), "constant_minus_1"), - constant_minus_2: Constant0Pattern::new(client.clone(), "constant_minus_2"), - constant_minus_3: Constant0Pattern::new(client.clone(), "constant_minus_3"), - constant_minus_4: Constant0Pattern::new(client.clone(), "constant_minus_4"), + constant_0: Constant0Pattern::new(client.clone(), "constant_0".to_string()), + constant_1: Constant0Pattern::new(client.clone(), "constant_1".to_string()), + constant_100: Constant0Pattern::new(client.clone(), "constant_100".to_string()), + constant_2: Constant0Pattern::new(client.clone(), "constant_2".to_string()), + constant_3: Constant0Pattern::new(client.clone(), "constant_3".to_string()), + constant_38_2: Constant0Pattern::new(client.clone(), "constant_38_2".to_string()), + constant_4: Constant0Pattern::new(client.clone(), "constant_4".to_string()), + constant_50: Constant0Pattern::new(client.clone(), "constant_50".to_string()), + constant_600: Constant0Pattern::new(client.clone(), "constant_600".to_string()), + constant_61_8: Constant0Pattern::new(client.clone(), "constant_61_8".to_string()), + constant_minus_1: Constant0Pattern::new(client.clone(), "constant_minus_1".to_string()), + constant_minus_2: Constant0Pattern::new(client.clone(), "constant_minus_2".to_string()), + constant_minus_3: Constant0Pattern::new(client.clone(), "constant_minus_3".to_string()), + constant_minus_4: Constant0Pattern::new(client.clone(), "constant_minus_4".to_string()), } } } /// Catalog tree node. 
-pub struct CatalogTree_Computed_Fetched { - pub price_ohlc_in_cents: Indexes13, +pub struct CatalogTree_Computed_Distribution { + pub addr_count: MetricPattern1, + pub address_cohorts: CatalogTree_Computed_Distribution_AddressCohorts, + pub addresses_data: CatalogTree_Computed_Distribution_AddressesData, + pub addresstype_to_height_to_addr_count: AddresstypeToHeightToAddrCountPattern, + pub addresstype_to_height_to_empty_addr_count: AddresstypeToHeightToAddrCountPattern, + pub addresstype_to_indexes_to_addr_count: AddresstypeToHeightToAddrCountPattern, + pub addresstype_to_indexes_to_empty_addr_count: AddresstypeToHeightToAddrCountPattern, + pub any_address_indexes: AddresstypeToHeightToAddrCountPattern, + pub chain_state: MetricPattern25, + pub empty_addr_count: MetricPattern1, + pub emptyaddressindex: MetricPattern46, + pub loadedaddressindex: MetricPattern45, + pub utxo_cohorts: CatalogTree_Computed_Distribution_UtxoCohorts, } -impl CatalogTree_Computed_Fetched { - pub fn new(client: Arc, base_path: &str) -> Self { +impl CatalogTree_Computed_Distribution { + pub fn new(client: Arc, base_path: String) -> Self { Self { - price_ohlc_in_cents: Indexes13::new(client.clone(), &format!("{base_path}_price_ohlc_in_cents")), + addr_count: MetricPattern1::new(client.clone(), format!("{base_path}_addr_count")), + address_cohorts: CatalogTree_Computed_Distribution_AddressCohorts::new(client.clone(), format!("{base_path}_address_cohorts")), + addresses_data: CatalogTree_Computed_Distribution_AddressesData::new(client.clone(), format!("{base_path}_addresses_data")), + addresstype_to_height_to_addr_count: AddresstypeToHeightToAddrCountPattern::new(client.clone(), "".to_string()), + addresstype_to_height_to_empty_addr_count: AddresstypeToHeightToAddrCountPattern::new(client.clone(), "".to_string()), + addresstype_to_indexes_to_addr_count: AddresstypeToHeightToAddrCountPattern::new(client.clone(), "".to_string()), + addresstype_to_indexes_to_empty_addr_count: 
AddresstypeToHeightToAddrCountPattern::new(client.clone(), "".to_string()), + any_address_indexes: AddresstypeToHeightToAddrCountPattern::new(client.clone(), "anyaddressindex".to_string()), + chain_state: MetricPattern25::new(client.clone(), format!("{base_path}_chain_state")), + empty_addr_count: MetricPattern1::new(client.clone(), format!("{base_path}_empty_addr_count")), + emptyaddressindex: MetricPattern46::new(client.clone(), format!("{base_path}_emptyaddressindex")), + loadedaddressindex: MetricPattern45::new(client.clone(), format!("{base_path}_loadedaddressindex")), + utxo_cohorts: CatalogTree_Computed_Distribution_UtxoCohorts::new(client.clone(), format!("{base_path}_utxo_cohorts")), + } + } +} + +/// Catalog tree node. +pub struct CatalogTree_Computed_Distribution_AddressCohorts { + pub amount_range: CatalogTree_Computed_Distribution_AddressCohorts_AmountRange, + pub ge_amount: CatalogTree_Computed_Distribution_AddressCohorts_GeAmount, + pub lt_amount: CatalogTree_Computed_Distribution_AddressCohorts_LtAmount, +} + +impl CatalogTree_Computed_Distribution_AddressCohorts { + pub fn new(client: Arc, base_path: String) -> Self { + Self { + amount_range: CatalogTree_Computed_Distribution_AddressCohorts_AmountRange::new(client.clone(), format!("{base_path}_amount_range")), + ge_amount: CatalogTree_Computed_Distribution_AddressCohorts_GeAmount::new(client.clone(), format!("{base_path}_ge_amount")), + lt_amount: CatalogTree_Computed_Distribution_AddressCohorts_LtAmount::new(client.clone(), format!("{base_path}_lt_amount")), + } + } +} + +/// Catalog tree node. 
+pub struct CatalogTree_Computed_Distribution_AddressCohorts_AmountRange { + pub _0sats: _0satsPattern, + pub _100btc_to_1k_btc: _0satsPattern, + pub _100k_btc_or_more: _0satsPattern, + pub _100k_sats_to_1m_sats: _0satsPattern, + pub _100sats_to_1k_sats: _0satsPattern, + pub _10btc_to_100btc: _0satsPattern, + pub _10k_btc_to_100k_btc: _0satsPattern, + pub _10k_sats_to_100k_sats: _0satsPattern, + pub _10m_sats_to_1btc: _0satsPattern, + pub _10sats_to_100sats: _0satsPattern, + pub _1btc_to_10btc: _0satsPattern, + pub _1k_btc_to_10k_btc: _0satsPattern, + pub _1k_sats_to_10k_sats: _0satsPattern, + pub _1m_sats_to_10m_sats: _0satsPattern, + pub _1sat_to_10sats: _0satsPattern, +} + +impl CatalogTree_Computed_Distribution_AddressCohorts_AmountRange { + pub fn new(client: Arc, base_path: String) -> Self { + Self { + _0sats: _0satsPattern::new(client.clone(), "addrs_with_0sats".to_string()), + _100btc_to_1k_btc: _0satsPattern::new(client.clone(), "addrs_above_100btc_under_1k_btc".to_string()), + _100k_btc_or_more: _0satsPattern::new(client.clone(), "addrs_above_100k_btc".to_string()), + _100k_sats_to_1m_sats: _0satsPattern::new(client.clone(), "addrs_above_100k_sats_under_1m_sats".to_string()), + _100sats_to_1k_sats: _0satsPattern::new(client.clone(), "addrs_above_100sats_under_1k_sats".to_string()), + _10btc_to_100btc: _0satsPattern::new(client.clone(), "addrs_above_10btc_under_100btc".to_string()), + _10k_btc_to_100k_btc: _0satsPattern::new(client.clone(), "addrs_above_10k_btc_under_100k_btc".to_string()), + _10k_sats_to_100k_sats: _0satsPattern::new(client.clone(), "addrs_above_10k_sats_under_100k_sats".to_string()), + _10m_sats_to_1btc: _0satsPattern::new(client.clone(), "addrs_above_10m_sats_under_1btc".to_string()), + _10sats_to_100sats: _0satsPattern::new(client.clone(), "addrs_above_10sats_under_100sats".to_string()), + _1btc_to_10btc: _0satsPattern::new(client.clone(), "addrs_above_1btc_under_10btc".to_string()), + _1k_btc_to_10k_btc: 
_0satsPattern::new(client.clone(), "addrs_above_1k_btc_under_10k_btc".to_string()), + _1k_sats_to_10k_sats: _0satsPattern::new(client.clone(), "addrs_above_1k_sats_under_10k_sats".to_string()), + _1m_sats_to_10m_sats: _0satsPattern::new(client.clone(), "addrs_above_1m_sats_under_10m_sats".to_string()), + _1sat_to_10sats: _0satsPattern::new(client.clone(), "addrs_above_1sat_under_10sats".to_string()), + } + } +} + +/// Catalog tree node. +pub struct CatalogTree_Computed_Distribution_AddressCohorts_GeAmount { + pub _100btc: _0satsPattern, + pub _100k_sats: _0satsPattern, + pub _100sats: _0satsPattern, + pub _10btc: _0satsPattern, + pub _10k_btc: _0satsPattern, + pub _10k_sats: _0satsPattern, + pub _10m_sats: _0satsPattern, + pub _10sats: _0satsPattern, + pub _1btc: _0satsPattern, + pub _1k_btc: _0satsPattern, + pub _1k_sats: _0satsPattern, + pub _1m_sats: _0satsPattern, + pub _1sat: _0satsPattern, +} + +impl CatalogTree_Computed_Distribution_AddressCohorts_GeAmount { + pub fn new(client: Arc, base_path: String) -> Self { + Self { + _100btc: _0satsPattern::new(client.clone(), "addrs_above_100btc".to_string()), + _100k_sats: _0satsPattern::new(client.clone(), "addrs_above_100k_sats".to_string()), + _100sats: _0satsPattern::new(client.clone(), "addrs_above_100sats".to_string()), + _10btc: _0satsPattern::new(client.clone(), "addrs_above_10btc".to_string()), + _10k_btc: _0satsPattern::new(client.clone(), "addrs_above_10k_btc".to_string()), + _10k_sats: _0satsPattern::new(client.clone(), "addrs_above_10k_sats".to_string()), + _10m_sats: _0satsPattern::new(client.clone(), "addrs_above_10m_sats".to_string()), + _10sats: _0satsPattern::new(client.clone(), "addrs_above_10sats".to_string()), + _1btc: _0satsPattern::new(client.clone(), "addrs_above_1btc".to_string()), + _1k_btc: _0satsPattern::new(client.clone(), "addrs_above_1k_btc".to_string()), + _1k_sats: _0satsPattern::new(client.clone(), "addrs_above_1k_sats".to_string()), + _1m_sats: _0satsPattern::new(client.clone(), 
"addrs_above_1m_sats".to_string()), + _1sat: _0satsPattern::new(client.clone(), "addrs_above_1sat".to_string()), + } + } +} + +/// Catalog tree node. +pub struct CatalogTree_Computed_Distribution_AddressCohorts_LtAmount { + pub _100btc: _0satsPattern, + pub _100k_btc: _0satsPattern, + pub _100k_sats: _0satsPattern, + pub _100sats: _0satsPattern, + pub _10btc: _0satsPattern, + pub _10k_btc: _0satsPattern, + pub _10k_sats: _0satsPattern, + pub _10m_sats: _0satsPattern, + pub _10sats: _0satsPattern, + pub _1btc: _0satsPattern, + pub _1k_btc: _0satsPattern, + pub _1k_sats: _0satsPattern, + pub _1m_sats: _0satsPattern, +} + +impl CatalogTree_Computed_Distribution_AddressCohorts_LtAmount { + pub fn new(client: Arc, base_path: String) -> Self { + Self { + _100btc: _0satsPattern::new(client.clone(), "addrs_under_100btc".to_string()), + _100k_btc: _0satsPattern::new(client.clone(), "addrs_under_100k_btc".to_string()), + _100k_sats: _0satsPattern::new(client.clone(), "addrs_under_100k_sats".to_string()), + _100sats: _0satsPattern::new(client.clone(), "addrs_under_100sats".to_string()), + _10btc: _0satsPattern::new(client.clone(), "addrs_under_10btc".to_string()), + _10k_btc: _0satsPattern::new(client.clone(), "addrs_under_10k_btc".to_string()), + _10k_sats: _0satsPattern::new(client.clone(), "addrs_under_10k_sats".to_string()), + _10m_sats: _0satsPattern::new(client.clone(), "addrs_under_10m_sats".to_string()), + _10sats: _0satsPattern::new(client.clone(), "addrs_under_10sats".to_string()), + _1btc: _0satsPattern::new(client.clone(), "addrs_under_1btc".to_string()), + _1k_btc: _0satsPattern::new(client.clone(), "addrs_under_1k_btc".to_string()), + _1k_sats: _0satsPattern::new(client.clone(), "addrs_under_1k_sats".to_string()), + _1m_sats: _0satsPattern::new(client.clone(), "addrs_under_1m_sats".to_string()), + } + } +} + +/// Catalog tree node. 
+pub struct CatalogTree_Computed_Distribution_AddressesData { + pub empty: MetricPattern46, + pub loaded: MetricPattern45, +} + +impl CatalogTree_Computed_Distribution_AddressesData { + pub fn new(client: Arc, base_path: String) -> Self { + Self { + empty: MetricPattern46::new(client.clone(), format!("{base_path}_empty")), + loaded: MetricPattern45::new(client.clone(), format!("{base_path}_loaded")), + } + } +} + +/// Catalog tree node. +pub struct CatalogTree_Computed_Distribution_UtxoCohorts { + pub age_range: CatalogTree_Computed_Distribution_UtxoCohorts_AgeRange, + pub all: CatalogTree_Computed_Distribution_UtxoCohorts_All, + pub amount_range: CatalogTree_Computed_Distribution_UtxoCohorts_AmountRange, + pub epoch: CatalogTree_Computed_Distribution_UtxoCohorts_Epoch, + pub ge_amount: CatalogTree_Computed_Distribution_UtxoCohorts_GeAmount, + pub lt_amount: CatalogTree_Computed_Distribution_UtxoCohorts_LtAmount, + pub max_age: CatalogTree_Computed_Distribution_UtxoCohorts_MaxAge, + pub min_age: CatalogTree_Computed_Distribution_UtxoCohorts_MinAge, + pub term: CatalogTree_Computed_Distribution_UtxoCohorts_Term, + pub type_: CatalogTree_Computed_Distribution_UtxoCohorts_Type, + pub year: CatalogTree_Computed_Distribution_UtxoCohorts_Year, +} + +impl CatalogTree_Computed_Distribution_UtxoCohorts { + pub fn new(client: Arc, base_path: String) -> Self { + Self { + age_range: CatalogTree_Computed_Distribution_UtxoCohorts_AgeRange::new(client.clone(), format!("{base_path}_age_range")), + all: CatalogTree_Computed_Distribution_UtxoCohorts_All::new(client.clone(), format!("{base_path}_all")), + amount_range: CatalogTree_Computed_Distribution_UtxoCohorts_AmountRange::new(client.clone(), format!("{base_path}_amount_range")), + epoch: CatalogTree_Computed_Distribution_UtxoCohorts_Epoch::new(client.clone(), format!("{base_path}_epoch")), + ge_amount: CatalogTree_Computed_Distribution_UtxoCohorts_GeAmount::new(client.clone(), format!("{base_path}_ge_amount")), + lt_amount: 
CatalogTree_Computed_Distribution_UtxoCohorts_LtAmount::new(client.clone(), format!("{base_path}_lt_amount")), + max_age: CatalogTree_Computed_Distribution_UtxoCohorts_MaxAge::new(client.clone(), format!("{base_path}_max_age")), + min_age: CatalogTree_Computed_Distribution_UtxoCohorts_MinAge::new(client.clone(), format!("{base_path}_min_age")), + term: CatalogTree_Computed_Distribution_UtxoCohorts_Term::new(client.clone(), format!("{base_path}_term")), + type_: CatalogTree_Computed_Distribution_UtxoCohorts_Type::new(client.clone(), format!("{base_path}_type_")), + year: CatalogTree_Computed_Distribution_UtxoCohorts_Year::new(client.clone(), format!("{base_path}_year")), + } + } +} + +/// Catalog tree node. +pub struct CatalogTree_Computed_Distribution_UtxoCohorts_AgeRange { + pub _10y_to_12y: _10yTo12yPattern, + pub _12y_to_15y: _10yTo12yPattern, + pub _1d_to_1w: _10yTo12yPattern, + pub _1m_to_2m: _10yTo12yPattern, + pub _1w_to_1m: _10yTo12yPattern, + pub _1y_to_2y: _10yTo12yPattern, + pub _2m_to_3m: _10yTo12yPattern, + pub _2y_to_3y: _10yTo12yPattern, + pub _3m_to_4m: _10yTo12yPattern, + pub _3y_to_4y: _10yTo12yPattern, + pub _4m_to_5m: _10yTo12yPattern, + pub _4y_to_5y: _10yTo12yPattern, + pub _5m_to_6m: _10yTo12yPattern, + pub _5y_to_6y: _10yTo12yPattern, + pub _6m_to_1y: _10yTo12yPattern, + pub _6y_to_7y: _10yTo12yPattern, + pub _7y_to_8y: _10yTo12yPattern, + pub _8y_to_10y: _10yTo12yPattern, + pub from_15y: _10yTo12yPattern, + pub up_to_1d: UpTo1dPattern, +} + +impl CatalogTree_Computed_Distribution_UtxoCohorts_AgeRange { + pub fn new(client: Arc, base_path: String) -> Self { + Self { + _10y_to_12y: _10yTo12yPattern::new(client.clone(), "utxos_at_least_10y_up_to_12y_old".to_string()), + _12y_to_15y: _10yTo12yPattern::new(client.clone(), "utxos_at_least_12y_up_to_15y_old".to_string()), + _1d_to_1w: _10yTo12yPattern::new(client.clone(), "utxos_at_least_1d_up_to_1w_old".to_string()), + _1m_to_2m: _10yTo12yPattern::new(client.clone(), 
"utxos_at_least_1m_up_to_2m_old".to_string()), + _1w_to_1m: _10yTo12yPattern::new(client.clone(), "utxos_at_least_1w_up_to_1m_old".to_string()), + _1y_to_2y: _10yTo12yPattern::new(client.clone(), "utxos_at_least_1y_up_to_2y_old".to_string()), + _2m_to_3m: _10yTo12yPattern::new(client.clone(), "utxos_at_least_2m_up_to_3m_old".to_string()), + _2y_to_3y: _10yTo12yPattern::new(client.clone(), "utxos_at_least_2y_up_to_3y_old".to_string()), + _3m_to_4m: _10yTo12yPattern::new(client.clone(), "utxos_at_least_3m_up_to_4m_old".to_string()), + _3y_to_4y: _10yTo12yPattern::new(client.clone(), "utxos_at_least_3y_up_to_4y_old".to_string()), + _4m_to_5m: _10yTo12yPattern::new(client.clone(), "utxos_at_least_4m_up_to_5m_old".to_string()), + _4y_to_5y: _10yTo12yPattern::new(client.clone(), "utxos_at_least_4y_up_to_5y_old".to_string()), + _5m_to_6m: _10yTo12yPattern::new(client.clone(), "utxos_at_least_5m_up_to_6m_old".to_string()), + _5y_to_6y: _10yTo12yPattern::new(client.clone(), "utxos_at_least_5y_up_to_6y_old".to_string()), + _6m_to_1y: _10yTo12yPattern::new(client.clone(), "utxos_at_least_6m_up_to_1y_old".to_string()), + _6y_to_7y: _10yTo12yPattern::new(client.clone(), "utxos_at_least_6y_up_to_7y_old".to_string()), + _7y_to_8y: _10yTo12yPattern::new(client.clone(), "utxos_at_least_7y_up_to_8y_old".to_string()), + _8y_to_10y: _10yTo12yPattern::new(client.clone(), "utxos_at_least_8y_up_to_10y_old".to_string()), + from_15y: _10yTo12yPattern::new(client.clone(), "utxos_at_least_15y_old".to_string()), + up_to_1d: UpTo1dPattern::new(client.clone(), "utxos_up_to_1d_old".to_string()), + } + } +} + +/// Catalog tree node. 
+pub struct CatalogTree_Computed_Distribution_UtxoCohorts_All { + pub activity: ActivityPattern2, + pub cost_basis: CostBasisPattern2, + pub realized: RealizedPattern3, + pub relative: CatalogTree_Computed_Distribution_UtxoCohorts_All_Relative, + pub supply: SupplyPattern3, + pub unrealized: UnrealizedPattern, +} + +impl CatalogTree_Computed_Distribution_UtxoCohorts_All { + pub fn new(client: Arc, base_path: String) -> Self { + Self { + activity: ActivityPattern2::new(client.clone(), "".to_string()), + cost_basis: CostBasisPattern2::new(client.clone(), "".to_string()), + realized: RealizedPattern3::new(client.clone(), "".to_string()), + relative: CatalogTree_Computed_Distribution_UtxoCohorts_All_Relative::new(client.clone(), format!("{base_path}_relative")), + supply: SupplyPattern3::new(client.clone(), "".to_string()), + unrealized: UnrealizedPattern::new(client.clone(), "".to_string()), + } + } +} + +/// Catalog tree node. +pub struct CatalogTree_Computed_Distribution_UtxoCohorts_All_Relative { + pub neg_unrealized_loss_rel_to_own_total_unrealized_pnl: MetricPattern5, + pub net_unrealized_pnl_rel_to_own_total_unrealized_pnl: MetricPattern3, + pub supply_in_loss_rel_to_own_supply: MetricPattern5, + pub supply_in_profit_rel_to_own_supply: MetricPattern5, + pub unrealized_loss_rel_to_own_total_unrealized_pnl: MetricPattern5, + pub unrealized_profit_rel_to_own_total_unrealized_pnl: MetricPattern5, +} + +impl CatalogTree_Computed_Distribution_UtxoCohorts_All_Relative { + pub fn new(client: Arc, base_path: String) -> Self { + Self { + neg_unrealized_loss_rel_to_own_total_unrealized_pnl: MetricPattern5::new(client.clone(), format!("{base_path}_neg_unrealized_loss_rel_to_own_total_unrealized_pnl")), + net_unrealized_pnl_rel_to_own_total_unrealized_pnl: MetricPattern3::new(client.clone(), format!("{base_path}_net_unrealized_pnl_rel_to_own_total_unrealized_pnl")), + supply_in_loss_rel_to_own_supply: MetricPattern5::new(client.clone(), 
format!("{base_path}_supply_in_loss_rel_to_own_supply")), + supply_in_profit_rel_to_own_supply: MetricPattern5::new(client.clone(), format!("{base_path}_supply_in_profit_rel_to_own_supply")), + unrealized_loss_rel_to_own_total_unrealized_pnl: MetricPattern5::new(client.clone(), format!("{base_path}_unrealized_loss_rel_to_own_total_unrealized_pnl")), + unrealized_profit_rel_to_own_total_unrealized_pnl: MetricPattern5::new(client.clone(), format!("{base_path}_unrealized_profit_rel_to_own_total_unrealized_pnl")), + } + } +} + +/// Catalog tree node. +pub struct CatalogTree_Computed_Distribution_UtxoCohorts_AmountRange { + pub _0sats: _0satsPattern2, + pub _100btc_to_1k_btc: _0satsPattern2, + pub _100k_btc_or_more: _0satsPattern2, + pub _100k_sats_to_1m_sats: _0satsPattern2, + pub _100sats_to_1k_sats: _0satsPattern2, + pub _10btc_to_100btc: _0satsPattern2, + pub _10k_btc_to_100k_btc: _0satsPattern2, + pub _10k_sats_to_100k_sats: _0satsPattern2, + pub _10m_sats_to_1btc: _0satsPattern2, + pub _10sats_to_100sats: _0satsPattern2, + pub _1btc_to_10btc: _0satsPattern2, + pub _1k_btc_to_10k_btc: _0satsPattern2, + pub _1k_sats_to_10k_sats: _0satsPattern2, + pub _1m_sats_to_10m_sats: _0satsPattern2, + pub _1sat_to_10sats: _0satsPattern2, +} + +impl CatalogTree_Computed_Distribution_UtxoCohorts_AmountRange { + pub fn new(client: Arc, base_path: String) -> Self { + Self { + _0sats: _0satsPattern2::new(client.clone(), "utxos_with_0sats".to_string()), + _100btc_to_1k_btc: _0satsPattern2::new(client.clone(), "utxos_above_100btc_under_1k_btc".to_string()), + _100k_btc_or_more: _0satsPattern2::new(client.clone(), "utxos_above_100k_btc".to_string()), + _100k_sats_to_1m_sats: _0satsPattern2::new(client.clone(), "utxos_above_100k_sats_under_1m_sats".to_string()), + _100sats_to_1k_sats: _0satsPattern2::new(client.clone(), "utxos_above_100sats_under_1k_sats".to_string()), + _10btc_to_100btc: _0satsPattern2::new(client.clone(), "utxos_above_10btc_under_100btc".to_string()), + 
_10k_btc_to_100k_btc: _0satsPattern2::new(client.clone(), "utxos_above_10k_btc_under_100k_btc".to_string()), + _10k_sats_to_100k_sats: _0satsPattern2::new(client.clone(), "utxos_above_10k_sats_under_100k_sats".to_string()), + _10m_sats_to_1btc: _0satsPattern2::new(client.clone(), "utxos_above_10m_sats_under_1btc".to_string()), + _10sats_to_100sats: _0satsPattern2::new(client.clone(), "utxos_above_10sats_under_100sats".to_string()), + _1btc_to_10btc: _0satsPattern2::new(client.clone(), "utxos_above_1btc_under_10btc".to_string()), + _1k_btc_to_10k_btc: _0satsPattern2::new(client.clone(), "utxos_above_1k_btc_under_10k_btc".to_string()), + _1k_sats_to_10k_sats: _0satsPattern2::new(client.clone(), "utxos_above_1k_sats_under_10k_sats".to_string()), + _1m_sats_to_10m_sats: _0satsPattern2::new(client.clone(), "utxos_above_1m_sats_under_10m_sats".to_string()), + _1sat_to_10sats: _0satsPattern2::new(client.clone(), "utxos_above_1sat_under_10sats".to_string()), + } + } +} + +/// Catalog tree node. +pub struct CatalogTree_Computed_Distribution_UtxoCohorts_Epoch { + pub _0: _10yTo12yPattern, + pub _1: _10yTo12yPattern, + pub _2: _10yTo12yPattern, + pub _3: _10yTo12yPattern, + pub _4: _10yTo12yPattern, +} + +impl CatalogTree_Computed_Distribution_UtxoCohorts_Epoch { + pub fn new(client: Arc, base_path: String) -> Self { + Self { + _0: _10yTo12yPattern::new(client.clone(), "epoch_0".to_string()), + _1: _10yTo12yPattern::new(client.clone(), "epoch_1".to_string()), + _2: _10yTo12yPattern::new(client.clone(), "epoch_2".to_string()), + _3: _10yTo12yPattern::new(client.clone(), "epoch_3".to_string()), + _4: _10yTo12yPattern::new(client.clone(), "epoch_4".to_string()), + } + } +} + +/// Catalog tree node. 
+pub struct CatalogTree_Computed_Distribution_UtxoCohorts_GeAmount { + pub _100btc: _0satsPattern2, + pub _100k_sats: _0satsPattern2, + pub _100sats: _0satsPattern2, + pub _10btc: _0satsPattern2, + pub _10k_btc: _0satsPattern2, + pub _10k_sats: _0satsPattern2, + pub _10m_sats: _0satsPattern2, + pub _10sats: _0satsPattern2, + pub _1btc: _0satsPattern2, + pub _1k_btc: _0satsPattern2, + pub _1k_sats: _0satsPattern2, + pub _1m_sats: _0satsPattern2, + pub _1sat: _0satsPattern2, +} + +impl CatalogTree_Computed_Distribution_UtxoCohorts_GeAmount { + pub fn new(client: Arc, base_path: String) -> Self { + Self { + _100btc: _0satsPattern2::new(client.clone(), "utxos_above_100btc".to_string()), + _100k_sats: _0satsPattern2::new(client.clone(), "utxos_above_100k_sats".to_string()), + _100sats: _0satsPattern2::new(client.clone(), "utxos_above_100sats".to_string()), + _10btc: _0satsPattern2::new(client.clone(), "utxos_above_10btc".to_string()), + _10k_btc: _0satsPattern2::new(client.clone(), "utxos_above_10k_btc".to_string()), + _10k_sats: _0satsPattern2::new(client.clone(), "utxos_above_10k_sats".to_string()), + _10m_sats: _0satsPattern2::new(client.clone(), "utxos_above_10m_sats".to_string()), + _10sats: _0satsPattern2::new(client.clone(), "utxos_above_10sats".to_string()), + _1btc: _0satsPattern2::new(client.clone(), "utxos_above_1btc".to_string()), + _1k_btc: _0satsPattern2::new(client.clone(), "utxos_above_1k_btc".to_string()), + _1k_sats: _0satsPattern2::new(client.clone(), "utxos_above_1k_sats".to_string()), + _1m_sats: _0satsPattern2::new(client.clone(), "utxos_above_1m_sats".to_string()), + _1sat: _0satsPattern2::new(client.clone(), "utxos_above_1sat".to_string()), + } + } +} + +/// Catalog tree node. 
+pub struct CatalogTree_Computed_Distribution_UtxoCohorts_LtAmount { + pub _100btc: _0satsPattern2, + pub _100k_btc: _0satsPattern2, + pub _100k_sats: _0satsPattern2, + pub _100sats: _0satsPattern2, + pub _10btc: _0satsPattern2, + pub _10k_btc: _0satsPattern2, + pub _10k_sats: _0satsPattern2, + pub _10m_sats: _0satsPattern2, + pub _10sats: _0satsPattern2, + pub _1btc: _0satsPattern2, + pub _1k_btc: _0satsPattern2, + pub _1k_sats: _0satsPattern2, + pub _1m_sats: _0satsPattern2, +} + +impl CatalogTree_Computed_Distribution_UtxoCohorts_LtAmount { + pub fn new(client: Arc, base_path: String) -> Self { + Self { + _100btc: _0satsPattern2::new(client.clone(), "utxos_under_100btc".to_string()), + _100k_btc: _0satsPattern2::new(client.clone(), "utxos_under_100k_btc".to_string()), + _100k_sats: _0satsPattern2::new(client.clone(), "utxos_under_100k_sats".to_string()), + _100sats: _0satsPattern2::new(client.clone(), "utxos_under_100sats".to_string()), + _10btc: _0satsPattern2::new(client.clone(), "utxos_under_10btc".to_string()), + _10k_btc: _0satsPattern2::new(client.clone(), "utxos_under_10k_btc".to_string()), + _10k_sats: _0satsPattern2::new(client.clone(), "utxos_under_10k_sats".to_string()), + _10m_sats: _0satsPattern2::new(client.clone(), "utxos_under_10m_sats".to_string()), + _10sats: _0satsPattern2::new(client.clone(), "utxos_under_10sats".to_string()), + _1btc: _0satsPattern2::new(client.clone(), "utxos_under_1btc".to_string()), + _1k_btc: _0satsPattern2::new(client.clone(), "utxos_under_1k_btc".to_string()), + _1k_sats: _0satsPattern2::new(client.clone(), "utxos_under_1k_sats".to_string()), + _1m_sats: _0satsPattern2::new(client.clone(), "utxos_under_1m_sats".to_string()), + } + } +} + +/// Catalog tree node. 
+pub struct CatalogTree_Computed_Distribution_UtxoCohorts_MaxAge { + pub _10y: UpTo1dPattern, + pub _12y: UpTo1dPattern, + pub _15y: UpTo1dPattern, + pub _1m: UpTo1dPattern, + pub _1w: UpTo1dPattern, + pub _1y: UpTo1dPattern, + pub _2m: UpTo1dPattern, + pub _2y: UpTo1dPattern, + pub _3m: UpTo1dPattern, + pub _3y: UpTo1dPattern, + pub _4m: UpTo1dPattern, + pub _4y: UpTo1dPattern, + pub _5m: UpTo1dPattern, + pub _5y: UpTo1dPattern, + pub _6m: UpTo1dPattern, + pub _6y: UpTo1dPattern, + pub _7y: UpTo1dPattern, + pub _8y: UpTo1dPattern, +} + +impl CatalogTree_Computed_Distribution_UtxoCohorts_MaxAge { + pub fn new(client: Arc, base_path: String) -> Self { + Self { + _10y: UpTo1dPattern::new(client.clone(), "utxos_up_to_10y_old".to_string()), + _12y: UpTo1dPattern::new(client.clone(), "utxos_up_to_12y_old".to_string()), + _15y: UpTo1dPattern::new(client.clone(), "utxos_up_to_15y_old".to_string()), + _1m: UpTo1dPattern::new(client.clone(), "utxos_up_to_1m_old".to_string()), + _1w: UpTo1dPattern::new(client.clone(), "utxos_up_to_1w_old".to_string()), + _1y: UpTo1dPattern::new(client.clone(), "utxos_up_to_1y_old".to_string()), + _2m: UpTo1dPattern::new(client.clone(), "utxos_up_to_2m_old".to_string()), + _2y: UpTo1dPattern::new(client.clone(), "utxos_up_to_2y_old".to_string()), + _3m: UpTo1dPattern::new(client.clone(), "utxos_up_to_3m_old".to_string()), + _3y: UpTo1dPattern::new(client.clone(), "utxos_up_to_3y_old".to_string()), + _4m: UpTo1dPattern::new(client.clone(), "utxos_up_to_4m_old".to_string()), + _4y: UpTo1dPattern::new(client.clone(), "utxos_up_to_4y_old".to_string()), + _5m: UpTo1dPattern::new(client.clone(), "utxos_up_to_5m_old".to_string()), + _5y: UpTo1dPattern::new(client.clone(), "utxos_up_to_5y_old".to_string()), + _6m: UpTo1dPattern::new(client.clone(), "utxos_up_to_6m_old".to_string()), + _6y: UpTo1dPattern::new(client.clone(), "utxos_up_to_6y_old".to_string()), + _7y: UpTo1dPattern::new(client.clone(), "utxos_up_to_7y_old".to_string()), + _8y: 
UpTo1dPattern::new(client.clone(), "utxos_up_to_8y_old".to_string()), + } + } +} + +/// Catalog tree node. +pub struct CatalogTree_Computed_Distribution_UtxoCohorts_MinAge { + pub _10y: _10yTo12yPattern, + pub _12y: _10yTo12yPattern, + pub _1d: _10yTo12yPattern, + pub _1m: _10yTo12yPattern, + pub _1w: _10yTo12yPattern, + pub _1y: _10yTo12yPattern, + pub _2m: _10yTo12yPattern, + pub _2y: _10yTo12yPattern, + pub _3m: _10yTo12yPattern, + pub _3y: _10yTo12yPattern, + pub _4m: _10yTo12yPattern, + pub _4y: _10yTo12yPattern, + pub _5m: _10yTo12yPattern, + pub _5y: _10yTo12yPattern, + pub _6m: _10yTo12yPattern, + pub _6y: _10yTo12yPattern, + pub _7y: _10yTo12yPattern, + pub _8y: _10yTo12yPattern, +} + +impl CatalogTree_Computed_Distribution_UtxoCohorts_MinAge { + pub fn new(client: Arc, base_path: String) -> Self { + Self { + _10y: _10yTo12yPattern::new(client.clone(), "utxos_at_least_10y_old".to_string()), + _12y: _10yTo12yPattern::new(client.clone(), "utxos_at_least_12y_old".to_string()), + _1d: _10yTo12yPattern::new(client.clone(), "utxos_at_least_1d_old".to_string()), + _1m: _10yTo12yPattern::new(client.clone(), "utxos_at_least_1m_old".to_string()), + _1w: _10yTo12yPattern::new(client.clone(), "utxos_at_least_1w_old".to_string()), + _1y: _10yTo12yPattern::new(client.clone(), "utxos_at_least_1y_old".to_string()), + _2m: _10yTo12yPattern::new(client.clone(), "utxos_at_least_2m_old".to_string()), + _2y: _10yTo12yPattern::new(client.clone(), "utxos_at_least_2y_old".to_string()), + _3m: _10yTo12yPattern::new(client.clone(), "utxos_at_least_3m_old".to_string()), + _3y: _10yTo12yPattern::new(client.clone(), "utxos_at_least_3y_old".to_string()), + _4m: _10yTo12yPattern::new(client.clone(), "utxos_at_least_4m_old".to_string()), + _4y: _10yTo12yPattern::new(client.clone(), "utxos_at_least_4y_old".to_string()), + _5m: _10yTo12yPattern::new(client.clone(), "utxos_at_least_5m_old".to_string()), + _5y: _10yTo12yPattern::new(client.clone(), "utxos_at_least_5y_old".to_string()), + 
_6m: _10yTo12yPattern::new(client.clone(), "utxos_at_least_6m_old".to_string()), + _6y: _10yTo12yPattern::new(client.clone(), "utxos_at_least_6y_old".to_string()), + _7y: _10yTo12yPattern::new(client.clone(), "utxos_at_least_7y_old".to_string()), + _8y: _10yTo12yPattern::new(client.clone(), "utxos_at_least_8y_old".to_string()), + } + } +} + +/// Catalog tree node. +pub struct CatalogTree_Computed_Distribution_UtxoCohorts_Term { + pub long: UpTo1dPattern, + pub short: UpTo1dPattern, +} + +impl CatalogTree_Computed_Distribution_UtxoCohorts_Term { + pub fn new(client: Arc, base_path: String) -> Self { + Self { + long: UpTo1dPattern::new(client.clone(), "lth".to_string()), + short: UpTo1dPattern::new(client.clone(), "sth".to_string()), + } + } +} + +/// Catalog tree node. +pub struct CatalogTree_Computed_Distribution_UtxoCohorts_Type { + pub empty: _0satsPattern2, + pub p2a: _0satsPattern2, + pub p2ms: _0satsPattern2, + pub p2pk33: _0satsPattern2, + pub p2pk65: _0satsPattern2, + pub p2pkh: _0satsPattern2, + pub p2sh: _0satsPattern2, + pub p2tr: _0satsPattern2, + pub p2wpkh: _0satsPattern2, + pub p2wsh: _0satsPattern2, + pub unknown: _0satsPattern2, +} + +impl CatalogTree_Computed_Distribution_UtxoCohorts_Type { + pub fn new(client: Arc, base_path: String) -> Self { + Self { + empty: _0satsPattern2::new(client.clone(), "empty_outputs".to_string()), + p2a: _0satsPattern2::new(client.clone(), "p2a".to_string()), + p2ms: _0satsPattern2::new(client.clone(), "p2ms".to_string()), + p2pk33: _0satsPattern2::new(client.clone(), "p2pk33".to_string()), + p2pk65: _0satsPattern2::new(client.clone(), "p2pk65".to_string()), + p2pkh: _0satsPattern2::new(client.clone(), "p2pkh".to_string()), + p2sh: _0satsPattern2::new(client.clone(), "p2sh".to_string()), + p2tr: _0satsPattern2::new(client.clone(), "p2tr".to_string()), + p2wpkh: _0satsPattern2::new(client.clone(), "p2wpkh".to_string()), + p2wsh: _0satsPattern2::new(client.clone(), "p2wsh".to_string()), + unknown: 
_0satsPattern2::new(client.clone(), "unknown_outputs".to_string()), + } + } +} + +/// Catalog tree node. +pub struct CatalogTree_Computed_Distribution_UtxoCohorts_Year { + pub _2009: _10yTo12yPattern, + pub _2010: _10yTo12yPattern, + pub _2011: _10yTo12yPattern, + pub _2012: _10yTo12yPattern, + pub _2013: _10yTo12yPattern, + pub _2014: _10yTo12yPattern, + pub _2015: _10yTo12yPattern, + pub _2016: _10yTo12yPattern, + pub _2017: _10yTo12yPattern, + pub _2018: _10yTo12yPattern, + pub _2019: _10yTo12yPattern, + pub _2020: _10yTo12yPattern, + pub _2021: _10yTo12yPattern, + pub _2022: _10yTo12yPattern, + pub _2023: _10yTo12yPattern, + pub _2024: _10yTo12yPattern, + pub _2025: _10yTo12yPattern, + pub _2026: _10yTo12yPattern, +} + +impl CatalogTree_Computed_Distribution_UtxoCohorts_Year { + pub fn new(client: Arc, base_path: String) -> Self { + Self { + _2009: _10yTo12yPattern::new(client.clone(), "year_2009".to_string()), + _2010: _10yTo12yPattern::new(client.clone(), "year_2010".to_string()), + _2011: _10yTo12yPattern::new(client.clone(), "year_2011".to_string()), + _2012: _10yTo12yPattern::new(client.clone(), "year_2012".to_string()), + _2013: _10yTo12yPattern::new(client.clone(), "year_2013".to_string()), + _2014: _10yTo12yPattern::new(client.clone(), "year_2014".to_string()), + _2015: _10yTo12yPattern::new(client.clone(), "year_2015".to_string()), + _2016: _10yTo12yPattern::new(client.clone(), "year_2016".to_string()), + _2017: _10yTo12yPattern::new(client.clone(), "year_2017".to_string()), + _2018: _10yTo12yPattern::new(client.clone(), "year_2018".to_string()), + _2019: _10yTo12yPattern::new(client.clone(), "year_2019".to_string()), + _2020: _10yTo12yPattern::new(client.clone(), "year_2020".to_string()), + _2021: _10yTo12yPattern::new(client.clone(), "year_2021".to_string()), + _2022: _10yTo12yPattern::new(client.clone(), "year_2022".to_string()), + _2023: _10yTo12yPattern::new(client.clone(), "year_2023".to_string()), + _2024: _10yTo12yPattern::new(client.clone(), 
"year_2024".to_string()), + _2025: _10yTo12yPattern::new(client.clone(), "year_2025".to_string()), + _2026: _10yTo12yPattern::new(client.clone(), "year_2026".to_string()), } } } @@ -2182,142 +5524,179 @@ pub struct CatalogTree_Computed_Indexes { } impl CatalogTree_Computed_Indexes { - pub fn new(client: Arc, base_path: &str) -> Self { + pub fn new(client: Arc, base_path: String) -> Self { Self { - address: CatalogTree_Computed_Indexes_Address::new(client.clone(), &format!("{base_path}_address")), - block: CatalogTree_Computed_Indexes_Block::new(client.clone(), &format!("{base_path}_block")), - time: CatalogTree_Computed_Indexes_Time::new(client.clone(), &format!("{base_path}_time")), - transaction: CatalogTree_Computed_Indexes_Transaction::new(client.clone(), &format!("{base_path}_transaction")), + address: CatalogTree_Computed_Indexes_Address::new(client.clone(), format!("{base_path}_address")), + block: CatalogTree_Computed_Indexes_Block::new(client.clone(), format!("{base_path}_block")), + time: CatalogTree_Computed_Indexes_Time::new(client.clone(), format!("{base_path}_time")), + transaction: CatalogTree_Computed_Indexes_Transaction::new(client.clone(), format!("{base_path}_transaction")), } } } /// Catalog tree node. 
pub struct CatalogTree_Computed_Indexes_Address { - pub emptyoutputindex: MetricNode, - pub opreturnindex: MetricNode, - pub p2aaddressindex: Indexes14, - pub p2msoutputindex: MetricNode, - pub p2pk33addressindex: Indexes15, - pub p2pk65addressindex: Indexes16, - pub p2pkhaddressindex: Indexes17, - pub p2shaddressindex: Indexes18, - pub p2traddressindex: Indexes19, - pub p2wpkhaddressindex: Indexes20, - pub p2wshaddressindex: Indexes21, - pub unknownoutputindex: MetricNode, + pub emptyoutputindex: MetricPattern24, + pub opreturnindex: MetricPattern28, + pub p2aaddressindex: MetricPattern30, + pub p2msoutputindex: MetricPattern31, + pub p2pk33addressindex: MetricPattern32, + pub p2pk65addressindex: MetricPattern33, + pub p2pkhaddressindex: MetricPattern34, + pub p2shaddressindex: MetricPattern35, + pub p2traddressindex: MetricPattern36, + pub p2wpkhaddressindex: MetricPattern37, + pub p2wshaddressindex: MetricPattern38, + pub unknownoutputindex: MetricPattern42, } impl CatalogTree_Computed_Indexes_Address { - pub fn new(client: Arc, base_path: &str) -> Self { + pub fn new(client: Arc, base_path: String) -> Self { Self { - emptyoutputindex: MetricNode::new(client.clone(), format!("{base_path}_emptyoutputindex")), - opreturnindex: MetricNode::new(client.clone(), format!("{base_path}_opreturnindex")), - p2aaddressindex: Indexes14::new(client.clone(), &format!("{base_path}_p2aaddressindex")), - p2msoutputindex: MetricNode::new(client.clone(), format!("{base_path}_p2msoutputindex")), - p2pk33addressindex: Indexes15::new(client.clone(), &format!("{base_path}_p2pk33addressindex")), - p2pk65addressindex: Indexes16::new(client.clone(), &format!("{base_path}_p2pk65addressindex")), - p2pkhaddressindex: Indexes17::new(client.clone(), &format!("{base_path}_p2pkhaddressindex")), - p2shaddressindex: Indexes18::new(client.clone(), &format!("{base_path}_p2shaddressindex")), - p2traddressindex: Indexes19::new(client.clone(), &format!("{base_path}_p2traddressindex")), - 
p2wpkhaddressindex: Indexes20::new(client.clone(), &format!("{base_path}_p2wpkhaddressindex")), - p2wshaddressindex: Indexes21::new(client.clone(), &format!("{base_path}_p2wshaddressindex")), - unknownoutputindex: MetricNode::new(client.clone(), format!("{base_path}_unknownoutputindex")), + emptyoutputindex: MetricPattern24::new(client.clone(), format!("{base_path}_emptyoutputindex")), + opreturnindex: MetricPattern28::new(client.clone(), format!("{base_path}_opreturnindex")), + p2aaddressindex: MetricPattern30::new(client.clone(), format!("{base_path}_p2aaddressindex")), + p2msoutputindex: MetricPattern31::new(client.clone(), format!("{base_path}_p2msoutputindex")), + p2pk33addressindex: MetricPattern32::new(client.clone(), format!("{base_path}_p2pk33addressindex")), + p2pk65addressindex: MetricPattern33::new(client.clone(), format!("{base_path}_p2pk65addressindex")), + p2pkhaddressindex: MetricPattern34::new(client.clone(), format!("{base_path}_p2pkhaddressindex")), + p2shaddressindex: MetricPattern35::new(client.clone(), format!("{base_path}_p2shaddressindex")), + p2traddressindex: MetricPattern36::new(client.clone(), format!("{base_path}_p2traddressindex")), + p2wpkhaddressindex: MetricPattern37::new(client.clone(), format!("{base_path}_p2wpkhaddressindex")), + p2wshaddressindex: MetricPattern38::new(client.clone(), format!("{base_path}_p2wshaddressindex")), + unknownoutputindex: MetricPattern42::new(client.clone(), format!("{base_path}_unknownoutputindex")), } } } /// Catalog tree node. 
pub struct CatalogTree_Computed_Indexes_Block { - pub date: Indexes2, - pub date_fixed: Indexes2, - pub dateindex: Indexes2, - pub difficultyepoch: MetricNode, - pub first_height: MetricNode, - pub halvingepoch: MetricNode, - pub height: Indexes2, - pub height_count: MetricNode, - pub timestamp_fixed: Indexes2, - pub txindex_count: Indexes2, + pub dateindex: MetricPattern25, + pub difficultyepoch: MetricPattern14, + pub first_height: MetricPattern13, + pub halvingepoch: MetricPattern15, + pub height: MetricPattern25, + pub height_count: MetricPattern23, + pub txindex_count: MetricPattern25, } impl CatalogTree_Computed_Indexes_Block { - pub fn new(client: Arc, base_path: &str) -> Self { + pub fn new(client: Arc, base_path: String) -> Self { Self { - date: Indexes2::new(client.clone(), &format!("{base_path}_date")), - date_fixed: Indexes2::new(client.clone(), &format!("{base_path}_date_fixed")), - dateindex: Indexes2::new(client.clone(), &format!("{base_path}_dateindex")), - difficultyepoch: MetricNode::new(client.clone(), format!("{base_path}_difficultyepoch")), - first_height: MetricNode::new(client.clone(), format!("{base_path}_first_height")), - halvingepoch: MetricNode::new(client.clone(), format!("{base_path}_halvingepoch")), - height: Indexes2::new(client.clone(), &format!("{base_path}_height")), - height_count: MetricNode::new(client.clone(), format!("{base_path}_height_count")), - timestamp_fixed: Indexes2::new(client.clone(), &format!("{base_path}_timestamp_fixed")), - txindex_count: Indexes2::new(client.clone(), &format!("{base_path}_txindex_count")), + dateindex: MetricPattern25::new(client.clone(), format!("{base_path}_dateindex")), + difficultyepoch: MetricPattern14::new(client.clone(), format!("{base_path}_difficultyepoch")), + first_height: MetricPattern13::new(client.clone(), format!("{base_path}_first_height")), + halvingepoch: MetricPattern15::new(client.clone(), format!("{base_path}_halvingepoch")), + height: MetricPattern25::new(client.clone(), 
format!("{base_path}_height")), + height_count: MetricPattern23::new(client.clone(), format!("{base_path}_height_count")), + txindex_count: MetricPattern25::new(client.clone(), format!("{base_path}_txindex_count")), } } } /// Catalog tree node. pub struct CatalogTree_Computed_Indexes_Time { - pub date: Indexes5, - pub dateindex: Indexes5, - pub dateindex_count: Indexes22, - pub decadeindex: MetricNode, - pub first_dateindex: Indexes22, - pub first_height: Indexes5, - pub first_monthindex: Indexes23, - pub first_yearindex: Indexes7, - pub height_count: Indexes5, - pub monthindex: MetricNode, - pub monthindex_count: Indexes23, - pub quarterindex: MetricNode, - pub semesterindex: MetricNode, - pub weekindex: MetricNode, - pub yearindex: MetricNode, - pub yearindex_count: Indexes7, + pub date: MetricPattern21, + pub dateindex: MetricPattern21, + pub dateindex_count: MetricPattern19, + pub decadeindex: MetricPattern12, + pub first_dateindex: MetricPattern19, + pub first_height: MetricPattern21, + pub first_monthindex: MetricPattern8, + pub first_yearindex: MetricPattern22, + pub height_count: MetricPattern21, + pub monthindex: MetricPattern10, + pub monthindex_count: MetricPattern8, + pub quarterindex: MetricPattern17, + pub semesterindex: MetricPattern18, + pub weekindex: MetricPattern11, + pub yearindex: MetricPattern20, + pub yearindex_count: MetricPattern22, } impl CatalogTree_Computed_Indexes_Time { - pub fn new(client: Arc, base_path: &str) -> Self { + pub fn new(client: Arc, base_path: String) -> Self { Self { - date: Indexes5::new(client.clone(), &format!("{base_path}_date")), - dateindex: Indexes5::new(client.clone(), &format!("{base_path}_dateindex")), - dateindex_count: Indexes22::new(client.clone(), &format!("{base_path}_dateindex_count")), - decadeindex: MetricNode::new(client.clone(), format!("{base_path}_decadeindex")), - first_dateindex: Indexes22::new(client.clone(), &format!("{base_path}_first_dateindex")), - first_height: Indexes5::new(client.clone(), 
&format!("{base_path}_first_height")), - first_monthindex: Indexes23::new(client.clone(), &format!("{base_path}_first_monthindex")), - first_yearindex: Indexes7::new(client.clone(), &format!("{base_path}_first_yearindex")), - height_count: Indexes5::new(client.clone(), &format!("{base_path}_height_count")), - monthindex: MetricNode::new(client.clone(), format!("{base_path}_monthindex")), - monthindex_count: Indexes23::new(client.clone(), &format!("{base_path}_monthindex_count")), - quarterindex: MetricNode::new(client.clone(), format!("{base_path}_quarterindex")), - semesterindex: MetricNode::new(client.clone(), format!("{base_path}_semesterindex")), - weekindex: MetricNode::new(client.clone(), format!("{base_path}_weekindex")), - yearindex: MetricNode::new(client.clone(), format!("{base_path}_yearindex")), - yearindex_count: Indexes7::new(client.clone(), &format!("{base_path}_yearindex_count")), + date: MetricPattern21::new(client.clone(), format!("{base_path}_date")), + dateindex: MetricPattern21::new(client.clone(), format!("{base_path}_dateindex")), + dateindex_count: MetricPattern19::new(client.clone(), format!("{base_path}_dateindex_count")), + decadeindex: MetricPattern12::new(client.clone(), format!("{base_path}_decadeindex")), + first_dateindex: MetricPattern19::new(client.clone(), format!("{base_path}_first_dateindex")), + first_height: MetricPattern21::new(client.clone(), format!("{base_path}_first_height")), + first_monthindex: MetricPattern8::new(client.clone(), format!("{base_path}_first_monthindex")), + first_yearindex: MetricPattern22::new(client.clone(), format!("{base_path}_first_yearindex")), + height_count: MetricPattern21::new(client.clone(), format!("{base_path}_height_count")), + monthindex: MetricPattern10::new(client.clone(), format!("{base_path}_monthindex")), + monthindex_count: MetricPattern8::new(client.clone(), format!("{base_path}_monthindex_count")), + quarterindex: MetricPattern17::new(client.clone(), 
format!("{base_path}_quarterindex")), + semesterindex: MetricPattern18::new(client.clone(), format!("{base_path}_semesterindex")), + weekindex: MetricPattern11::new(client.clone(), format!("{base_path}_weekindex")), + yearindex: MetricPattern20::new(client.clone(), format!("{base_path}_yearindex")), + yearindex_count: MetricPattern22::new(client.clone(), format!("{base_path}_yearindex_count")), } } } /// Catalog tree node. pub struct CatalogTree_Computed_Indexes_Transaction { - pub input_count: Indexes6, - pub output_count: Indexes6, - pub txindex: Indexes6, - pub txinindex: Indexes24, - pub txoutindex: Indexes25, + pub input_count: MetricPattern41, + pub output_count: MetricPattern41, + pub txindex: MetricPattern41, + pub txinindex: MetricPattern26, + pub txoutindex: MetricPattern29, } impl CatalogTree_Computed_Indexes_Transaction { - pub fn new(client: Arc, base_path: &str) -> Self { + pub fn new(client: Arc, base_path: String) -> Self { Self { - input_count: Indexes6::new(client.clone(), &format!("{base_path}_input_count")), - output_count: Indexes6::new(client.clone(), &format!("{base_path}_output_count")), - txindex: Indexes6::new(client.clone(), &format!("{base_path}_txindex")), - txinindex: Indexes24::new(client.clone(), &format!("{base_path}_txinindex")), - txoutindex: Indexes25::new(client.clone(), &format!("{base_path}_txoutindex")), + input_count: MetricPattern41::new(client.clone(), format!("{base_path}_input_count")), + output_count: MetricPattern41::new(client.clone(), format!("{base_path}_output_count")), + txindex: MetricPattern41::new(client.clone(), format!("{base_path}_txindex")), + txinindex: MetricPattern26::new(client.clone(), format!("{base_path}_txinindex")), + txoutindex: MetricPattern29::new(client.clone(), format!("{base_path}_txoutindex")), + } + } +} + +/// Catalog tree node. 
+pub struct CatalogTree_Computed_Inputs { + pub count: CatalogTree_Computed_Inputs_Count, + pub spent: CatalogTree_Computed_Inputs_Spent, +} + +impl CatalogTree_Computed_Inputs { + pub fn new(client: Arc, base_path: String) -> Self { + Self { + count: CatalogTree_Computed_Inputs_Count::new(client.clone(), format!("{base_path}_count")), + spent: CatalogTree_Computed_Inputs_Spent::new(client.clone(), format!("{base_path}_spent")), + } + } +} + +/// Catalog tree node. +pub struct CatalogTree_Computed_Inputs_Count { + pub count: BlockSizePattern, +} + +impl CatalogTree_Computed_Inputs_Count { + pub fn new(client: Arc, base_path: String) -> Self { + Self { + count: BlockSizePattern::new(client.clone(), "input_count".to_string()), + } + } +} + +/// Catalog tree node. +pub struct CatalogTree_Computed_Inputs_Spent { + pub txoutindex: MetricPattern26, + pub value: MetricPattern26, +} + +impl CatalogTree_Computed_Inputs_Spent { + pub fn new(client: Arc, base_path: String) -> Self { + Self { + txoutindex: MetricPattern26::new(client.clone(), format!("{base_path}_txoutindex")), + value: MetricPattern26::new(client.clone(), format!("{base_path}_value")), } } } @@ -2326,424 +5705,561 @@ impl CatalogTree_Computed_Indexes_Transaction { pub struct CatalogTree_Computed_Market { pub ath: CatalogTree_Computed_Market_Ath, pub dca: CatalogTree_Computed_Market_Dca, - pub history: CatalogTree_Computed_Market_History, + pub indicators: CatalogTree_Computed_Market_Indicators, + pub lookback: CatalogTree_Computed_Market_Lookback, pub moving_average: CatalogTree_Computed_Market_MovingAverage, pub range: CatalogTree_Computed_Market_Range, + pub returns: CatalogTree_Computed_Market_Returns, pub volatility: CatalogTree_Computed_Market_Volatility, } impl CatalogTree_Computed_Market { - pub fn new(client: Arc, base_path: &str) -> Self { + pub fn new(client: Arc, base_path: String) -> Self { Self { - ath: CatalogTree_Computed_Market_Ath::new(client.clone(), &format!("{base_path}_ath")), - dca: 
CatalogTree_Computed_Market_Dca::new(client.clone(), &format!("{base_path}_dca")), - history: CatalogTree_Computed_Market_History::new(client.clone(), &format!("{base_path}_history")), - moving_average: CatalogTree_Computed_Market_MovingAverage::new(client.clone(), &format!("{base_path}_moving_average")), - range: CatalogTree_Computed_Market_Range::new(client.clone(), &format!("{base_path}_range")), - volatility: CatalogTree_Computed_Market_Volatility::new(client.clone(), &format!("{base_path}_volatility")), + ath: CatalogTree_Computed_Market_Ath::new(client.clone(), format!("{base_path}_ath")), + dca: CatalogTree_Computed_Market_Dca::new(client.clone(), format!("{base_path}_dca")), + indicators: CatalogTree_Computed_Market_Indicators::new(client.clone(), format!("{base_path}_indicators")), + lookback: CatalogTree_Computed_Market_Lookback::new(client.clone(), format!("{base_path}_lookback")), + moving_average: CatalogTree_Computed_Market_MovingAverage::new(client.clone(), format!("{base_path}_moving_average")), + range: CatalogTree_Computed_Market_Range::new(client.clone(), format!("{base_path}_range")), + returns: CatalogTree_Computed_Market_Returns::new(client.clone(), format!("{base_path}_returns")), + volatility: CatalogTree_Computed_Market_Volatility::new(client.clone(), format!("{base_path}_volatility")), } } } /// Catalog tree node. 
pub struct CatalogTree_Computed_Market_Ath { - pub days_since_price_ath: Indexes, - pub max_days_between_price_aths: Indexes, - pub max_years_between_price_aths: Indexes, - pub price_ath: Indexes26, - pub price_drawdown: Indexes26, + pub days_since_price_ath: MetricPattern4, + pub max_days_between_price_aths: MetricPattern4, + pub max_years_between_price_aths: MetricPattern4, + pub price_ath: MetricPattern3, + pub price_drawdown: MetricPattern3, } impl CatalogTree_Computed_Market_Ath { - pub fn new(client: Arc, base_path: &str) -> Self { + pub fn new(client: Arc, base_path: String) -> Self { Self { - days_since_price_ath: Indexes::new(client.clone(), &format!("{base_path}_days_since_price_ath")), - max_days_between_price_aths: Indexes::new(client.clone(), &format!("{base_path}_max_days_between_price_aths")), - max_years_between_price_aths: Indexes::new(client.clone(), &format!("{base_path}_max_years_between_price_aths")), - price_ath: Indexes26::new(client.clone(), &format!("{base_path}_price_ath")), - price_drawdown: Indexes26::new(client.clone(), &format!("{base_path}_price_drawdown")), + days_since_price_ath: MetricPattern4::new(client.clone(), format!("{base_path}_days_since_price_ath")), + max_days_between_price_aths: MetricPattern4::new(client.clone(), format!("{base_path}_max_days_between_price_aths")), + max_years_between_price_aths: MetricPattern4::new(client.clone(), format!("{base_path}_max_years_between_price_aths")), + price_ath: MetricPattern3::new(client.clone(), format!("{base_path}_price_ath")), + price_drawdown: MetricPattern3::new(client.clone(), format!("{base_path}_price_drawdown")), } } } /// Catalog tree node. 
pub struct CatalogTree_Computed_Market_Dca { - pub _10y_dca_avg_price: Indexes, - pub _10y_dca_cagr: Indexes, - pub _10y_dca_returns: Indexes, - pub _10y_dca_stack: Indexes, - pub _1m_dca_avg_price: Indexes, - pub _1m_dca_returns: Indexes, - pub _1m_dca_stack: Indexes, - pub _1w_dca_avg_price: Indexes, - pub _1w_dca_returns: Indexes, - pub _1w_dca_stack: Indexes, - pub _1y_dca_avg_price: Indexes, - pub _1y_dca_returns: Indexes, - pub _1y_dca_stack: Indexes, - pub _2y_dca_avg_price: Indexes, - pub _2y_dca_cagr: Indexes, - pub _2y_dca_returns: Indexes, - pub _2y_dca_stack: Indexes, - pub _3m_dca_avg_price: Indexes, - pub _3m_dca_returns: Indexes, - pub _3m_dca_stack: Indexes, - pub _3y_dca_avg_price: Indexes, - pub _3y_dca_cagr: Indexes, - pub _3y_dca_returns: Indexes, - pub _3y_dca_stack: Indexes, - pub _4y_dca_avg_price: Indexes, - pub _4y_dca_cagr: Indexes, - pub _4y_dca_returns: Indexes, - pub _4y_dca_stack: Indexes, - pub _5y_dca_avg_price: Indexes, - pub _5y_dca_cagr: Indexes, - pub _5y_dca_returns: Indexes, - pub _5y_dca_stack: Indexes, - pub _6m_dca_avg_price: Indexes, - pub _6m_dca_returns: Indexes, - pub _6m_dca_stack: Indexes, - pub _6y_dca_avg_price: Indexes, - pub _6y_dca_cagr: Indexes, - pub _6y_dca_returns: Indexes, - pub _6y_dca_stack: Indexes, - pub _8y_dca_avg_price: Indexes, - pub _8y_dca_cagr: Indexes, - pub _8y_dca_returns: Indexes, - pub _8y_dca_stack: Indexes, - pub dca_class_2015_avg_price: Indexes, - pub dca_class_2015_returns: Indexes, - pub dca_class_2015_stack: Indexes, - pub dca_class_2016_avg_price: Indexes, - pub dca_class_2016_returns: Indexes, - pub dca_class_2016_stack: Indexes, - pub dca_class_2017_avg_price: Indexes, - pub dca_class_2017_returns: Indexes, - pub dca_class_2017_stack: Indexes, - pub dca_class_2018_avg_price: Indexes, - pub dca_class_2018_returns: Indexes, - pub dca_class_2018_stack: Indexes, - pub dca_class_2019_avg_price: Indexes, - pub dca_class_2019_returns: Indexes, - pub dca_class_2019_stack: Indexes, - pub 
dca_class_2020_avg_price: Indexes, - pub dca_class_2020_returns: Indexes, - pub dca_class_2020_stack: Indexes, - pub dca_class_2021_avg_price: Indexes, - pub dca_class_2021_returns: Indexes, - pub dca_class_2021_stack: Indexes, - pub dca_class_2022_avg_price: Indexes, - pub dca_class_2022_returns: Indexes, - pub dca_class_2022_stack: Indexes, - pub dca_class_2023_avg_price: Indexes, - pub dca_class_2023_returns: Indexes, - pub dca_class_2023_stack: Indexes, - pub dca_class_2024_avg_price: Indexes, - pub dca_class_2024_returns: Indexes, - pub dca_class_2024_stack: Indexes, - pub dca_class_2025_avg_price: Indexes, - pub dca_class_2025_returns: Indexes, - pub dca_class_2025_stack: Indexes, + pub _10y_dca_avg_price: MetricPattern4, + pub _10y_dca_cagr: MetricPattern4, + pub _10y_dca_returns: MetricPattern4, + pub _10y_dca_stack: MetricPattern4, + pub _1m_dca_avg_price: MetricPattern4, + pub _1m_dca_returns: MetricPattern4, + pub _1m_dca_stack: MetricPattern4, + pub _1w_dca_avg_price: MetricPattern4, + pub _1w_dca_returns: MetricPattern4, + pub _1w_dca_stack: MetricPattern4, + pub _1y_dca_avg_price: MetricPattern4, + pub _1y_dca_returns: MetricPattern4, + pub _1y_dca_stack: MetricPattern4, + pub _2y_dca_avg_price: MetricPattern4, + pub _2y_dca_cagr: MetricPattern4, + pub _2y_dca_returns: MetricPattern4, + pub _2y_dca_stack: MetricPattern4, + pub _3m_dca_avg_price: MetricPattern4, + pub _3m_dca_returns: MetricPattern4, + pub _3m_dca_stack: MetricPattern4, + pub _3y_dca_avg_price: MetricPattern4, + pub _3y_dca_cagr: MetricPattern4, + pub _3y_dca_returns: MetricPattern4, + pub _3y_dca_stack: MetricPattern4, + pub _4y_dca_avg_price: MetricPattern4, + pub _4y_dca_cagr: MetricPattern4, + pub _4y_dca_returns: MetricPattern4, + pub _4y_dca_stack: MetricPattern4, + pub _5y_dca_avg_price: MetricPattern4, + pub _5y_dca_cagr: MetricPattern4, + pub _5y_dca_returns: MetricPattern4, + pub _5y_dca_stack: MetricPattern4, + pub _6m_dca_avg_price: MetricPattern4, + pub _6m_dca_returns: 
MetricPattern4, + pub _6m_dca_stack: MetricPattern4, + pub _6y_dca_avg_price: MetricPattern4, + pub _6y_dca_cagr: MetricPattern4, + pub _6y_dca_returns: MetricPattern4, + pub _6y_dca_stack: MetricPattern4, + pub _8y_dca_avg_price: MetricPattern4, + pub _8y_dca_cagr: MetricPattern4, + pub _8y_dca_returns: MetricPattern4, + pub _8y_dca_stack: MetricPattern4, + pub dca_class_2015_avg_price: MetricPattern4, + pub dca_class_2015_returns: MetricPattern4, + pub dca_class_2015_stack: MetricPattern4, + pub dca_class_2016_avg_price: MetricPattern4, + pub dca_class_2016_returns: MetricPattern4, + pub dca_class_2016_stack: MetricPattern4, + pub dca_class_2017_avg_price: MetricPattern4, + pub dca_class_2017_returns: MetricPattern4, + pub dca_class_2017_stack: MetricPattern4, + pub dca_class_2018_avg_price: MetricPattern4, + pub dca_class_2018_returns: MetricPattern4, + pub dca_class_2018_stack: MetricPattern4, + pub dca_class_2019_avg_price: MetricPattern4, + pub dca_class_2019_returns: MetricPattern4, + pub dca_class_2019_stack: MetricPattern4, + pub dca_class_2020_avg_price: MetricPattern4, + pub dca_class_2020_returns: MetricPattern4, + pub dca_class_2020_stack: MetricPattern4, + pub dca_class_2021_avg_price: MetricPattern4, + pub dca_class_2021_returns: MetricPattern4, + pub dca_class_2021_stack: MetricPattern4, + pub dca_class_2022_avg_price: MetricPattern4, + pub dca_class_2022_returns: MetricPattern4, + pub dca_class_2022_stack: MetricPattern4, + pub dca_class_2023_avg_price: MetricPattern4, + pub dca_class_2023_returns: MetricPattern4, + pub dca_class_2023_stack: MetricPattern4, + pub dca_class_2024_avg_price: MetricPattern4, + pub dca_class_2024_returns: MetricPattern4, + pub dca_class_2024_stack: MetricPattern4, + pub dca_class_2025_avg_price: MetricPattern4, + pub dca_class_2025_returns: MetricPattern4, + pub dca_class_2025_stack: MetricPattern4, } impl CatalogTree_Computed_Market_Dca { - pub fn new(client: Arc, base_path: &str) -> Self { + pub fn new(client: Arc, 
base_path: String) -> Self { Self { - _10y_dca_avg_price: Indexes::new(client.clone(), &format!("{base_path}__10y_dca_avg_price")), - _10y_dca_cagr: Indexes::new(client.clone(), &format!("{base_path}__10y_dca_cagr")), - _10y_dca_returns: Indexes::new(client.clone(), &format!("{base_path}__10y_dca_returns")), - _10y_dca_stack: Indexes::new(client.clone(), &format!("{base_path}__10y_dca_stack")), - _1m_dca_avg_price: Indexes::new(client.clone(), &format!("{base_path}__1m_dca_avg_price")), - _1m_dca_returns: Indexes::new(client.clone(), &format!("{base_path}__1m_dca_returns")), - _1m_dca_stack: Indexes::new(client.clone(), &format!("{base_path}__1m_dca_stack")), - _1w_dca_avg_price: Indexes::new(client.clone(), &format!("{base_path}__1w_dca_avg_price")), - _1w_dca_returns: Indexes::new(client.clone(), &format!("{base_path}__1w_dca_returns")), - _1w_dca_stack: Indexes::new(client.clone(), &format!("{base_path}__1w_dca_stack")), - _1y_dca_avg_price: Indexes::new(client.clone(), &format!("{base_path}__1y_dca_avg_price")), - _1y_dca_returns: Indexes::new(client.clone(), &format!("{base_path}__1y_dca_returns")), - _1y_dca_stack: Indexes::new(client.clone(), &format!("{base_path}__1y_dca_stack")), - _2y_dca_avg_price: Indexes::new(client.clone(), &format!("{base_path}__2y_dca_avg_price")), - _2y_dca_cagr: Indexes::new(client.clone(), &format!("{base_path}__2y_dca_cagr")), - _2y_dca_returns: Indexes::new(client.clone(), &format!("{base_path}__2y_dca_returns")), - _2y_dca_stack: Indexes::new(client.clone(), &format!("{base_path}__2y_dca_stack")), - _3m_dca_avg_price: Indexes::new(client.clone(), &format!("{base_path}__3m_dca_avg_price")), - _3m_dca_returns: Indexes::new(client.clone(), &format!("{base_path}__3m_dca_returns")), - _3m_dca_stack: Indexes::new(client.clone(), &format!("{base_path}__3m_dca_stack")), - _3y_dca_avg_price: Indexes::new(client.clone(), &format!("{base_path}__3y_dca_avg_price")), - _3y_dca_cagr: Indexes::new(client.clone(), 
&format!("{base_path}__3y_dca_cagr")), - _3y_dca_returns: Indexes::new(client.clone(), &format!("{base_path}__3y_dca_returns")), - _3y_dca_stack: Indexes::new(client.clone(), &format!("{base_path}__3y_dca_stack")), - _4y_dca_avg_price: Indexes::new(client.clone(), &format!("{base_path}__4y_dca_avg_price")), - _4y_dca_cagr: Indexes::new(client.clone(), &format!("{base_path}__4y_dca_cagr")), - _4y_dca_returns: Indexes::new(client.clone(), &format!("{base_path}__4y_dca_returns")), - _4y_dca_stack: Indexes::new(client.clone(), &format!("{base_path}__4y_dca_stack")), - _5y_dca_avg_price: Indexes::new(client.clone(), &format!("{base_path}__5y_dca_avg_price")), - _5y_dca_cagr: Indexes::new(client.clone(), &format!("{base_path}__5y_dca_cagr")), - _5y_dca_returns: Indexes::new(client.clone(), &format!("{base_path}__5y_dca_returns")), - _5y_dca_stack: Indexes::new(client.clone(), &format!("{base_path}__5y_dca_stack")), - _6m_dca_avg_price: Indexes::new(client.clone(), &format!("{base_path}__6m_dca_avg_price")), - _6m_dca_returns: Indexes::new(client.clone(), &format!("{base_path}__6m_dca_returns")), - _6m_dca_stack: Indexes::new(client.clone(), &format!("{base_path}__6m_dca_stack")), - _6y_dca_avg_price: Indexes::new(client.clone(), &format!("{base_path}__6y_dca_avg_price")), - _6y_dca_cagr: Indexes::new(client.clone(), &format!("{base_path}__6y_dca_cagr")), - _6y_dca_returns: Indexes::new(client.clone(), &format!("{base_path}__6y_dca_returns")), - _6y_dca_stack: Indexes::new(client.clone(), &format!("{base_path}__6y_dca_stack")), - _8y_dca_avg_price: Indexes::new(client.clone(), &format!("{base_path}__8y_dca_avg_price")), - _8y_dca_cagr: Indexes::new(client.clone(), &format!("{base_path}__8y_dca_cagr")), - _8y_dca_returns: Indexes::new(client.clone(), &format!("{base_path}__8y_dca_returns")), - _8y_dca_stack: Indexes::new(client.clone(), &format!("{base_path}__8y_dca_stack")), - dca_class_2015_avg_price: Indexes::new(client.clone(), 
&format!("{base_path}_dca_class_2015_avg_price")), - dca_class_2015_returns: Indexes::new(client.clone(), &format!("{base_path}_dca_class_2015_returns")), - dca_class_2015_stack: Indexes::new(client.clone(), &format!("{base_path}_dca_class_2015_stack")), - dca_class_2016_avg_price: Indexes::new(client.clone(), &format!("{base_path}_dca_class_2016_avg_price")), - dca_class_2016_returns: Indexes::new(client.clone(), &format!("{base_path}_dca_class_2016_returns")), - dca_class_2016_stack: Indexes::new(client.clone(), &format!("{base_path}_dca_class_2016_stack")), - dca_class_2017_avg_price: Indexes::new(client.clone(), &format!("{base_path}_dca_class_2017_avg_price")), - dca_class_2017_returns: Indexes::new(client.clone(), &format!("{base_path}_dca_class_2017_returns")), - dca_class_2017_stack: Indexes::new(client.clone(), &format!("{base_path}_dca_class_2017_stack")), - dca_class_2018_avg_price: Indexes::new(client.clone(), &format!("{base_path}_dca_class_2018_avg_price")), - dca_class_2018_returns: Indexes::new(client.clone(), &format!("{base_path}_dca_class_2018_returns")), - dca_class_2018_stack: Indexes::new(client.clone(), &format!("{base_path}_dca_class_2018_stack")), - dca_class_2019_avg_price: Indexes::new(client.clone(), &format!("{base_path}_dca_class_2019_avg_price")), - dca_class_2019_returns: Indexes::new(client.clone(), &format!("{base_path}_dca_class_2019_returns")), - dca_class_2019_stack: Indexes::new(client.clone(), &format!("{base_path}_dca_class_2019_stack")), - dca_class_2020_avg_price: Indexes::new(client.clone(), &format!("{base_path}_dca_class_2020_avg_price")), - dca_class_2020_returns: Indexes::new(client.clone(), &format!("{base_path}_dca_class_2020_returns")), - dca_class_2020_stack: Indexes::new(client.clone(), &format!("{base_path}_dca_class_2020_stack")), - dca_class_2021_avg_price: Indexes::new(client.clone(), &format!("{base_path}_dca_class_2021_avg_price")), - dca_class_2021_returns: Indexes::new(client.clone(), 
&format!("{base_path}_dca_class_2021_returns")), - dca_class_2021_stack: Indexes::new(client.clone(), &format!("{base_path}_dca_class_2021_stack")), - dca_class_2022_avg_price: Indexes::new(client.clone(), &format!("{base_path}_dca_class_2022_avg_price")), - dca_class_2022_returns: Indexes::new(client.clone(), &format!("{base_path}_dca_class_2022_returns")), - dca_class_2022_stack: Indexes::new(client.clone(), &format!("{base_path}_dca_class_2022_stack")), - dca_class_2023_avg_price: Indexes::new(client.clone(), &format!("{base_path}_dca_class_2023_avg_price")), - dca_class_2023_returns: Indexes::new(client.clone(), &format!("{base_path}_dca_class_2023_returns")), - dca_class_2023_stack: Indexes::new(client.clone(), &format!("{base_path}_dca_class_2023_stack")), - dca_class_2024_avg_price: Indexes::new(client.clone(), &format!("{base_path}_dca_class_2024_avg_price")), - dca_class_2024_returns: Indexes::new(client.clone(), &format!("{base_path}_dca_class_2024_returns")), - dca_class_2024_stack: Indexes::new(client.clone(), &format!("{base_path}_dca_class_2024_stack")), - dca_class_2025_avg_price: Indexes::new(client.clone(), &format!("{base_path}_dca_class_2025_avg_price")), - dca_class_2025_returns: Indexes::new(client.clone(), &format!("{base_path}_dca_class_2025_returns")), - dca_class_2025_stack: Indexes::new(client.clone(), &format!("{base_path}_dca_class_2025_stack")), + _10y_dca_avg_price: MetricPattern4::new(client.clone(), format!("{base_path}_10y_dca_avg_price")), + _10y_dca_cagr: MetricPattern4::new(client.clone(), format!("{base_path}_10y_dca_cagr")), + _10y_dca_returns: MetricPattern4::new(client.clone(), format!("{base_path}_10y_dca_returns")), + _10y_dca_stack: MetricPattern4::new(client.clone(), format!("{base_path}_10y_dca_stack")), + _1m_dca_avg_price: MetricPattern4::new(client.clone(), format!("{base_path}_1m_dca_avg_price")), + _1m_dca_returns: MetricPattern4::new(client.clone(), format!("{base_path}_1m_dca_returns")), + _1m_dca_stack: 
MetricPattern4::new(client.clone(), format!("{base_path}_1m_dca_stack")), + _1w_dca_avg_price: MetricPattern4::new(client.clone(), format!("{base_path}_1w_dca_avg_price")), + _1w_dca_returns: MetricPattern4::new(client.clone(), format!("{base_path}_1w_dca_returns")), + _1w_dca_stack: MetricPattern4::new(client.clone(), format!("{base_path}_1w_dca_stack")), + _1y_dca_avg_price: MetricPattern4::new(client.clone(), format!("{base_path}_1y_dca_avg_price")), + _1y_dca_returns: MetricPattern4::new(client.clone(), format!("{base_path}_1y_dca_returns")), + _1y_dca_stack: MetricPattern4::new(client.clone(), format!("{base_path}_1y_dca_stack")), + _2y_dca_avg_price: MetricPattern4::new(client.clone(), format!("{base_path}_2y_dca_avg_price")), + _2y_dca_cagr: MetricPattern4::new(client.clone(), format!("{base_path}_2y_dca_cagr")), + _2y_dca_returns: MetricPattern4::new(client.clone(), format!("{base_path}_2y_dca_returns")), + _2y_dca_stack: MetricPattern4::new(client.clone(), format!("{base_path}_2y_dca_stack")), + _3m_dca_avg_price: MetricPattern4::new(client.clone(), format!("{base_path}_3m_dca_avg_price")), + _3m_dca_returns: MetricPattern4::new(client.clone(), format!("{base_path}_3m_dca_returns")), + _3m_dca_stack: MetricPattern4::new(client.clone(), format!("{base_path}_3m_dca_stack")), + _3y_dca_avg_price: MetricPattern4::new(client.clone(), format!("{base_path}_3y_dca_avg_price")), + _3y_dca_cagr: MetricPattern4::new(client.clone(), format!("{base_path}_3y_dca_cagr")), + _3y_dca_returns: MetricPattern4::new(client.clone(), format!("{base_path}_3y_dca_returns")), + _3y_dca_stack: MetricPattern4::new(client.clone(), format!("{base_path}_3y_dca_stack")), + _4y_dca_avg_price: MetricPattern4::new(client.clone(), format!("{base_path}_4y_dca_avg_price")), + _4y_dca_cagr: MetricPattern4::new(client.clone(), format!("{base_path}_4y_dca_cagr")), + _4y_dca_returns: MetricPattern4::new(client.clone(), format!("{base_path}_4y_dca_returns")), + _4y_dca_stack: 
MetricPattern4::new(client.clone(), format!("{base_path}_4y_dca_stack")), + _5y_dca_avg_price: MetricPattern4::new(client.clone(), format!("{base_path}_5y_dca_avg_price")), + _5y_dca_cagr: MetricPattern4::new(client.clone(), format!("{base_path}_5y_dca_cagr")), + _5y_dca_returns: MetricPattern4::new(client.clone(), format!("{base_path}_5y_dca_returns")), + _5y_dca_stack: MetricPattern4::new(client.clone(), format!("{base_path}_5y_dca_stack")), + _6m_dca_avg_price: MetricPattern4::new(client.clone(), format!("{base_path}_6m_dca_avg_price")), + _6m_dca_returns: MetricPattern4::new(client.clone(), format!("{base_path}_6m_dca_returns")), + _6m_dca_stack: MetricPattern4::new(client.clone(), format!("{base_path}_6m_dca_stack")), + _6y_dca_avg_price: MetricPattern4::new(client.clone(), format!("{base_path}_6y_dca_avg_price")), + _6y_dca_cagr: MetricPattern4::new(client.clone(), format!("{base_path}_6y_dca_cagr")), + _6y_dca_returns: MetricPattern4::new(client.clone(), format!("{base_path}_6y_dca_returns")), + _6y_dca_stack: MetricPattern4::new(client.clone(), format!("{base_path}_6y_dca_stack")), + _8y_dca_avg_price: MetricPattern4::new(client.clone(), format!("{base_path}_8y_dca_avg_price")), + _8y_dca_cagr: MetricPattern4::new(client.clone(), format!("{base_path}_8y_dca_cagr")), + _8y_dca_returns: MetricPattern4::new(client.clone(), format!("{base_path}_8y_dca_returns")), + _8y_dca_stack: MetricPattern4::new(client.clone(), format!("{base_path}_8y_dca_stack")), + dca_class_2015_avg_price: MetricPattern4::new(client.clone(), format!("{base_path}_dca_class_2015_avg_price")), + dca_class_2015_returns: MetricPattern4::new(client.clone(), format!("{base_path}_dca_class_2015_returns")), + dca_class_2015_stack: MetricPattern4::new(client.clone(), format!("{base_path}_dca_class_2015_stack")), + dca_class_2016_avg_price: MetricPattern4::new(client.clone(), format!("{base_path}_dca_class_2016_avg_price")), + dca_class_2016_returns: MetricPattern4::new(client.clone(), 
format!("{base_path}_dca_class_2016_returns")), + dca_class_2016_stack: MetricPattern4::new(client.clone(), format!("{base_path}_dca_class_2016_stack")), + dca_class_2017_avg_price: MetricPattern4::new(client.clone(), format!("{base_path}_dca_class_2017_avg_price")), + dca_class_2017_returns: MetricPattern4::new(client.clone(), format!("{base_path}_dca_class_2017_returns")), + dca_class_2017_stack: MetricPattern4::new(client.clone(), format!("{base_path}_dca_class_2017_stack")), + dca_class_2018_avg_price: MetricPattern4::new(client.clone(), format!("{base_path}_dca_class_2018_avg_price")), + dca_class_2018_returns: MetricPattern4::new(client.clone(), format!("{base_path}_dca_class_2018_returns")), + dca_class_2018_stack: MetricPattern4::new(client.clone(), format!("{base_path}_dca_class_2018_stack")), + dca_class_2019_avg_price: MetricPattern4::new(client.clone(), format!("{base_path}_dca_class_2019_avg_price")), + dca_class_2019_returns: MetricPattern4::new(client.clone(), format!("{base_path}_dca_class_2019_returns")), + dca_class_2019_stack: MetricPattern4::new(client.clone(), format!("{base_path}_dca_class_2019_stack")), + dca_class_2020_avg_price: MetricPattern4::new(client.clone(), format!("{base_path}_dca_class_2020_avg_price")), + dca_class_2020_returns: MetricPattern4::new(client.clone(), format!("{base_path}_dca_class_2020_returns")), + dca_class_2020_stack: MetricPattern4::new(client.clone(), format!("{base_path}_dca_class_2020_stack")), + dca_class_2021_avg_price: MetricPattern4::new(client.clone(), format!("{base_path}_dca_class_2021_avg_price")), + dca_class_2021_returns: MetricPattern4::new(client.clone(), format!("{base_path}_dca_class_2021_returns")), + dca_class_2021_stack: MetricPattern4::new(client.clone(), format!("{base_path}_dca_class_2021_stack")), + dca_class_2022_avg_price: MetricPattern4::new(client.clone(), format!("{base_path}_dca_class_2022_avg_price")), + dca_class_2022_returns: MetricPattern4::new(client.clone(), 
format!("{base_path}_dca_class_2022_returns")), + dca_class_2022_stack: MetricPattern4::new(client.clone(), format!("{base_path}_dca_class_2022_stack")), + dca_class_2023_avg_price: MetricPattern4::new(client.clone(), format!("{base_path}_dca_class_2023_avg_price")), + dca_class_2023_returns: MetricPattern4::new(client.clone(), format!("{base_path}_dca_class_2023_returns")), + dca_class_2023_stack: MetricPattern4::new(client.clone(), format!("{base_path}_dca_class_2023_stack")), + dca_class_2024_avg_price: MetricPattern4::new(client.clone(), format!("{base_path}_dca_class_2024_avg_price")), + dca_class_2024_returns: MetricPattern4::new(client.clone(), format!("{base_path}_dca_class_2024_returns")), + dca_class_2024_stack: MetricPattern4::new(client.clone(), format!("{base_path}_dca_class_2024_stack")), + dca_class_2025_avg_price: MetricPattern4::new(client.clone(), format!("{base_path}_dca_class_2025_avg_price")), + dca_class_2025_returns: MetricPattern4::new(client.clone(), format!("{base_path}_dca_class_2025_returns")), + dca_class_2025_stack: MetricPattern4::new(client.clone(), format!("{base_path}_dca_class_2025_stack")), } } } /// Catalog tree node. 
-pub struct CatalogTree_Computed_Market_History { - pub _10y_cagr: Indexes, - pub _10y_price_returns: Indexes, - pub _1d_price_returns: Indexes, - pub _1m_price_returns: Indexes, - pub _1w_price_returns: Indexes, - pub _1y_price_returns: Indexes, - pub _2y_cagr: Indexes, - pub _2y_price_returns: Indexes, - pub _3m_price_returns: Indexes, - pub _3y_cagr: Indexes, - pub _3y_price_returns: Indexes, - pub _4y_cagr: Indexes, - pub _4y_price_returns: Indexes, - pub _5y_cagr: Indexes, - pub _5y_price_returns: Indexes, - pub _6m_price_returns: Indexes, - pub _6y_cagr: Indexes, - pub _6y_price_returns: Indexes, - pub _8y_cagr: Indexes, - pub _8y_price_returns: Indexes, - pub price_10y_ago: Indexes, - pub price_1d_ago: Indexes, - pub price_1m_ago: Indexes, - pub price_1w_ago: Indexes, - pub price_1y_ago: Indexes, - pub price_2y_ago: Indexes, - pub price_3m_ago: Indexes, - pub price_3y_ago: Indexes, - pub price_4y_ago: Indexes, - pub price_5y_ago: Indexes, - pub price_6m_ago: Indexes, - pub price_6y_ago: Indexes, - pub price_8y_ago: Indexes, +pub struct CatalogTree_Computed_Market_Indicators { + pub gini: MetricPattern21, + pub macd_histogram: MetricPattern21, + pub macd_line: MetricPattern21, + pub macd_signal: MetricPattern21, + pub nvt: MetricPattern21, + pub pi_cycle: MetricPattern21, + pub puell_multiple: MetricPattern4, + pub rsi_14d: MetricPattern21, + pub rsi_14d_max: MetricPattern21, + pub rsi_14d_min: MetricPattern21, + pub rsi_avg_gain_14d: MetricPattern21, + pub rsi_avg_loss_14d: MetricPattern21, + pub rsi_gains: MetricPattern21, + pub rsi_losses: MetricPattern21, + pub stoch_d: MetricPattern21, + pub stoch_k: MetricPattern21, + pub stoch_rsi: MetricPattern21, + pub stoch_rsi_d: MetricPattern21, + pub stoch_rsi_k: MetricPattern21, } -impl CatalogTree_Computed_Market_History { - pub fn new(client: Arc, base_path: &str) -> Self { +impl CatalogTree_Computed_Market_Indicators { + pub fn new(client: Arc, base_path: String) -> Self { Self { - _10y_cagr: 
Indexes::new(client.clone(), &format!("{base_path}__10y_cagr")), - _10y_price_returns: Indexes::new(client.clone(), &format!("{base_path}__10y_price_returns")), - _1d_price_returns: Indexes::new(client.clone(), &format!("{base_path}__1d_price_returns")), - _1m_price_returns: Indexes::new(client.clone(), &format!("{base_path}__1m_price_returns")), - _1w_price_returns: Indexes::new(client.clone(), &format!("{base_path}__1w_price_returns")), - _1y_price_returns: Indexes::new(client.clone(), &format!("{base_path}__1y_price_returns")), - _2y_cagr: Indexes::new(client.clone(), &format!("{base_path}__2y_cagr")), - _2y_price_returns: Indexes::new(client.clone(), &format!("{base_path}__2y_price_returns")), - _3m_price_returns: Indexes::new(client.clone(), &format!("{base_path}__3m_price_returns")), - _3y_cagr: Indexes::new(client.clone(), &format!("{base_path}__3y_cagr")), - _3y_price_returns: Indexes::new(client.clone(), &format!("{base_path}__3y_price_returns")), - _4y_cagr: Indexes::new(client.clone(), &format!("{base_path}__4y_cagr")), - _4y_price_returns: Indexes::new(client.clone(), &format!("{base_path}__4y_price_returns")), - _5y_cagr: Indexes::new(client.clone(), &format!("{base_path}__5y_cagr")), - _5y_price_returns: Indexes::new(client.clone(), &format!("{base_path}__5y_price_returns")), - _6m_price_returns: Indexes::new(client.clone(), &format!("{base_path}__6m_price_returns")), - _6y_cagr: Indexes::new(client.clone(), &format!("{base_path}__6y_cagr")), - _6y_price_returns: Indexes::new(client.clone(), &format!("{base_path}__6y_price_returns")), - _8y_cagr: Indexes::new(client.clone(), &format!("{base_path}__8y_cagr")), - _8y_price_returns: Indexes::new(client.clone(), &format!("{base_path}__8y_price_returns")), - price_10y_ago: Indexes::new(client.clone(), &format!("{base_path}_price_10y_ago")), - price_1d_ago: Indexes::new(client.clone(), &format!("{base_path}_price_1d_ago")), - price_1m_ago: Indexes::new(client.clone(), &format!("{base_path}_price_1m_ago")), 
- price_1w_ago: Indexes::new(client.clone(), &format!("{base_path}_price_1w_ago")), - price_1y_ago: Indexes::new(client.clone(), &format!("{base_path}_price_1y_ago")), - price_2y_ago: Indexes::new(client.clone(), &format!("{base_path}_price_2y_ago")), - price_3m_ago: Indexes::new(client.clone(), &format!("{base_path}_price_3m_ago")), - price_3y_ago: Indexes::new(client.clone(), &format!("{base_path}_price_3y_ago")), - price_4y_ago: Indexes::new(client.clone(), &format!("{base_path}_price_4y_ago")), - price_5y_ago: Indexes::new(client.clone(), &format!("{base_path}_price_5y_ago")), - price_6m_ago: Indexes::new(client.clone(), &format!("{base_path}_price_6m_ago")), - price_6y_ago: Indexes::new(client.clone(), &format!("{base_path}_price_6y_ago")), - price_8y_ago: Indexes::new(client.clone(), &format!("{base_path}_price_8y_ago")), + gini: MetricPattern21::new(client.clone(), format!("{base_path}_gini")), + macd_histogram: MetricPattern21::new(client.clone(), format!("{base_path}_macd_histogram")), + macd_line: MetricPattern21::new(client.clone(), format!("{base_path}_macd_line")), + macd_signal: MetricPattern21::new(client.clone(), format!("{base_path}_macd_signal")), + nvt: MetricPattern21::new(client.clone(), format!("{base_path}_nvt")), + pi_cycle: MetricPattern21::new(client.clone(), format!("{base_path}_pi_cycle")), + puell_multiple: MetricPattern4::new(client.clone(), format!("{base_path}_puell_multiple")), + rsi_14d: MetricPattern21::new(client.clone(), format!("{base_path}_rsi_14d")), + rsi_14d_max: MetricPattern21::new(client.clone(), format!("{base_path}_rsi_14d_max")), + rsi_14d_min: MetricPattern21::new(client.clone(), format!("{base_path}_rsi_14d_min")), + rsi_avg_gain_14d: MetricPattern21::new(client.clone(), format!("{base_path}_rsi_avg_gain_14d")), + rsi_avg_loss_14d: MetricPattern21::new(client.clone(), format!("{base_path}_rsi_avg_loss_14d")), + rsi_gains: MetricPattern21::new(client.clone(), format!("{base_path}_rsi_gains")), + rsi_losses: 
MetricPattern21::new(client.clone(), format!("{base_path}_rsi_losses")), + stoch_d: MetricPattern21::new(client.clone(), format!("{base_path}_stoch_d")), + stoch_k: MetricPattern21::new(client.clone(), format!("{base_path}_stoch_k")), + stoch_rsi: MetricPattern21::new(client.clone(), format!("{base_path}_stoch_rsi")), + stoch_rsi_d: MetricPattern21::new(client.clone(), format!("{base_path}_stoch_rsi_d")), + stoch_rsi_k: MetricPattern21::new(client.clone(), format!("{base_path}_stoch_rsi_k")), + } + } +} + +/// Catalog tree node. +pub struct CatalogTree_Computed_Market_Lookback { + pub price_10y_ago: MetricPattern4, + pub price_1d_ago: MetricPattern4, + pub price_1m_ago: MetricPattern4, + pub price_1w_ago: MetricPattern4, + pub price_1y_ago: MetricPattern4, + pub price_2y_ago: MetricPattern4, + pub price_3m_ago: MetricPattern4, + pub price_3y_ago: MetricPattern4, + pub price_4y_ago: MetricPattern4, + pub price_5y_ago: MetricPattern4, + pub price_6m_ago: MetricPattern4, + pub price_6y_ago: MetricPattern4, + pub price_8y_ago: MetricPattern4, +} + +impl CatalogTree_Computed_Market_Lookback { + pub fn new(client: Arc, base_path: String) -> Self { + Self { + price_10y_ago: MetricPattern4::new(client.clone(), format!("{base_path}_price_10y_ago")), + price_1d_ago: MetricPattern4::new(client.clone(), format!("{base_path}_price_1d_ago")), + price_1m_ago: MetricPattern4::new(client.clone(), format!("{base_path}_price_1m_ago")), + price_1w_ago: MetricPattern4::new(client.clone(), format!("{base_path}_price_1w_ago")), + price_1y_ago: MetricPattern4::new(client.clone(), format!("{base_path}_price_1y_ago")), + price_2y_ago: MetricPattern4::new(client.clone(), format!("{base_path}_price_2y_ago")), + price_3m_ago: MetricPattern4::new(client.clone(), format!("{base_path}_price_3m_ago")), + price_3y_ago: MetricPattern4::new(client.clone(), format!("{base_path}_price_3y_ago")), + price_4y_ago: MetricPattern4::new(client.clone(), format!("{base_path}_price_4y_ago")), + price_5y_ago: 
MetricPattern4::new(client.clone(), format!("{base_path}_price_5y_ago")), + price_6m_ago: MetricPattern4::new(client.clone(), format!("{base_path}_price_6m_ago")), + price_6y_ago: MetricPattern4::new(client.clone(), format!("{base_path}_price_6y_ago")), + price_8y_ago: MetricPattern4::new(client.clone(), format!("{base_path}_price_8y_ago")), } } } /// Catalog tree node. pub struct CatalogTree_Computed_Market_MovingAverage { - pub price_13d_ema: Price13dEmaPattern, - pub price_13d_sma: Price13dEmaPattern, - pub price_144d_ema: Price13dEmaPattern, - pub price_144d_sma: Price13dEmaPattern, - pub price_1m_ema: Price13dEmaPattern, - pub price_1m_sma: Price13dEmaPattern, - pub price_1w_ema: Price13dEmaPattern, - pub price_1w_sma: Price13dEmaPattern, - pub price_1y_ema: Price13dEmaPattern, - pub price_1y_sma: Price13dEmaPattern, - pub price_200d_ema: Price13dEmaPattern, - pub price_200d_sma: Price13dEmaPattern, - pub price_200d_sma_x0_8: Indexes, - pub price_200d_sma_x2_4: Indexes, - pub price_200w_ema: Price13dEmaPattern, - pub price_200w_sma: Price13dEmaPattern, - pub price_21d_ema: Price13dEmaPattern, - pub price_21d_sma: Price13dEmaPattern, - pub price_2y_ema: Price13dEmaPattern, - pub price_2y_sma: Price13dEmaPattern, - pub price_34d_ema: Price13dEmaPattern, - pub price_34d_sma: Price13dEmaPattern, - pub price_4y_ema: Price13dEmaPattern, - pub price_4y_sma: Price13dEmaPattern, - pub price_55d_ema: Price13dEmaPattern, - pub price_55d_sma: Price13dEmaPattern, - pub price_89d_ema: Price13dEmaPattern, - pub price_89d_sma: Price13dEmaPattern, - pub price_8d_ema: Price13dEmaPattern, - pub price_8d_sma: Price13dEmaPattern, + pub price_111d_sma: Price111dSmaPattern, + pub price_12d_ema: Price111dSmaPattern, + pub price_13d_ema: Price111dSmaPattern, + pub price_13d_sma: Price111dSmaPattern, + pub price_144d_ema: Price111dSmaPattern, + pub price_144d_sma: Price111dSmaPattern, + pub price_1m_ema: Price111dSmaPattern, + pub price_1m_sma: Price111dSmaPattern, + pub price_1w_ema: 
Price111dSmaPattern, + pub price_1w_sma: Price111dSmaPattern, + pub price_1y_ema: Price111dSmaPattern, + pub price_1y_sma: Price111dSmaPattern, + pub price_200d_ema: Price111dSmaPattern, + pub price_200d_sma: Price111dSmaPattern, + pub price_200d_sma_x0_8: MetricPattern4, + pub price_200d_sma_x2_4: MetricPattern4, + pub price_200w_ema: Price111dSmaPattern, + pub price_200w_sma: Price111dSmaPattern, + pub price_21d_ema: Price111dSmaPattern, + pub price_21d_sma: Price111dSmaPattern, + pub price_26d_ema: Price111dSmaPattern, + pub price_2y_ema: Price111dSmaPattern, + pub price_2y_sma: Price111dSmaPattern, + pub price_34d_ema: Price111dSmaPattern, + pub price_34d_sma: Price111dSmaPattern, + pub price_350d_sma: Price111dSmaPattern, + pub price_350d_sma_x2: MetricPattern4, + pub price_4y_ema: Price111dSmaPattern, + pub price_4y_sma: Price111dSmaPattern, + pub price_55d_ema: Price111dSmaPattern, + pub price_55d_sma: Price111dSmaPattern, + pub price_89d_ema: Price111dSmaPattern, + pub price_89d_sma: Price111dSmaPattern, + pub price_8d_ema: Price111dSmaPattern, + pub price_8d_sma: Price111dSmaPattern, } impl CatalogTree_Computed_Market_MovingAverage { - pub fn new(client: Arc, base_path: &str) -> Self { + pub fn new(client: Arc, base_path: String) -> Self { Self { - price_13d_ema: Price13dEmaPattern::new(client.clone(), "price_13d_ema"), - price_13d_sma: Price13dEmaPattern::new(client.clone(), "price_13d_sma"), - price_144d_ema: Price13dEmaPattern::new(client.clone(), "price_144d_ema"), - price_144d_sma: Price13dEmaPattern::new(client.clone(), "price_144d_sma"), - price_1m_ema: Price13dEmaPattern::new(client.clone(), "price_1m_ema"), - price_1m_sma: Price13dEmaPattern::new(client.clone(), "price_1m_sma"), - price_1w_ema: Price13dEmaPattern::new(client.clone(), "price_1w_ema"), - price_1w_sma: Price13dEmaPattern::new(client.clone(), "price_1w_sma"), - price_1y_ema: Price13dEmaPattern::new(client.clone(), "price_1y_ema"), - price_1y_sma: 
Price13dEmaPattern::new(client.clone(), "price_1y_sma"), - price_200d_ema: Price13dEmaPattern::new(client.clone(), "price_200d_ema"), - price_200d_sma: Price13dEmaPattern::new(client.clone(), "price_200d_sma"), - price_200d_sma_x0_8: Indexes::new(client.clone(), &format!("{base_path}_price_200d_sma_x0_8")), - price_200d_sma_x2_4: Indexes::new(client.clone(), &format!("{base_path}_price_200d_sma_x2_4")), - price_200w_ema: Price13dEmaPattern::new(client.clone(), "price_200w_ema"), - price_200w_sma: Price13dEmaPattern::new(client.clone(), "price_200w_sma"), - price_21d_ema: Price13dEmaPattern::new(client.clone(), "price_21d_ema"), - price_21d_sma: Price13dEmaPattern::new(client.clone(), "price_21d_sma"), - price_2y_ema: Price13dEmaPattern::new(client.clone(), "price_2y_ema"), - price_2y_sma: Price13dEmaPattern::new(client.clone(), "price_2y_sma"), - price_34d_ema: Price13dEmaPattern::new(client.clone(), "price_34d_ema"), - price_34d_sma: Price13dEmaPattern::new(client.clone(), "price_34d_sma"), - price_4y_ema: Price13dEmaPattern::new(client.clone(), "price_4y_ema"), - price_4y_sma: Price13dEmaPattern::new(client.clone(), "price_4y_sma"), - price_55d_ema: Price13dEmaPattern::new(client.clone(), "price_55d_ema"), - price_55d_sma: Price13dEmaPattern::new(client.clone(), "price_55d_sma"), - price_89d_ema: Price13dEmaPattern::new(client.clone(), "price_89d_ema"), - price_89d_sma: Price13dEmaPattern::new(client.clone(), "price_89d_sma"), - price_8d_ema: Price13dEmaPattern::new(client.clone(), "price_8d_ema"), - price_8d_sma: Price13dEmaPattern::new(client.clone(), "price_8d_sma"), + price_111d_sma: Price111dSmaPattern::new(client.clone(), "price_111d_sma".to_string()), + price_12d_ema: Price111dSmaPattern::new(client.clone(), "price_12d_ema".to_string()), + price_13d_ema: Price111dSmaPattern::new(client.clone(), "price_13d_ema".to_string()), + price_13d_sma: Price111dSmaPattern::new(client.clone(), "price_13d_sma".to_string()), + price_144d_ema: 
Price111dSmaPattern::new(client.clone(), "price_144d_ema".to_string()), + price_144d_sma: Price111dSmaPattern::new(client.clone(), "price_144d_sma".to_string()), + price_1m_ema: Price111dSmaPattern::new(client.clone(), "price_1m_ema".to_string()), + price_1m_sma: Price111dSmaPattern::new(client.clone(), "price_1m_sma".to_string()), + price_1w_ema: Price111dSmaPattern::new(client.clone(), "price_1w_ema".to_string()), + price_1w_sma: Price111dSmaPattern::new(client.clone(), "price_1w_sma".to_string()), + price_1y_ema: Price111dSmaPattern::new(client.clone(), "price_1y_ema".to_string()), + price_1y_sma: Price111dSmaPattern::new(client.clone(), "price_1y_sma".to_string()), + price_200d_ema: Price111dSmaPattern::new(client.clone(), "price_200d_ema".to_string()), + price_200d_sma: Price111dSmaPattern::new(client.clone(), "price_200d_sma".to_string()), + price_200d_sma_x0_8: MetricPattern4::new(client.clone(), format!("{base_path}_price_200d_sma_x0_8")), + price_200d_sma_x2_4: MetricPattern4::new(client.clone(), format!("{base_path}_price_200d_sma_x2_4")), + price_200w_ema: Price111dSmaPattern::new(client.clone(), "price_200w_ema".to_string()), + price_200w_sma: Price111dSmaPattern::new(client.clone(), "price_200w_sma".to_string()), + price_21d_ema: Price111dSmaPattern::new(client.clone(), "price_21d_ema".to_string()), + price_21d_sma: Price111dSmaPattern::new(client.clone(), "price_21d_sma".to_string()), + price_26d_ema: Price111dSmaPattern::new(client.clone(), "price_26d_ema".to_string()), + price_2y_ema: Price111dSmaPattern::new(client.clone(), "price_2y_ema".to_string()), + price_2y_sma: Price111dSmaPattern::new(client.clone(), "price_2y_sma".to_string()), + price_34d_ema: Price111dSmaPattern::new(client.clone(), "price_34d_ema".to_string()), + price_34d_sma: Price111dSmaPattern::new(client.clone(), "price_34d_sma".to_string()), + price_350d_sma: Price111dSmaPattern::new(client.clone(), "price_350d_sma".to_string()), + price_350d_sma_x2: 
MetricPattern4::new(client.clone(), format!("{base_path}_price_350d_sma_x2")), + price_4y_ema: Price111dSmaPattern::new(client.clone(), "price_4y_ema".to_string()), + price_4y_sma: Price111dSmaPattern::new(client.clone(), "price_4y_sma".to_string()), + price_55d_ema: Price111dSmaPattern::new(client.clone(), "price_55d_ema".to_string()), + price_55d_sma: Price111dSmaPattern::new(client.clone(), "price_55d_sma".to_string()), + price_89d_ema: Price111dSmaPattern::new(client.clone(), "price_89d_ema".to_string()), + price_89d_sma: Price111dSmaPattern::new(client.clone(), "price_89d_sma".to_string()), + price_8d_ema: Price111dSmaPattern::new(client.clone(), "price_8d_ema".to_string()), + price_8d_sma: Price111dSmaPattern::new(client.clone(), "price_8d_sma".to_string()), } } } /// Catalog tree node. pub struct CatalogTree_Computed_Market_Range { - pub price_1m_max: Indexes, - pub price_1m_min: Indexes, - pub price_1w_max: Indexes, - pub price_1w_min: Indexes, - pub price_1y_max: Indexes, - pub price_1y_min: Indexes, - pub price_2w_choppiness_index: Indexes, - pub price_2w_max: Indexes, - pub price_2w_min: Indexes, - pub price_true_range: Indexes5, - pub price_true_range_2w_sum: Indexes5, + pub price_1m_max: MetricPattern4, + pub price_1m_min: MetricPattern4, + pub price_1w_max: MetricPattern4, + pub price_1w_min: MetricPattern4, + pub price_1y_max: MetricPattern4, + pub price_1y_min: MetricPattern4, + pub price_2w_choppiness_index: MetricPattern4, + pub price_2w_max: MetricPattern4, + pub price_2w_min: MetricPattern4, + pub price_true_range: MetricPattern21, + pub price_true_range_2w_sum: MetricPattern21, } impl CatalogTree_Computed_Market_Range { - pub fn new(client: Arc, base_path: &str) -> Self { + pub fn new(client: Arc, base_path: String) -> Self { Self { - price_1m_max: Indexes::new(client.clone(), &format!("{base_path}_price_1m_max")), - price_1m_min: Indexes::new(client.clone(), &format!("{base_path}_price_1m_min")), - price_1w_max: Indexes::new(client.clone(), 
&format!("{base_path}_price_1w_max")), - price_1w_min: Indexes::new(client.clone(), &format!("{base_path}_price_1w_min")), - price_1y_max: Indexes::new(client.clone(), &format!("{base_path}_price_1y_max")), - price_1y_min: Indexes::new(client.clone(), &format!("{base_path}_price_1y_min")), - price_2w_choppiness_index: Indexes::new(client.clone(), &format!("{base_path}_price_2w_choppiness_index")), - price_2w_max: Indexes::new(client.clone(), &format!("{base_path}_price_2w_max")), - price_2w_min: Indexes::new(client.clone(), &format!("{base_path}_price_2w_min")), - price_true_range: Indexes5::new(client.clone(), &format!("{base_path}_price_true_range")), - price_true_range_2w_sum: Indexes5::new(client.clone(), &format!("{base_path}_price_true_range_2w_sum")), + price_1m_max: MetricPattern4::new(client.clone(), format!("{base_path}_price_1m_max")), + price_1m_min: MetricPattern4::new(client.clone(), format!("{base_path}_price_1m_min")), + price_1w_max: MetricPattern4::new(client.clone(), format!("{base_path}_price_1w_max")), + price_1w_min: MetricPattern4::new(client.clone(), format!("{base_path}_price_1w_min")), + price_1y_max: MetricPattern4::new(client.clone(), format!("{base_path}_price_1y_max")), + price_1y_min: MetricPattern4::new(client.clone(), format!("{base_path}_price_1y_min")), + price_2w_choppiness_index: MetricPattern4::new(client.clone(), format!("{base_path}_price_2w_choppiness_index")), + price_2w_max: MetricPattern4::new(client.clone(), format!("{base_path}_price_2w_max")), + price_2w_min: MetricPattern4::new(client.clone(), format!("{base_path}_price_2w_min")), + price_true_range: MetricPattern21::new(client.clone(), format!("{base_path}_price_true_range")), + price_true_range_2w_sum: MetricPattern21::new(client.clone(), format!("{base_path}_price_true_range_2w_sum")), + } + } +} + +/// Catalog tree node. 
+pub struct CatalogTree_Computed_Market_Returns { + pub _1d_returns_1m_sd: _1dReturns1mSdPattern, + pub _1d_returns_1w_sd: _1dReturns1mSdPattern, + pub _1d_returns_1y_sd: _1dReturns1mSdPattern, + pub _10y_cagr: MetricPattern4, + pub _10y_price_returns: MetricPattern4, + pub _1d_price_returns: MetricPattern4, + pub _1m_price_returns: MetricPattern4, + pub _1w_price_returns: MetricPattern4, + pub _1y_price_returns: MetricPattern4, + pub _2y_cagr: MetricPattern4, + pub _2y_price_returns: MetricPattern4, + pub _3m_price_returns: MetricPattern4, + pub _3y_cagr: MetricPattern4, + pub _3y_price_returns: MetricPattern4, + pub _4y_cagr: MetricPattern4, + pub _4y_price_returns: MetricPattern4, + pub _5y_cagr: MetricPattern4, + pub _5y_price_returns: MetricPattern4, + pub _6m_price_returns: MetricPattern4, + pub _6y_cagr: MetricPattern4, + pub _6y_price_returns: MetricPattern4, + pub _8y_cagr: MetricPattern4, + pub _8y_price_returns: MetricPattern4, + pub downside_1m_sd: _1dReturns1mSdPattern, + pub downside_1w_sd: _1dReturns1mSdPattern, + pub downside_1y_sd: _1dReturns1mSdPattern, + pub downside_returns: MetricPattern21, +} + +impl CatalogTree_Computed_Market_Returns { + pub fn new(client: Arc, base_path: String) -> Self { + Self { + _1d_returns_1m_sd: _1dReturns1mSdPattern::new(client.clone(), "1d_returns_1m_sd".to_string()), + _1d_returns_1w_sd: _1dReturns1mSdPattern::new(client.clone(), "1d_returns_1w_sd".to_string()), + _1d_returns_1y_sd: _1dReturns1mSdPattern::new(client.clone(), "1d_returns_1y_sd".to_string()), + _10y_cagr: MetricPattern4::new(client.clone(), format!("{base_path}_10y_cagr")), + _10y_price_returns: MetricPattern4::new(client.clone(), format!("{base_path}_10y_price_returns")), + _1d_price_returns: MetricPattern4::new(client.clone(), format!("{base_path}_1d_price_returns")), + _1m_price_returns: MetricPattern4::new(client.clone(), format!("{base_path}_1m_price_returns")), + _1w_price_returns: MetricPattern4::new(client.clone(), 
format!("{base_path}_1w_price_returns")), + _1y_price_returns: MetricPattern4::new(client.clone(), format!("{base_path}_1y_price_returns")), + _2y_cagr: MetricPattern4::new(client.clone(), format!("{base_path}_2y_cagr")), + _2y_price_returns: MetricPattern4::new(client.clone(), format!("{base_path}_2y_price_returns")), + _3m_price_returns: MetricPattern4::new(client.clone(), format!("{base_path}_3m_price_returns")), + _3y_cagr: MetricPattern4::new(client.clone(), format!("{base_path}_3y_cagr")), + _3y_price_returns: MetricPattern4::new(client.clone(), format!("{base_path}_3y_price_returns")), + _4y_cagr: MetricPattern4::new(client.clone(), format!("{base_path}_4y_cagr")), + _4y_price_returns: MetricPattern4::new(client.clone(), format!("{base_path}_4y_price_returns")), + _5y_cagr: MetricPattern4::new(client.clone(), format!("{base_path}_5y_cagr")), + _5y_price_returns: MetricPattern4::new(client.clone(), format!("{base_path}_5y_price_returns")), + _6m_price_returns: MetricPattern4::new(client.clone(), format!("{base_path}_6m_price_returns")), + _6y_cagr: MetricPattern4::new(client.clone(), format!("{base_path}_6y_cagr")), + _6y_price_returns: MetricPattern4::new(client.clone(), format!("{base_path}_6y_price_returns")), + _8y_cagr: MetricPattern4::new(client.clone(), format!("{base_path}_8y_cagr")), + _8y_price_returns: MetricPattern4::new(client.clone(), format!("{base_path}_8y_price_returns")), + downside_1m_sd: _1dReturns1mSdPattern::new(client.clone(), "downside_1m_sd".to_string()), + downside_1w_sd: _1dReturns1mSdPattern::new(client.clone(), "downside_1w_sd".to_string()), + downside_1y_sd: _1dReturns1mSdPattern::new(client.clone(), "downside_1y_sd".to_string()), + downside_returns: MetricPattern21::new(client.clone(), format!("{base_path}_downside_returns")), } } } /// Catalog tree node. 
pub struct CatalogTree_Computed_Market_Volatility { - pub _1d_returns_1m_sd: _1dReturns1mSdPattern, - pub _1d_returns_1w_sd: _1dReturns1mSdPattern, - pub _1d_returns_1y_sd: _1dReturns1mSdPattern, - pub price_1m_volatility: Indexes, - pub price_1w_volatility: Indexes, - pub price_1y_volatility: Indexes, + pub price_1m_volatility: MetricPattern4, + pub price_1w_volatility: MetricPattern4, + pub price_1y_volatility: MetricPattern4, + pub sharpe_1m: MetricPattern21, + pub sharpe_1w: MetricPattern21, + pub sharpe_1y: MetricPattern21, + pub sortino_1m: MetricPattern21, + pub sortino_1w: MetricPattern21, + pub sortino_1y: MetricPattern21, } impl CatalogTree_Computed_Market_Volatility { - pub fn new(client: Arc, base_path: &str) -> Self { + pub fn new(client: Arc, base_path: String) -> Self { Self { - _1d_returns_1m_sd: _1dReturns1mSdPattern::new(client.clone(), "1d_returns_1m_sd"), - _1d_returns_1w_sd: _1dReturns1mSdPattern::new(client.clone(), "1d_returns_1w_sd"), - _1d_returns_1y_sd: _1dReturns1mSdPattern::new(client.clone(), "1d_returns_1y_sd"), - price_1m_volatility: Indexes::new(client.clone(), &format!("{base_path}_price_1m_volatility")), - price_1w_volatility: Indexes::new(client.clone(), &format!("{base_path}_price_1w_volatility")), - price_1y_volatility: Indexes::new(client.clone(), &format!("{base_path}_price_1y_volatility")), + price_1m_volatility: MetricPattern4::new(client.clone(), format!("{base_path}_price_1m_volatility")), + price_1w_volatility: MetricPattern4::new(client.clone(), format!("{base_path}_price_1w_volatility")), + price_1y_volatility: MetricPattern4::new(client.clone(), format!("{base_path}_price_1y_volatility")), + sharpe_1m: MetricPattern21::new(client.clone(), format!("{base_path}_sharpe_1m")), + sharpe_1w: MetricPattern21::new(client.clone(), format!("{base_path}_sharpe_1w")), + sharpe_1y: MetricPattern21::new(client.clone(), format!("{base_path}_sharpe_1y")), + sortino_1m: MetricPattern21::new(client.clone(), 
format!("{base_path}_sortino_1m")), + sortino_1w: MetricPattern21::new(client.clone(), format!("{base_path}_sortino_1w")), + sortino_1y: MetricPattern21::new(client.clone(), format!("{base_path}_sortino_1y")), + } + } +} + +/// Catalog tree node. +pub struct CatalogTree_Computed_Outputs { + pub count: CatalogTree_Computed_Outputs_Count, + pub spent: CatalogTree_Computed_Outputs_Spent, +} + +impl CatalogTree_Computed_Outputs { + pub fn new(client: Arc, base_path: String) -> Self { + Self { + count: CatalogTree_Computed_Outputs_Count::new(client.clone(), format!("{base_path}_count")), + spent: CatalogTree_Computed_Outputs_Spent::new(client.clone(), format!("{base_path}_spent")), + } + } +} + +/// Catalog tree node. +pub struct CatalogTree_Computed_Outputs_Count { + pub count: BlockSizePattern, + pub utxo_count: BitcoinPattern, +} + +impl CatalogTree_Computed_Outputs_Count { + pub fn new(client: Arc, base_path: String) -> Self { + Self { + count: BlockSizePattern::new(client.clone(), "output_count".to_string()), + utxo_count: BitcoinPattern::new(client.clone(), "exact_utxo_count".to_string()), + } + } +} + +/// Catalog tree node. +pub struct CatalogTree_Computed_Outputs_Spent { + pub txinindex: MetricPattern29, +} + +impl CatalogTree_Computed_Outputs_Spent { + pub fn new(client: Arc, base_path: String) -> Self { + Self { + txinindex: MetricPattern29::new(client.clone(), format!("{base_path}_txinindex")), } } } /// Catalog tree node. 
pub struct CatalogTree_Computed_Pools { - pub pool: Indexes2, + pub pool: MetricPattern25, pub vecs: CatalogTree_Computed_Pools_Vecs, } impl CatalogTree_Computed_Pools { - pub fn new(client: Arc, base_path: &str) -> Self { + pub fn new(client: Arc, base_path: String) -> Self { Self { - pool: Indexes2::new(client.clone(), &format!("{base_path}_pool")), - vecs: CatalogTree_Computed_Pools_Vecs::new(client.clone(), &format!("{base_path}_vecs")), + pool: MetricPattern25::new(client.clone(), format!("{base_path}_pool")), + vecs: CatalogTree_Computed_Pools_Vecs::new(client.clone(), format!("{base_path}_vecs")), } } } @@ -2911,878 +6427,651 @@ pub struct CatalogTree_Computed_Pools_Vecs { } impl CatalogTree_Computed_Pools_Vecs { - pub fn new(client: Arc, base_path: &str) -> Self { + pub fn new(client: Arc, base_path: String) -> Self { Self { - axbt: AXbtPattern::new(client.clone(), &format!("{base_path}_AXbt")), - aaopool: AXbtPattern::new(client.clone(), &format!("{base_path}_AaoPool")), - antpool: AXbtPattern::new(client.clone(), &format!("{base_path}_AntPool")), - arkpool: AXbtPattern::new(client.clone(), &format!("{base_path}_ArkPool")), - asicminer: AXbtPattern::new(client.clone(), &format!("{base_path}_AsicMiner")), - batpool: AXbtPattern::new(client.clone(), &format!("{base_path}_BatPool")), - bcmonster: AXbtPattern::new(client.clone(), &format!("{base_path}_BcMonster")), - bcpoolio: AXbtPattern::new(client.clone(), &format!("{base_path}_BcpoolIo")), - binancepool: AXbtPattern::new(client.clone(), &format!("{base_path}_BinancePool")), - bitclub: AXbtPattern::new(client.clone(), &format!("{base_path}_BitClub")), - bitfufupool: AXbtPattern::new(client.clone(), &format!("{base_path}_BitFuFuPool")), - bitfury: AXbtPattern::new(client.clone(), &format!("{base_path}_BitFury")), - bitminter: AXbtPattern::new(client.clone(), &format!("{base_path}_BitMinter")), - bitalo: AXbtPattern::new(client.clone(), &format!("{base_path}_Bitalo")), - bitcoinaffiliatenetwork: 
AXbtPattern::new(client.clone(), &format!("{base_path}_BitcoinAffiliateNetwork")), - bitcoincom: AXbtPattern::new(client.clone(), &format!("{base_path}_BitcoinCom")), - bitcoinindia: AXbtPattern::new(client.clone(), &format!("{base_path}_BitcoinIndia")), - bitcoinrussia: AXbtPattern::new(client.clone(), &format!("{base_path}_BitcoinRussia")), - bitcoinukraine: AXbtPattern::new(client.clone(), &format!("{base_path}_BitcoinUkraine")), - bitfarms: AXbtPattern::new(client.clone(), &format!("{base_path}_Bitfarms")), - bitparking: AXbtPattern::new(client.clone(), &format!("{base_path}_Bitparking")), - bitsolo: AXbtPattern::new(client.clone(), &format!("{base_path}_Bitsolo")), - bixin: AXbtPattern::new(client.clone(), &format!("{base_path}_Bixin")), - blockfills: AXbtPattern::new(client.clone(), &format!("{base_path}_BlockFills")), - braiinspool: AXbtPattern::new(client.clone(), &format!("{base_path}_BraiinsPool")), - bravomining: AXbtPattern::new(client.clone(), &format!("{base_path}_BravoMining")), - btpool: AXbtPattern::new(client.clone(), &format!("{base_path}_BtPool")), - btccom: AXbtPattern::new(client.clone(), &format!("{base_path}_BtcCom")), - btcdig: AXbtPattern::new(client.clone(), &format!("{base_path}_BtcDig")), - btcguild: AXbtPattern::new(client.clone(), &format!("{base_path}_BtcGuild")), - btclab: AXbtPattern::new(client.clone(), &format!("{base_path}_BtcLab")), - btcmp: AXbtPattern::new(client.clone(), &format!("{base_path}_BtcMp")), - btcnuggets: AXbtPattern::new(client.clone(), &format!("{base_path}_BtcNuggets")), - btcpoolparty: AXbtPattern::new(client.clone(), &format!("{base_path}_BtcPoolParty")), - btcserv: AXbtPattern::new(client.clone(), &format!("{base_path}_BtcServ")), - btctop: AXbtPattern::new(client.clone(), &format!("{base_path}_BtcTop")), - btcc: AXbtPattern::new(client.clone(), &format!("{base_path}_Btcc")), - bwpool: AXbtPattern::new(client.clone(), &format!("{base_path}_BwPool")), - bytepool: AXbtPattern::new(client.clone(), 
&format!("{base_path}_BytePool")), - canoe: AXbtPattern::new(client.clone(), &format!("{base_path}_Canoe")), - canoepool: AXbtPattern::new(client.clone(), &format!("{base_path}_CanoePool")), - carbonnegative: AXbtPattern::new(client.clone(), &format!("{base_path}_CarbonNegative")), - ckpool: AXbtPattern::new(client.clone(), &format!("{base_path}_CkPool")), - cloudhashing: AXbtPattern::new(client.clone(), &format!("{base_path}_CloudHashing")), - coinlab: AXbtPattern::new(client.clone(), &format!("{base_path}_CoinLab")), - cointerra: AXbtPattern::new(client.clone(), &format!("{base_path}_Cointerra")), - connectbtc: AXbtPattern::new(client.clone(), &format!("{base_path}_ConnectBtc")), - dpool: AXbtPattern::new(client.clone(), &format!("{base_path}_DPool")), - dcexploration: AXbtPattern::new(client.clone(), &format!("{base_path}_DcExploration")), - dcex: AXbtPattern::new(client.clone(), &format!("{base_path}_Dcex")), - digitalbtc: AXbtPattern::new(client.clone(), &format!("{base_path}_DigitalBtc")), - digitalxmintsy: AXbtPattern::new(client.clone(), &format!("{base_path}_DigitalXMintsy")), - eclipsemc: AXbtPattern::new(client.clone(), &format!("{base_path}_EclipseMc")), - eightbaochi: AXbtPattern::new(client.clone(), &format!("{base_path}_EightBaochi")), - ekanembtc: AXbtPattern::new(client.clone(), &format!("{base_path}_EkanemBtc")), - eligius: AXbtPattern::new(client.clone(), &format!("{base_path}_Eligius")), - emcdpool: AXbtPattern::new(client.clone(), &format!("{base_path}_EmcdPool")), - entrustcharitypool: AXbtPattern::new(client.clone(), &format!("{base_path}_EntrustCharityPool")), - eobot: AXbtPattern::new(client.clone(), &format!("{base_path}_Eobot")), - exxbw: AXbtPattern::new(client.clone(), &format!("{base_path}_ExxBw")), - f2pool: AXbtPattern::new(client.clone(), &format!("{base_path}_F2Pool")), - fiftyeightcoin: AXbtPattern::new(client.clone(), &format!("{base_path}_FiftyEightCoin")), - foundryusa: AXbtPattern::new(client.clone(), 
&format!("{base_path}_FoundryUsa")), - futurebitapollosolo: AXbtPattern::new(client.clone(), &format!("{base_path}_FutureBitApolloSolo")), - gbminers: AXbtPattern::new(client.clone(), &format!("{base_path}_GbMiners")), - ghashio: AXbtPattern::new(client.clone(), &format!("{base_path}_GhashIo")), - givemecoins: AXbtPattern::new(client.clone(), &format!("{base_path}_GiveMeCoins")), - gogreenlight: AXbtPattern::new(client.clone(), &format!("{base_path}_GoGreenLight")), - haozhuzhu: AXbtPattern::new(client.clone(), &format!("{base_path}_HaoZhuZhu")), - haominer: AXbtPattern::new(client.clone(), &format!("{base_path}_Haominer")), - hashbx: AXbtPattern::new(client.clone(), &format!("{base_path}_HashBx")), - hashpool: AXbtPattern::new(client.clone(), &format!("{base_path}_HashPool")), - helix: AXbtPattern::new(client.clone(), &format!("{base_path}_Helix")), - hhtt: AXbtPattern::new(client.clone(), &format!("{base_path}_Hhtt")), - hotpool: AXbtPattern::new(client.clone(), &format!("{base_path}_HotPool")), - hummerpool: AXbtPattern::new(client.clone(), &format!("{base_path}_Hummerpool")), - huobipool: AXbtPattern::new(client.clone(), &format!("{base_path}_HuobiPool")), - innopolistech: AXbtPattern::new(client.clone(), &format!("{base_path}_InnopolisTech")), - kanopool: AXbtPattern::new(client.clone(), &format!("{base_path}_KanoPool")), - kncminer: AXbtPattern::new(client.clone(), &format!("{base_path}_KncMiner")), - kucoinpool: AXbtPattern::new(client.clone(), &format!("{base_path}_KuCoinPool")), - lubiancom: AXbtPattern::new(client.clone(), &format!("{base_path}_LubianCom")), - luckypool: AXbtPattern::new(client.clone(), &format!("{base_path}_LuckyPool")), - luxor: AXbtPattern::new(client.clone(), &format!("{base_path}_Luxor")), - marapool: AXbtPattern::new(client.clone(), &format!("{base_path}_MaraPool")), - maxbtc: AXbtPattern::new(client.clone(), &format!("{base_path}_MaxBtc")), - maxipool: AXbtPattern::new(client.clone(), &format!("{base_path}_MaxiPool")), - 
megabigpower: AXbtPattern::new(client.clone(), &format!("{base_path}_MegaBigPower")), - minerium: AXbtPattern::new(client.clone(), &format!("{base_path}_Minerium")), - miningcity: AXbtPattern::new(client.clone(), &format!("{base_path}_MiningCity")), - miningdutch: AXbtPattern::new(client.clone(), &format!("{base_path}_MiningDutch")), - miningkings: AXbtPattern::new(client.clone(), &format!("{base_path}_MiningKings")), - miningsquared: AXbtPattern::new(client.clone(), &format!("{base_path}_MiningSquared")), - mmpool: AXbtPattern::new(client.clone(), &format!("{base_path}_Mmpool")), - mtred: AXbtPattern::new(client.clone(), &format!("{base_path}_MtRed")), - multicoinco: AXbtPattern::new(client.clone(), &format!("{base_path}_MultiCoinCo")), - multipool: AXbtPattern::new(client.clone(), &format!("{base_path}_Multipool")), - mybtccoinpool: AXbtPattern::new(client.clone(), &format!("{base_path}_MyBtcCoinPool")), - neopool: AXbtPattern::new(client.clone(), &format!("{base_path}_Neopool")), - nexious: AXbtPattern::new(client.clone(), &format!("{base_path}_Nexious")), - nicehash: AXbtPattern::new(client.clone(), &format!("{base_path}_NiceHash")), - nmcbit: AXbtPattern::new(client.clone(), &format!("{base_path}_NmcBit")), - novablock: AXbtPattern::new(client.clone(), &format!("{base_path}_NovaBlock")), - ocean: AXbtPattern::new(client.clone(), &format!("{base_path}_Ocean")), - okexpool: AXbtPattern::new(client.clone(), &format!("{base_path}_OkExPool")), - okminer: AXbtPattern::new(client.clone(), &format!("{base_path}_OkMiner")), - okkong: AXbtPattern::new(client.clone(), &format!("{base_path}_Okkong")), - okpooltop: AXbtPattern::new(client.clone(), &format!("{base_path}_OkpoolTop")), - onehash: AXbtPattern::new(client.clone(), &format!("{base_path}_OneHash")), - onem1x: AXbtPattern::new(client.clone(), &format!("{base_path}_OneM1x")), - onethash: AXbtPattern::new(client.clone(), &format!("{base_path}_OneThash")), - ozcoin: AXbtPattern::new(client.clone(), 
&format!("{base_path}_OzCoin")), - phashio: AXbtPattern::new(client.clone(), &format!("{base_path}_PHashIo")), - parasite: AXbtPattern::new(client.clone(), &format!("{base_path}_Parasite")), - patels: AXbtPattern::new(client.clone(), &format!("{base_path}_Patels")), - pegapool: AXbtPattern::new(client.clone(), &format!("{base_path}_PegaPool")), - phoenix: AXbtPattern::new(client.clone(), &format!("{base_path}_Phoenix")), - polmine: AXbtPattern::new(client.clone(), &format!("{base_path}_Polmine")), - pool175btc: AXbtPattern::new(client.clone(), &format!("{base_path}_Pool175btc")), - pool50btc: AXbtPattern::new(client.clone(), &format!("{base_path}_Pool50btc")), - poolin: AXbtPattern::new(client.clone(), &format!("{base_path}_Poolin")), - portlandhodl: AXbtPattern::new(client.clone(), &format!("{base_path}_PortlandHodl")), - publicpool: AXbtPattern::new(client.clone(), &format!("{base_path}_PublicPool")), - purebtccom: AXbtPattern::new(client.clone(), &format!("{base_path}_PureBtcCom")), - rawpool: AXbtPattern::new(client.clone(), &format!("{base_path}_Rawpool")), - rigpool: AXbtPattern::new(client.clone(), &format!("{base_path}_RigPool")), - sbicrypto: AXbtPattern::new(client.clone(), &format!("{base_path}_SbiCrypto")), - secpool: AXbtPattern::new(client.clone(), &format!("{base_path}_SecPool")), - secretsuperstar: AXbtPattern::new(client.clone(), &format!("{base_path}_SecretSuperstar")), - sevenpool: AXbtPattern::new(client.clone(), &format!("{base_path}_SevenPool")), - shawnp0wers: AXbtPattern::new(client.clone(), &format!("{base_path}_ShawnP0wers")), - sigmapoolcom: AXbtPattern::new(client.clone(), &format!("{base_path}_SigmapoolCom")), - simplecoinus: AXbtPattern::new(client.clone(), &format!("{base_path}_SimplecoinUs")), - solock: AXbtPattern::new(client.clone(), &format!("{base_path}_SoloCk")), - spiderpool: AXbtPattern::new(client.clone(), &format!("{base_path}_SpiderPool")), - stminingcorp: AXbtPattern::new(client.clone(), 
&format!("{base_path}_StMiningCorp")), - tangpool: AXbtPattern::new(client.clone(), &format!("{base_path}_Tangpool")), - tatmaspool: AXbtPattern::new(client.clone(), &format!("{base_path}_TatmasPool")), - tbdice: AXbtPattern::new(client.clone(), &format!("{base_path}_TbDice")), - telco214: AXbtPattern::new(client.clone(), &format!("{base_path}_Telco214")), - terrapool: AXbtPattern::new(client.clone(), &format!("{base_path}_TerraPool")), - tiger: AXbtPattern::new(client.clone(), &format!("{base_path}_Tiger")), - tigerpoolnet: AXbtPattern::new(client.clone(), &format!("{base_path}_TigerpoolNet")), - titan: AXbtPattern::new(client.clone(), &format!("{base_path}_Titan")), - transactioncoinmining: AXbtPattern::new(client.clone(), &format!("{base_path}_TransactionCoinMining")), - trickysbtcpool: AXbtPattern::new(client.clone(), &format!("{base_path}_TrickysBtcPool")), - triplemining: AXbtPattern::new(client.clone(), &format!("{base_path}_TripleMining")), - twentyoneinc: AXbtPattern::new(client.clone(), &format!("{base_path}_TwentyOneInc")), - ultimuspool: AXbtPattern::new(client.clone(), &format!("{base_path}_UltimusPool")), - unknown: AXbtPattern::new(client.clone(), &format!("{base_path}_Unknown")), - unomp: AXbtPattern::new(client.clone(), &format!("{base_path}_Unomp")), - viabtc: AXbtPattern::new(client.clone(), &format!("{base_path}_ViaBtc")), - waterhole: AXbtPattern::new(client.clone(), &format!("{base_path}_Waterhole")), - wayicn: AXbtPattern::new(client.clone(), &format!("{base_path}_WayiCn")), - whitepool: AXbtPattern::new(client.clone(), &format!("{base_path}_WhitePool")), - wk057: AXbtPattern::new(client.clone(), &format!("{base_path}_Wk057")), - yourbtcnet: AXbtPattern::new(client.clone(), &format!("{base_path}_YourbtcNet")), - zulupool: AXbtPattern::new(client.clone(), &format!("{base_path}_Zulupool")), + axbt: AXbtPattern::new(client.clone(), "axbt".to_string()), + aaopool: AXbtPattern::new(client.clone(), "aaopool".to_string()), + antpool: 
AXbtPattern::new(client.clone(), "antpool".to_string()), + arkpool: AXbtPattern::new(client.clone(), "arkpool".to_string()), + asicminer: AXbtPattern::new(client.clone(), "asicminer".to_string()), + batpool: AXbtPattern::new(client.clone(), "batpool".to_string()), + bcmonster: AXbtPattern::new(client.clone(), "bcmonster".to_string()), + bcpoolio: AXbtPattern::new(client.clone(), "bcpoolio".to_string()), + binancepool: AXbtPattern::new(client.clone(), "binancepool".to_string()), + bitclub: AXbtPattern::new(client.clone(), "bitclub".to_string()), + bitfufupool: AXbtPattern::new(client.clone(), "bitfufupool".to_string()), + bitfury: AXbtPattern::new(client.clone(), "bitfury".to_string()), + bitminter: AXbtPattern::new(client.clone(), "bitminter".to_string()), + bitalo: AXbtPattern::new(client.clone(), "bitalo".to_string()), + bitcoinaffiliatenetwork: AXbtPattern::new(client.clone(), "bitcoinaffiliatenetwork".to_string()), + bitcoincom: AXbtPattern::new(client.clone(), "bitcoincom".to_string()), + bitcoinindia: AXbtPattern::new(client.clone(), "bitcoinindia".to_string()), + bitcoinrussia: AXbtPattern::new(client.clone(), "bitcoinrussia".to_string()), + bitcoinukraine: AXbtPattern::new(client.clone(), "bitcoinukraine".to_string()), + bitfarms: AXbtPattern::new(client.clone(), "bitfarms".to_string()), + bitparking: AXbtPattern::new(client.clone(), "bitparking".to_string()), + bitsolo: AXbtPattern::new(client.clone(), "bitsolo".to_string()), + bixin: AXbtPattern::new(client.clone(), "bixin".to_string()), + blockfills: AXbtPattern::new(client.clone(), "blockfills".to_string()), + braiinspool: AXbtPattern::new(client.clone(), "braiinspool".to_string()), + bravomining: AXbtPattern::new(client.clone(), "bravomining".to_string()), + btpool: AXbtPattern::new(client.clone(), "btpool".to_string()), + btccom: AXbtPattern::new(client.clone(), "btccom".to_string()), + btcdig: AXbtPattern::new(client.clone(), "btcdig".to_string()), + btcguild: AXbtPattern::new(client.clone(), 
"btcguild".to_string()), + btclab: AXbtPattern::new(client.clone(), "btclab".to_string()), + btcmp: AXbtPattern::new(client.clone(), "btcmp".to_string()), + btcnuggets: AXbtPattern::new(client.clone(), "btcnuggets".to_string()), + btcpoolparty: AXbtPattern::new(client.clone(), "btcpoolparty".to_string()), + btcserv: AXbtPattern::new(client.clone(), "btcserv".to_string()), + btctop: AXbtPattern::new(client.clone(), "btctop".to_string()), + btcc: AXbtPattern::new(client.clone(), "btcc".to_string()), + bwpool: AXbtPattern::new(client.clone(), "bwpool".to_string()), + bytepool: AXbtPattern::new(client.clone(), "bytepool".to_string()), + canoe: AXbtPattern::new(client.clone(), "canoe".to_string()), + canoepool: AXbtPattern::new(client.clone(), "canoepool".to_string()), + carbonnegative: AXbtPattern::new(client.clone(), "carbonnegative".to_string()), + ckpool: AXbtPattern::new(client.clone(), "ckpool".to_string()), + cloudhashing: AXbtPattern::new(client.clone(), "cloudhashing".to_string()), + coinlab: AXbtPattern::new(client.clone(), "coinlab".to_string()), + cointerra: AXbtPattern::new(client.clone(), "cointerra".to_string()), + connectbtc: AXbtPattern::new(client.clone(), "connectbtc".to_string()), + dpool: AXbtPattern::new(client.clone(), "dpool".to_string()), + dcexploration: AXbtPattern::new(client.clone(), "dcexploration".to_string()), + dcex: AXbtPattern::new(client.clone(), "dcex".to_string()), + digitalbtc: AXbtPattern::new(client.clone(), "digitalbtc".to_string()), + digitalxmintsy: AXbtPattern::new(client.clone(), "digitalxmintsy".to_string()), + eclipsemc: AXbtPattern::new(client.clone(), "eclipsemc".to_string()), + eightbaochi: AXbtPattern::new(client.clone(), "eightbaochi".to_string()), + ekanembtc: AXbtPattern::new(client.clone(), "ekanembtc".to_string()), + eligius: AXbtPattern::new(client.clone(), "eligius".to_string()), + emcdpool: AXbtPattern::new(client.clone(), "emcdpool".to_string()), + entrustcharitypool: AXbtPattern::new(client.clone(), 
"entrustcharitypool".to_string()), + eobot: AXbtPattern::new(client.clone(), "eobot".to_string()), + exxbw: AXbtPattern::new(client.clone(), "exxbw".to_string()), + f2pool: AXbtPattern::new(client.clone(), "f2pool".to_string()), + fiftyeightcoin: AXbtPattern::new(client.clone(), "fiftyeightcoin".to_string()), + foundryusa: AXbtPattern::new(client.clone(), "foundryusa".to_string()), + futurebitapollosolo: AXbtPattern::new(client.clone(), "futurebitapollosolo".to_string()), + gbminers: AXbtPattern::new(client.clone(), "gbminers".to_string()), + ghashio: AXbtPattern::new(client.clone(), "ghashio".to_string()), + givemecoins: AXbtPattern::new(client.clone(), "givemecoins".to_string()), + gogreenlight: AXbtPattern::new(client.clone(), "gogreenlight".to_string()), + haozhuzhu: AXbtPattern::new(client.clone(), "haozhuzhu".to_string()), + haominer: AXbtPattern::new(client.clone(), "haominer".to_string()), + hashbx: AXbtPattern::new(client.clone(), "hashbx".to_string()), + hashpool: AXbtPattern::new(client.clone(), "hashpool".to_string()), + helix: AXbtPattern::new(client.clone(), "helix".to_string()), + hhtt: AXbtPattern::new(client.clone(), "hhtt".to_string()), + hotpool: AXbtPattern::new(client.clone(), "hotpool".to_string()), + hummerpool: AXbtPattern::new(client.clone(), "hummerpool".to_string()), + huobipool: AXbtPattern::new(client.clone(), "huobipool".to_string()), + innopolistech: AXbtPattern::new(client.clone(), "innopolistech".to_string()), + kanopool: AXbtPattern::new(client.clone(), "kanopool".to_string()), + kncminer: AXbtPattern::new(client.clone(), "kncminer".to_string()), + kucoinpool: AXbtPattern::new(client.clone(), "kucoinpool".to_string()), + lubiancom: AXbtPattern::new(client.clone(), "lubiancom".to_string()), + luckypool: AXbtPattern::new(client.clone(), "luckypool".to_string()), + luxor: AXbtPattern::new(client.clone(), "luxor".to_string()), + marapool: AXbtPattern::new(client.clone(), "marapool".to_string()), + maxbtc: 
AXbtPattern::new(client.clone(), "maxbtc".to_string()), + maxipool: AXbtPattern::new(client.clone(), "maxipool".to_string()), + megabigpower: AXbtPattern::new(client.clone(), "megabigpower".to_string()), + minerium: AXbtPattern::new(client.clone(), "minerium".to_string()), + miningcity: AXbtPattern::new(client.clone(), "miningcity".to_string()), + miningdutch: AXbtPattern::new(client.clone(), "miningdutch".to_string()), + miningkings: AXbtPattern::new(client.clone(), "miningkings".to_string()), + miningsquared: AXbtPattern::new(client.clone(), "miningsquared".to_string()), + mmpool: AXbtPattern::new(client.clone(), "mmpool".to_string()), + mtred: AXbtPattern::new(client.clone(), "mtred".to_string()), + multicoinco: AXbtPattern::new(client.clone(), "multicoinco".to_string()), + multipool: AXbtPattern::new(client.clone(), "multipool".to_string()), + mybtccoinpool: AXbtPattern::new(client.clone(), "mybtccoinpool".to_string()), + neopool: AXbtPattern::new(client.clone(), "neopool".to_string()), + nexious: AXbtPattern::new(client.clone(), "nexious".to_string()), + nicehash: AXbtPattern::new(client.clone(), "nicehash".to_string()), + nmcbit: AXbtPattern::new(client.clone(), "nmcbit".to_string()), + novablock: AXbtPattern::new(client.clone(), "novablock".to_string()), + ocean: AXbtPattern::new(client.clone(), "ocean".to_string()), + okexpool: AXbtPattern::new(client.clone(), "okexpool".to_string()), + okminer: AXbtPattern::new(client.clone(), "okminer".to_string()), + okkong: AXbtPattern::new(client.clone(), "okkong".to_string()), + okpooltop: AXbtPattern::new(client.clone(), "okpooltop".to_string()), + onehash: AXbtPattern::new(client.clone(), "onehash".to_string()), + onem1x: AXbtPattern::new(client.clone(), "onem1x".to_string()), + onethash: AXbtPattern::new(client.clone(), "onethash".to_string()), + ozcoin: AXbtPattern::new(client.clone(), "ozcoin".to_string()), + phashio: AXbtPattern::new(client.clone(), "phashio".to_string()), + parasite: 
AXbtPattern::new(client.clone(), "parasite".to_string()), + patels: AXbtPattern::new(client.clone(), "patels".to_string()), + pegapool: AXbtPattern::new(client.clone(), "pegapool".to_string()), + phoenix: AXbtPattern::new(client.clone(), "phoenix".to_string()), + polmine: AXbtPattern::new(client.clone(), "polmine".to_string()), + pool175btc: AXbtPattern::new(client.clone(), "pool175btc".to_string()), + pool50btc: AXbtPattern::new(client.clone(), "pool50btc".to_string()), + poolin: AXbtPattern::new(client.clone(), "poolin".to_string()), + portlandhodl: AXbtPattern::new(client.clone(), "portlandhodl".to_string()), + publicpool: AXbtPattern::new(client.clone(), "publicpool".to_string()), + purebtccom: AXbtPattern::new(client.clone(), "purebtccom".to_string()), + rawpool: AXbtPattern::new(client.clone(), "rawpool".to_string()), + rigpool: AXbtPattern::new(client.clone(), "rigpool".to_string()), + sbicrypto: AXbtPattern::new(client.clone(), "sbicrypto".to_string()), + secpool: AXbtPattern::new(client.clone(), "secpool".to_string()), + secretsuperstar: AXbtPattern::new(client.clone(), "secretsuperstar".to_string()), + sevenpool: AXbtPattern::new(client.clone(), "sevenpool".to_string()), + shawnp0wers: AXbtPattern::new(client.clone(), "shawnp0wers".to_string()), + sigmapoolcom: AXbtPattern::new(client.clone(), "sigmapoolcom".to_string()), + simplecoinus: AXbtPattern::new(client.clone(), "simplecoinus".to_string()), + solock: AXbtPattern::new(client.clone(), "solock".to_string()), + spiderpool: AXbtPattern::new(client.clone(), "spiderpool".to_string()), + stminingcorp: AXbtPattern::new(client.clone(), "stminingcorp".to_string()), + tangpool: AXbtPattern::new(client.clone(), "tangpool".to_string()), + tatmaspool: AXbtPattern::new(client.clone(), "tatmaspool".to_string()), + tbdice: AXbtPattern::new(client.clone(), "tbdice".to_string()), + telco214: AXbtPattern::new(client.clone(), "telco214".to_string()), + terrapool: AXbtPattern::new(client.clone(), 
"terrapool".to_string()), + tiger: AXbtPattern::new(client.clone(), "tiger".to_string()), + tigerpoolnet: AXbtPattern::new(client.clone(), "tigerpoolnet".to_string()), + titan: AXbtPattern::new(client.clone(), "titan".to_string()), + transactioncoinmining: AXbtPattern::new(client.clone(), "transactioncoinmining".to_string()), + trickysbtcpool: AXbtPattern::new(client.clone(), "trickysbtcpool".to_string()), + triplemining: AXbtPattern::new(client.clone(), "triplemining".to_string()), + twentyoneinc: AXbtPattern::new(client.clone(), "twentyoneinc".to_string()), + ultimuspool: AXbtPattern::new(client.clone(), "ultimuspool".to_string()), + unknown: AXbtPattern::new(client.clone(), "unknown".to_string()), + unomp: AXbtPattern::new(client.clone(), "unomp".to_string()), + viabtc: AXbtPattern::new(client.clone(), "viabtc".to_string()), + waterhole: AXbtPattern::new(client.clone(), "waterhole".to_string()), + wayicn: AXbtPattern::new(client.clone(), "wayicn".to_string()), + whitepool: AXbtPattern::new(client.clone(), "whitepool".to_string()), + wk057: AXbtPattern::new(client.clone(), "wk057".to_string()), + yourbtcnet: AXbtPattern::new(client.clone(), "yourbtcnet".to_string()), + zulupool: AXbtPattern::new(client.clone(), "zulupool".to_string()), + } + } +} + +/// Catalog tree node. +pub struct CatalogTree_Computed_Positions { + pub position: MetricPattern16, +} + +impl CatalogTree_Computed_Positions { + pub fn new(client: Arc, base_path: String) -> Self { + Self { + position: MetricPattern16::new(client.clone(), format!("{base_path}_position")), } } } /// Catalog tree node. 
pub struct CatalogTree_Computed_Price { - pub price_close: Indexes3, - pub price_close_in_cents: Indexes13, - pub price_close_in_sats: Indexes3, - pub price_high: Indexes3, - pub price_high_in_cents: Indexes13, - pub price_high_in_sats: Indexes3, - pub price_low: Indexes3, - pub price_low_in_cents: Indexes13, - pub price_low_in_sats: Indexes3, - pub price_ohlc: Indexes3, - pub price_ohlc_in_sats: Indexes3, - pub price_open: Indexes3, - pub price_open_in_cents: Indexes13, - pub price_open_in_sats: Indexes3, + pub ohlc: CatalogTree_Computed_Price_Ohlc, + pub sats: CatalogTree_Computed_Price_Sats, + pub usd: CatalogTree_Computed_Price_Usd, } impl CatalogTree_Computed_Price { - pub fn new(client: Arc, base_path: &str) -> Self { + pub fn new(client: Arc, base_path: String) -> Self { Self { - price_close: Indexes3::new(client.clone(), &format!("{base_path}_price_close")), - price_close_in_cents: Indexes13::new(client.clone(), &format!("{base_path}_price_close_in_cents")), - price_close_in_sats: Indexes3::new(client.clone(), &format!("{base_path}_price_close_in_sats")), - price_high: Indexes3::new(client.clone(), &format!("{base_path}_price_high")), - price_high_in_cents: Indexes13::new(client.clone(), &format!("{base_path}_price_high_in_cents")), - price_high_in_sats: Indexes3::new(client.clone(), &format!("{base_path}_price_high_in_sats")), - price_low: Indexes3::new(client.clone(), &format!("{base_path}_price_low")), - price_low_in_cents: Indexes13::new(client.clone(), &format!("{base_path}_price_low_in_cents")), - price_low_in_sats: Indexes3::new(client.clone(), &format!("{base_path}_price_low_in_sats")), - price_ohlc: Indexes3::new(client.clone(), &format!("{base_path}_price_ohlc")), - price_ohlc_in_sats: Indexes3::new(client.clone(), &format!("{base_path}_price_ohlc_in_sats")), - price_open: Indexes3::new(client.clone(), &format!("{base_path}_price_open")), - price_open_in_cents: Indexes13::new(client.clone(), &format!("{base_path}_price_open_in_cents")), - 
price_open_in_sats: Indexes3::new(client.clone(), &format!("{base_path}_price_open_in_sats")), + ohlc: CatalogTree_Computed_Price_Ohlc::new(client.clone(), format!("{base_path}_ohlc")), + sats: CatalogTree_Computed_Price_Sats::new(client.clone(), format!("{base_path}_sats")), + usd: CatalogTree_Computed_Price_Usd::new(client.clone(), format!("{base_path}_usd")), } } } /// Catalog tree node. -pub struct CatalogTree_Computed_Stateful { - pub addr_count: Indexes3, - pub address_cohorts: CatalogTree_Computed_Stateful_AddressCohorts, - pub addresses_data: CatalogTree_Computed_Stateful_AddressesData, - pub addresstype_to_height_to_addr_count: AddresstypeToHeightToAddrCountPattern, - pub addresstype_to_height_to_empty_addr_count: AddresstypeToHeightToAddrCountPattern, - pub addresstype_to_indexes_to_addr_count: AddresstypeToHeightToAddrCountPattern, - pub addresstype_to_indexes_to_empty_addr_count: AddresstypeToHeightToAddrCountPattern, - pub any_address_indexes: AddresstypeToHeightToAddrCountPattern, - pub chain_state: Indexes2, - pub empty_addr_count: Indexes3, - pub emptyaddressindex: Indexes29, - pub loadedaddressindex: Indexes30, - pub market_cap: Indexes26, - pub opreturn_supply: SupplyPattern, - pub unspendable_supply: SupplyPattern, - pub utxo_cohorts: CatalogTree_Computed_Stateful_UtxoCohorts, +pub struct CatalogTree_Computed_Price_Ohlc { + pub ohlc_in_cents: MetricPattern9, } -impl CatalogTree_Computed_Stateful { - pub fn new(client: Arc, base_path: &str) -> Self { +impl CatalogTree_Computed_Price_Ohlc { + pub fn new(client: Arc, base_path: String) -> Self { Self { - addr_count: Indexes3::new(client.clone(), &format!("{base_path}_addr_count")), - address_cohorts: CatalogTree_Computed_Stateful_AddressCohorts::new(client.clone(), &format!("{base_path}_address_cohorts")), - addresses_data: CatalogTree_Computed_Stateful_AddressesData::new(client.clone(), &format!("{base_path}_addresses_data")), - addresstype_to_height_to_addr_count: 
AddresstypeToHeightToAddrCountPattern::new(client.clone(), &format!("{base_path}_addresstype_to_height_to_addr_count")), - addresstype_to_height_to_empty_addr_count: AddresstypeToHeightToAddrCountPattern::new(client.clone(), &format!("{base_path}_addresstype_to_height_to_empty_addr_count")), - addresstype_to_indexes_to_addr_count: AddresstypeToHeightToAddrCountPattern::new(client.clone(), &format!("{base_path}_addresstype_to_indexes_to_addr_count")), - addresstype_to_indexes_to_empty_addr_count: AddresstypeToHeightToAddrCountPattern::new(client.clone(), &format!("{base_path}_addresstype_to_indexes_to_empty_addr_count")), - any_address_indexes: AddresstypeToHeightToAddrCountPattern::new(client.clone(), &format!("{base_path}_any_address_indexes")), - chain_state: Indexes2::new(client.clone(), &format!("{base_path}_chain_state")), - empty_addr_count: Indexes3::new(client.clone(), &format!("{base_path}_empty_addr_count")), - emptyaddressindex: Indexes29::new(client.clone(), &format!("{base_path}_emptyaddressindex")), - loadedaddressindex: Indexes30::new(client.clone(), &format!("{base_path}_loadedaddressindex")), - market_cap: Indexes26::new(client.clone(), &format!("{base_path}_market_cap")), - opreturn_supply: SupplyPattern::new(client.clone(), &format!("{base_path}_opreturn_supply")), - unspendable_supply: SupplyPattern::new(client.clone(), &format!("{base_path}_unspendable_supply")), - utxo_cohorts: CatalogTree_Computed_Stateful_UtxoCohorts::new(client.clone(), &format!("{base_path}_utxo_cohorts")), + ohlc_in_cents: MetricPattern9::new(client.clone(), format!("{base_path}_ohlc_in_cents")), } } } /// Catalog tree node. 
-pub struct CatalogTree_Computed_Stateful_AddressCohorts { - pub amount_range: CatalogTree_Computed_Stateful_AddressCohorts_AmountRange, - pub ge_amount: CatalogTree_Computed_Stateful_AddressCohorts_GeAmount, - pub lt_amount: CatalogTree_Computed_Stateful_AddressCohorts_LtAmount, +pub struct CatalogTree_Computed_Price_Sats { + pub price_close_in_sats: MetricPattern1, + pub price_high_in_sats: MetricPattern1, + pub price_low_in_sats: MetricPattern1, + pub price_ohlc_in_sats: MetricPattern1, + pub price_open_in_sats: MetricPattern1, } -impl CatalogTree_Computed_Stateful_AddressCohorts { - pub fn new(client: Arc, base_path: &str) -> Self { +impl CatalogTree_Computed_Price_Sats { + pub fn new(client: Arc, base_path: String) -> Self { Self { - amount_range: CatalogTree_Computed_Stateful_AddressCohorts_AmountRange::new(client.clone(), &format!("{base_path}_amount_range")), - ge_amount: CatalogTree_Computed_Stateful_AddressCohorts_GeAmount::new(client.clone(), &format!("{base_path}_ge_amount")), - lt_amount: CatalogTree_Computed_Stateful_AddressCohorts_LtAmount::new(client.clone(), &format!("{base_path}_lt_amount")), + price_close_in_sats: MetricPattern1::new(client.clone(), format!("{base_path}_price_close_in_sats")), + price_high_in_sats: MetricPattern1::new(client.clone(), format!("{base_path}_price_high_in_sats")), + price_low_in_sats: MetricPattern1::new(client.clone(), format!("{base_path}_price_low_in_sats")), + price_ohlc_in_sats: MetricPattern1::new(client.clone(), format!("{base_path}_price_ohlc_in_sats")), + price_open_in_sats: MetricPattern1::new(client.clone(), format!("{base_path}_price_open_in_sats")), } } } /// Catalog tree node. 
-pub struct CatalogTree_Computed_Stateful_AddressCohorts_AmountRange { - pub _0sats: _0satsPattern, - pub _100btc_to_1k_btc: _0satsPattern, - pub _100k_btc_or_more: _0satsPattern, - pub _100k_sats_to_1m_sats: _0satsPattern, - pub _100sats_to_1k_sats: _0satsPattern, - pub _10btc_to_100btc: _0satsPattern, - pub _10k_btc_to_100k_btc: _0satsPattern, - pub _10k_sats_to_100k_sats: _0satsPattern, - pub _10m_sats_to_1btc: _0satsPattern, - pub _10sats_to_100sats: _0satsPattern, - pub _1btc_to_10btc: _0satsPattern, - pub _1k_btc_to_10k_btc: _0satsPattern, - pub _1k_sats_to_10k_sats: _0satsPattern, - pub _1m_sats_to_10m_sats: _0satsPattern, - pub _1sat_to_10sats: _0satsPattern, +pub struct CatalogTree_Computed_Price_Usd { + pub price_close: MetricPattern1, + pub price_close_in_cents: MetricPattern9, + pub price_high: MetricPattern1, + pub price_high_in_cents: MetricPattern9, + pub price_low: MetricPattern1, + pub price_low_in_cents: MetricPattern9, + pub price_ohlc: MetricPattern1, + pub price_open: MetricPattern1, + pub price_open_in_cents: MetricPattern9, } -impl CatalogTree_Computed_Stateful_AddressCohorts_AmountRange { - pub fn new(client: Arc, base_path: &str) -> Self { +impl CatalogTree_Computed_Price_Usd { + pub fn new(client: Arc, base_path: String) -> Self { Self { - _0sats: _0satsPattern::new(client.clone(), &format!("{base_path}__0sats")), - _100btc_to_1k_btc: _0satsPattern::new(client.clone(), &format!("{base_path}__100btc_to_1k_btc")), - _100k_btc_or_more: _0satsPattern::new(client.clone(), &format!("{base_path}__100k_btc_or_more")), - _100k_sats_to_1m_sats: _0satsPattern::new(client.clone(), &format!("{base_path}__100k_sats_to_1m_sats")), - _100sats_to_1k_sats: _0satsPattern::new(client.clone(), &format!("{base_path}__100sats_to_1k_sats")), - _10btc_to_100btc: _0satsPattern::new(client.clone(), &format!("{base_path}__10btc_to_100btc")), - _10k_btc_to_100k_btc: _0satsPattern::new(client.clone(), &format!("{base_path}__10k_btc_to_100k_btc")), - 
_10k_sats_to_100k_sats: _0satsPattern::new(client.clone(), &format!("{base_path}__10k_sats_to_100k_sats")), - _10m_sats_to_1btc: _0satsPattern::new(client.clone(), &format!("{base_path}__10m_sats_to_1btc")), - _10sats_to_100sats: _0satsPattern::new(client.clone(), &format!("{base_path}__10sats_to_100sats")), - _1btc_to_10btc: _0satsPattern::new(client.clone(), &format!("{base_path}__1btc_to_10btc")), - _1k_btc_to_10k_btc: _0satsPattern::new(client.clone(), &format!("{base_path}__1k_btc_to_10k_btc")), - _1k_sats_to_10k_sats: _0satsPattern::new(client.clone(), &format!("{base_path}__1k_sats_to_10k_sats")), - _1m_sats_to_10m_sats: _0satsPattern::new(client.clone(), &format!("{base_path}__1m_sats_to_10m_sats")), - _1sat_to_10sats: _0satsPattern::new(client.clone(), &format!("{base_path}__1sat_to_10sats")), + price_close: MetricPattern1::new(client.clone(), format!("{base_path}_price_close")), + price_close_in_cents: MetricPattern9::new(client.clone(), format!("{base_path}_price_close_in_cents")), + price_high: MetricPattern1::new(client.clone(), format!("{base_path}_price_high")), + price_high_in_cents: MetricPattern9::new(client.clone(), format!("{base_path}_price_high_in_cents")), + price_low: MetricPattern1::new(client.clone(), format!("{base_path}_price_low")), + price_low_in_cents: MetricPattern9::new(client.clone(), format!("{base_path}_price_low_in_cents")), + price_ohlc: MetricPattern1::new(client.clone(), format!("{base_path}_price_ohlc")), + price_open: MetricPattern1::new(client.clone(), format!("{base_path}_price_open")), + price_open_in_cents: MetricPattern9::new(client.clone(), format!("{base_path}_price_open_in_cents")), } } } /// Catalog tree node. 
-pub struct CatalogTree_Computed_Stateful_AddressCohorts_GeAmount { - pub _100btc: _0satsPattern, - pub _100k_sats: _0satsPattern, - pub _100sats: _0satsPattern, - pub _10btc: _0satsPattern, - pub _10k_btc: _0satsPattern, - pub _10k_sats: _0satsPattern, - pub _10m_sats: _0satsPattern, - pub _10sats: _0satsPattern, - pub _1btc: _0satsPattern, - pub _1k_btc: _0satsPattern, - pub _1k_sats: _0satsPattern, - pub _1m_sats: _0satsPattern, - pub _1sat: _0satsPattern, +pub struct CatalogTree_Computed_Scripts { + pub count: CatalogTree_Computed_Scripts_Count, + pub value: CatalogTree_Computed_Scripts_Value, } -impl CatalogTree_Computed_Stateful_AddressCohorts_GeAmount { - pub fn new(client: Arc, base_path: &str) -> Self { +impl CatalogTree_Computed_Scripts { + pub fn new(client: Arc, base_path: String) -> Self { Self { - _100btc: _0satsPattern::new(client.clone(), &format!("{base_path}__100btc")), - _100k_sats: _0satsPattern::new(client.clone(), &format!("{base_path}__100k_sats")), - _100sats: _0satsPattern::new(client.clone(), &format!("{base_path}__100sats")), - _10btc: _0satsPattern::new(client.clone(), &format!("{base_path}__10btc")), - _10k_btc: _0satsPattern::new(client.clone(), &format!("{base_path}__10k_btc")), - _10k_sats: _0satsPattern::new(client.clone(), &format!("{base_path}__10k_sats")), - _10m_sats: _0satsPattern::new(client.clone(), &format!("{base_path}__10m_sats")), - _10sats: _0satsPattern::new(client.clone(), &format!("{base_path}__10sats")), - _1btc: _0satsPattern::new(client.clone(), &format!("{base_path}__1btc")), - _1k_btc: _0satsPattern::new(client.clone(), &format!("{base_path}__1k_btc")), - _1k_sats: _0satsPattern::new(client.clone(), &format!("{base_path}__1k_sats")), - _1m_sats: _0satsPattern::new(client.clone(), &format!("{base_path}__1m_sats")), - _1sat: _0satsPattern::new(client.clone(), &format!("{base_path}__1sat")), + count: CatalogTree_Computed_Scripts_Count::new(client.clone(), format!("{base_path}_count")), + value: 
CatalogTree_Computed_Scripts_Value::new(client.clone(), format!("{base_path}_value")), } } } /// Catalog tree node. -pub struct CatalogTree_Computed_Stateful_AddressCohorts_LtAmount { - pub _100btc: _0satsPattern, - pub _100k_btc: _0satsPattern, - pub _100k_sats: _0satsPattern, - pub _100sats: _0satsPattern, - pub _10btc: _0satsPattern, - pub _10k_btc: _0satsPattern, - pub _10k_sats: _0satsPattern, - pub _10m_sats: _0satsPattern, - pub _10sats: _0satsPattern, - pub _1btc: _0satsPattern, - pub _1k_btc: _0satsPattern, - pub _1k_sats: _0satsPattern, - pub _1m_sats: _0satsPattern, +pub struct CatalogTree_Computed_Scripts_Count { + pub emptyoutput_count: BitcoinPattern, + pub opreturn_count: BitcoinPattern, + pub p2a_count: BitcoinPattern, + pub p2ms_count: BitcoinPattern, + pub p2pk33_count: BitcoinPattern, + pub p2pk65_count: BitcoinPattern, + pub p2pkh_count: BitcoinPattern, + pub p2sh_count: BitcoinPattern, + pub p2tr_count: BitcoinPattern, + pub p2wpkh_count: BitcoinPattern, + pub p2wsh_count: BitcoinPattern, + pub segwit_adoption: SegwitAdoptionPattern, + pub segwit_count: BitcoinPattern, + pub taproot_adoption: SegwitAdoptionPattern, + pub unknownoutput_count: BitcoinPattern, } -impl CatalogTree_Computed_Stateful_AddressCohorts_LtAmount { - pub fn new(client: Arc, base_path: &str) -> Self { +impl CatalogTree_Computed_Scripts_Count { + pub fn new(client: Arc, base_path: String) -> Self { Self { - _100btc: _0satsPattern::new(client.clone(), &format!("{base_path}__100btc")), - _100k_btc: _0satsPattern::new(client.clone(), &format!("{base_path}__100k_btc")), - _100k_sats: _0satsPattern::new(client.clone(), &format!("{base_path}__100k_sats")), - _100sats: _0satsPattern::new(client.clone(), &format!("{base_path}__100sats")), - _10btc: _0satsPattern::new(client.clone(), &format!("{base_path}__10btc")), - _10k_btc: _0satsPattern::new(client.clone(), &format!("{base_path}__10k_btc")), - _10k_sats: _0satsPattern::new(client.clone(), &format!("{base_path}__10k_sats")), - 
_10m_sats: _0satsPattern::new(client.clone(), &format!("{base_path}__10m_sats")), - _10sats: _0satsPattern::new(client.clone(), &format!("{base_path}__10sats")), - _1btc: _0satsPattern::new(client.clone(), &format!("{base_path}__1btc")), - _1k_btc: _0satsPattern::new(client.clone(), &format!("{base_path}__1k_btc")), - _1k_sats: _0satsPattern::new(client.clone(), &format!("{base_path}__1k_sats")), - _1m_sats: _0satsPattern::new(client.clone(), &format!("{base_path}__1m_sats")), + emptyoutput_count: BitcoinPattern::new(client.clone(), "emptyoutput_count".to_string()), + opreturn_count: BitcoinPattern::new(client.clone(), "opreturn_count".to_string()), + p2a_count: BitcoinPattern::new(client.clone(), "p2a_count".to_string()), + p2ms_count: BitcoinPattern::new(client.clone(), "p2ms_count".to_string()), + p2pk33_count: BitcoinPattern::new(client.clone(), "p2pk33_count".to_string()), + p2pk65_count: BitcoinPattern::new(client.clone(), "p2pk65_count".to_string()), + p2pkh_count: BitcoinPattern::new(client.clone(), "p2pkh_count".to_string()), + p2sh_count: BitcoinPattern::new(client.clone(), "p2sh_count".to_string()), + p2tr_count: BitcoinPattern::new(client.clone(), "p2tr_count".to_string()), + p2wpkh_count: BitcoinPattern::new(client.clone(), "p2wpkh_count".to_string()), + p2wsh_count: BitcoinPattern::new(client.clone(), "p2wsh_count".to_string()), + segwit_adoption: SegwitAdoptionPattern::new(client.clone(), "segwit_adoption".to_string()), + segwit_count: BitcoinPattern::new(client.clone(), "segwit_count".to_string()), + taproot_adoption: SegwitAdoptionPattern::new(client.clone(), "taproot_adoption".to_string()), + unknownoutput_count: BitcoinPattern::new(client.clone(), "unknownoutput_count".to_string()), } } } /// Catalog tree node. 
-pub struct CatalogTree_Computed_Stateful_AddressesData { - pub empty: Indexes29, - pub loaded: Indexes30, +pub struct CatalogTree_Computed_Scripts_Value { + pub opreturn_value: CatalogTree_Computed_Scripts_Value_OpreturnValue, } -impl CatalogTree_Computed_Stateful_AddressesData { - pub fn new(client: Arc, base_path: &str) -> Self { +impl CatalogTree_Computed_Scripts_Value { + pub fn new(client: Arc, base_path: String) -> Self { Self { - empty: Indexes29::new(client.clone(), &format!("{base_path}_empty")), - loaded: Indexes30::new(client.clone(), &format!("{base_path}_loaded")), + opreturn_value: CatalogTree_Computed_Scripts_Value_OpreturnValue::new(client.clone(), format!("{base_path}_opreturn_value")), } } } /// Catalog tree node. -pub struct CatalogTree_Computed_Stateful_UtxoCohorts { - pub age_range: CatalogTree_Computed_Stateful_UtxoCohorts_AgeRange, - pub all: CatalogTree_Computed_Stateful_UtxoCohorts_All, - pub amount_range: CatalogTree_Computed_Stateful_UtxoCohorts_AmountRange, - pub epoch: CatalogTree_Computed_Stateful_UtxoCohorts_Epoch, - pub ge_amount: CatalogTree_Computed_Stateful_UtxoCohorts_GeAmount, - pub lt_amount: CatalogTree_Computed_Stateful_UtxoCohorts_LtAmount, - pub max_age: CatalogTree_Computed_Stateful_UtxoCohorts_MaxAge, - pub min_age: CatalogTree_Computed_Stateful_UtxoCohorts_MinAge, - pub term: CatalogTree_Computed_Stateful_UtxoCohorts_Term, - pub type_: CatalogTree_Computed_Stateful_UtxoCohorts_Type, - pub year: CatalogTree_Computed_Stateful_UtxoCohorts_Year, +pub struct CatalogTree_Computed_Scripts_Value_OpreturnValue { + pub base: MetricPattern25, + pub bitcoin: SegwitAdoptionPattern, + pub dollars: SegwitAdoptionPattern, + pub sats: CatalogTree_Computed_Scripts_Value_OpreturnValue_Sats, } -impl CatalogTree_Computed_Stateful_UtxoCohorts { - pub fn new(client: Arc, base_path: &str) -> Self { +impl CatalogTree_Computed_Scripts_Value_OpreturnValue { + pub fn new(client: Arc, base_path: String) -> Self { Self { - age_range: 
CatalogTree_Computed_Stateful_UtxoCohorts_AgeRange::new(client.clone(), &format!("{base_path}_age_range")), - all: CatalogTree_Computed_Stateful_UtxoCohorts_All::new(client.clone(), &format!("{base_path}_all")), - amount_range: CatalogTree_Computed_Stateful_UtxoCohorts_AmountRange::new(client.clone(), &format!("{base_path}_amount_range")), - epoch: CatalogTree_Computed_Stateful_UtxoCohorts_Epoch::new(client.clone(), &format!("{base_path}_epoch")), - ge_amount: CatalogTree_Computed_Stateful_UtxoCohorts_GeAmount::new(client.clone(), &format!("{base_path}_ge_amount")), - lt_amount: CatalogTree_Computed_Stateful_UtxoCohorts_LtAmount::new(client.clone(), &format!("{base_path}_lt_amount")), - max_age: CatalogTree_Computed_Stateful_UtxoCohorts_MaxAge::new(client.clone(), &format!("{base_path}_max_age")), - min_age: CatalogTree_Computed_Stateful_UtxoCohorts_MinAge::new(client.clone(), &format!("{base_path}_min_age")), - term: CatalogTree_Computed_Stateful_UtxoCohorts_Term::new(client.clone(), &format!("{base_path}_term")), - type_: CatalogTree_Computed_Stateful_UtxoCohorts_Type::new(client.clone(), &format!("{base_path}_type_")), - year: CatalogTree_Computed_Stateful_UtxoCohorts_Year::new(client.clone(), &format!("{base_path}_year")), + base: MetricPattern25::new(client.clone(), format!("{base_path}_base")), + bitcoin: SegwitAdoptionPattern::new(client.clone(), "opreturn_value_btc".to_string()), + dollars: SegwitAdoptionPattern::new(client.clone(), "opreturn_value_usd".to_string()), + sats: CatalogTree_Computed_Scripts_Value_OpreturnValue_Sats::new(client.clone(), format!("{base_path}_sats")), } } } /// Catalog tree node. 
-pub struct CatalogTree_Computed_Stateful_UtxoCohorts_AgeRange { - pub _10y_to_12y: _10yTo12yPattern, - pub _12y_to_15y: _10yTo12yPattern, - pub _1d_to_1w: _10yTo12yPattern, - pub _1m_to_2m: _10yTo12yPattern, - pub _1w_to_1m: _10yTo12yPattern, - pub _1y_to_2y: _10yTo12yPattern, - pub _2m_to_3m: _10yTo12yPattern, - pub _2y_to_3y: _10yTo12yPattern, - pub _3m_to_4m: _10yTo12yPattern, - pub _3y_to_4y: _10yTo12yPattern, - pub _4m_to_5m: _10yTo12yPattern, - pub _4y_to_5y: _10yTo12yPattern, - pub _5m_to_6m: _10yTo12yPattern, - pub _5y_to_6y: _10yTo12yPattern, - pub _6m_to_1y: _10yTo12yPattern, - pub _6y_to_7y: _10yTo12yPattern, - pub _7y_to_8y: _10yTo12yPattern, - pub _8y_to_10y: _10yTo12yPattern, - pub from_15y: _10yTo12yPattern, - pub up_to_1d: UpTo1dPattern, +pub struct CatalogTree_Computed_Scripts_Value_OpreturnValue_Sats { + pub average: MetricPattern2, + pub cumulative: MetricPattern1, + pub max: MetricPattern2, + pub min: MetricPattern2, + pub sum: MetricPattern2, } -impl CatalogTree_Computed_Stateful_UtxoCohorts_AgeRange { - pub fn new(client: Arc, base_path: &str) -> Self { +impl CatalogTree_Computed_Scripts_Value_OpreturnValue_Sats { + pub fn new(client: Arc, base_path: String) -> Self { Self { - _10y_to_12y: _10yTo12yPattern::new(client.clone(), &format!("{base_path}__10y_to_12y")), - _12y_to_15y: _10yTo12yPattern::new(client.clone(), &format!("{base_path}__12y_to_15y")), - _1d_to_1w: _10yTo12yPattern::new(client.clone(), &format!("{base_path}__1d_to_1w")), - _1m_to_2m: _10yTo12yPattern::new(client.clone(), &format!("{base_path}__1m_to_2m")), - _1w_to_1m: _10yTo12yPattern::new(client.clone(), &format!("{base_path}__1w_to_1m")), - _1y_to_2y: _10yTo12yPattern::new(client.clone(), &format!("{base_path}__1y_to_2y")), - _2m_to_3m: _10yTo12yPattern::new(client.clone(), &format!("{base_path}__2m_to_3m")), - _2y_to_3y: _10yTo12yPattern::new(client.clone(), &format!("{base_path}__2y_to_3y")), - _3m_to_4m: _10yTo12yPattern::new(client.clone(), 
&format!("{base_path}__3m_to_4m")), - _3y_to_4y: _10yTo12yPattern::new(client.clone(), &format!("{base_path}__3y_to_4y")), - _4m_to_5m: _10yTo12yPattern::new(client.clone(), &format!("{base_path}__4m_to_5m")), - _4y_to_5y: _10yTo12yPattern::new(client.clone(), &format!("{base_path}__4y_to_5y")), - _5m_to_6m: _10yTo12yPattern::new(client.clone(), &format!("{base_path}__5m_to_6m")), - _5y_to_6y: _10yTo12yPattern::new(client.clone(), &format!("{base_path}__5y_to_6y")), - _6m_to_1y: _10yTo12yPattern::new(client.clone(), &format!("{base_path}__6m_to_1y")), - _6y_to_7y: _10yTo12yPattern::new(client.clone(), &format!("{base_path}__6y_to_7y")), - _7y_to_8y: _10yTo12yPattern::new(client.clone(), &format!("{base_path}__7y_to_8y")), - _8y_to_10y: _10yTo12yPattern::new(client.clone(), &format!("{base_path}__8y_to_10y")), - from_15y: _10yTo12yPattern::new(client.clone(), &format!("{base_path}_from_15y")), - up_to_1d: UpTo1dPattern::new(client.clone(), &format!("{base_path}_up_to_1d")), + average: MetricPattern2::new(client.clone(), format!("{base_path}_average")), + cumulative: MetricPattern1::new(client.clone(), format!("{base_path}_cumulative")), + max: MetricPattern2::new(client.clone(), format!("{base_path}_max")), + min: MetricPattern2::new(client.clone(), format!("{base_path}_min")), + sum: MetricPattern2::new(client.clone(), format!("{base_path}_sum")), } } } /// Catalog tree node. 
-pub struct CatalogTree_Computed_Stateful_UtxoCohorts_All { - pub activity: ActivityPattern, - pub price_paid: PricePaidPattern2, - pub realized: RealizedPattern3, - pub relative: CatalogTree_Computed_Stateful_UtxoCohorts_All_Relative, - pub supply: SupplyPattern2, - pub unrealized: UnrealizedPattern, +pub struct CatalogTree_Computed_Supply { + pub burned: CatalogTree_Computed_Supply_Burned, + pub circulating: CatalogTree_Computed_Supply_Circulating, + pub inflation: CatalogTree_Computed_Supply_Inflation, + pub market_cap: CatalogTree_Computed_Supply_MarketCap, + pub velocity: CatalogTree_Computed_Supply_Velocity, } -impl CatalogTree_Computed_Stateful_UtxoCohorts_All { - pub fn new(client: Arc, base_path: &str) -> Self { +impl CatalogTree_Computed_Supply { + pub fn new(client: Arc, base_path: String) -> Self { Self { - activity: ActivityPattern::new(client.clone(), &format!("{base_path}_activity")), - price_paid: PricePaidPattern2::new(client.clone(), &format!("{base_path}_price_paid")), - realized: RealizedPattern3::new(client.clone(), &format!("{base_path}_realized")), - relative: CatalogTree_Computed_Stateful_UtxoCohorts_All_Relative::new(client.clone(), &format!("{base_path}_relative")), - supply: SupplyPattern2::new(client.clone(), &format!("{base_path}_supply")), - unrealized: UnrealizedPattern::new(client.clone(), &format!("{base_path}_unrealized")), + burned: CatalogTree_Computed_Supply_Burned::new(client.clone(), format!("{base_path}_burned")), + circulating: CatalogTree_Computed_Supply_Circulating::new(client.clone(), format!("{base_path}_circulating")), + inflation: CatalogTree_Computed_Supply_Inflation::new(client.clone(), format!("{base_path}_inflation")), + market_cap: CatalogTree_Computed_Supply_MarketCap::new(client.clone(), format!("{base_path}_market_cap")), + velocity: CatalogTree_Computed_Supply_Velocity::new(client.clone(), format!("{base_path}_velocity")), } } } /// Catalog tree node. 
-pub struct CatalogTree_Computed_Stateful_UtxoCohorts_All_Relative { - pub neg_unrealized_loss_rel_to_own_total_unrealized_pnl: Indexes27, - pub net_unrealized_pnl_rel_to_own_total_unrealized_pnl: Indexes26, - pub supply_in_loss_rel_to_own_supply: Indexes27, - pub supply_in_profit_rel_to_own_supply: Indexes27, - pub unrealized_loss_rel_to_own_total_unrealized_pnl: Indexes27, - pub unrealized_profit_rel_to_own_total_unrealized_pnl: Indexes27, +pub struct CatalogTree_Computed_Supply_Burned { + pub opreturn: OpreturnPattern, + pub unspendable: OpreturnPattern, } -impl CatalogTree_Computed_Stateful_UtxoCohorts_All_Relative { - pub fn new(client: Arc, base_path: &str) -> Self { +impl CatalogTree_Computed_Supply_Burned { + pub fn new(client: Arc, base_path: String) -> Self { Self { - neg_unrealized_loss_rel_to_own_total_unrealized_pnl: Indexes27::new(client.clone(), &format!("{base_path}_neg_unrealized_loss_rel_to_own_total_unrealized_pnl")), - net_unrealized_pnl_rel_to_own_total_unrealized_pnl: Indexes26::new(client.clone(), &format!("{base_path}_net_unrealized_pnl_rel_to_own_total_unrealized_pnl")), - supply_in_loss_rel_to_own_supply: Indexes27::new(client.clone(), &format!("{base_path}_supply_in_loss_rel_to_own_supply")), - supply_in_profit_rel_to_own_supply: Indexes27::new(client.clone(), &format!("{base_path}_supply_in_profit_rel_to_own_supply")), - unrealized_loss_rel_to_own_total_unrealized_pnl: Indexes27::new(client.clone(), &format!("{base_path}_unrealized_loss_rel_to_own_total_unrealized_pnl")), - unrealized_profit_rel_to_own_total_unrealized_pnl: Indexes27::new(client.clone(), &format!("{base_path}_unrealized_profit_rel_to_own_total_unrealized_pnl")), + opreturn: OpreturnPattern::new(client.clone(), "opreturn_supply".to_string()), + unspendable: OpreturnPattern::new(client.clone(), "unspendable_supply".to_string()), } } } /// Catalog tree node. 
-pub struct CatalogTree_Computed_Stateful_UtxoCohorts_AmountRange { - pub _0sats: _0satsPattern2, - pub _100btc_to_1k_btc: _0satsPattern2, - pub _100k_btc_or_more: _0satsPattern2, - pub _100k_sats_to_1m_sats: _0satsPattern2, - pub _100sats_to_1k_sats: _0satsPattern2, - pub _10btc_to_100btc: _0satsPattern2, - pub _10k_btc_to_100k_btc: _0satsPattern2, - pub _10k_sats_to_100k_sats: _0satsPattern2, - pub _10m_sats_to_1btc: _0satsPattern2, - pub _10sats_to_100sats: _0satsPattern2, - pub _1btc_to_10btc: _0satsPattern2, - pub _1k_btc_to_10k_btc: _0satsPattern2, - pub _1k_sats_to_10k_sats: _0satsPattern2, - pub _1m_sats_to_10m_sats: _0satsPattern2, - pub _1sat_to_10sats: _0satsPattern2, +pub struct CatalogTree_Computed_Supply_Circulating { + pub btc: MetricPattern25, + pub indexes: ActiveSupplyPattern, + pub sats: MetricPattern25, + pub usd: MetricPattern25, } -impl CatalogTree_Computed_Stateful_UtxoCohorts_AmountRange { - pub fn new(client: Arc, base_path: &str) -> Self { +impl CatalogTree_Computed_Supply_Circulating { + pub fn new(client: Arc, base_path: String) -> Self { Self { - _0sats: _0satsPattern2::new(client.clone(), &format!("{base_path}__0sats")), - _100btc_to_1k_btc: _0satsPattern2::new(client.clone(), &format!("{base_path}__100btc_to_1k_btc")), - _100k_btc_or_more: _0satsPattern2::new(client.clone(), &format!("{base_path}__100k_btc_or_more")), - _100k_sats_to_1m_sats: _0satsPattern2::new(client.clone(), &format!("{base_path}__100k_sats_to_1m_sats")), - _100sats_to_1k_sats: _0satsPattern2::new(client.clone(), &format!("{base_path}__100sats_to_1k_sats")), - _10btc_to_100btc: _0satsPattern2::new(client.clone(), &format!("{base_path}__10btc_to_100btc")), - _10k_btc_to_100k_btc: _0satsPattern2::new(client.clone(), &format!("{base_path}__10k_btc_to_100k_btc")), - _10k_sats_to_100k_sats: _0satsPattern2::new(client.clone(), &format!("{base_path}__10k_sats_to_100k_sats")), - _10m_sats_to_1btc: _0satsPattern2::new(client.clone(), 
&format!("{base_path}__10m_sats_to_1btc")), - _10sats_to_100sats: _0satsPattern2::new(client.clone(), &format!("{base_path}__10sats_to_100sats")), - _1btc_to_10btc: _0satsPattern2::new(client.clone(), &format!("{base_path}__1btc_to_10btc")), - _1k_btc_to_10k_btc: _0satsPattern2::new(client.clone(), &format!("{base_path}__1k_btc_to_10k_btc")), - _1k_sats_to_10k_sats: _0satsPattern2::new(client.clone(), &format!("{base_path}__1k_sats_to_10k_sats")), - _1m_sats_to_10m_sats: _0satsPattern2::new(client.clone(), &format!("{base_path}__1m_sats_to_10m_sats")), - _1sat_to_10sats: _0satsPattern2::new(client.clone(), &format!("{base_path}__1sat_to_10sats")), + btc: MetricPattern25::new(client.clone(), format!("{base_path}_btc")), + indexes: ActiveSupplyPattern::new(client.clone(), "circulating".to_string()), + sats: MetricPattern25::new(client.clone(), format!("{base_path}_sats")), + usd: MetricPattern25::new(client.clone(), format!("{base_path}_usd")), } } } /// Catalog tree node. -pub struct CatalogTree_Computed_Stateful_UtxoCohorts_Epoch { - pub _0: _10yTo12yPattern, - pub _1: _10yTo12yPattern, - pub _2: _10yTo12yPattern, - pub _3: _10yTo12yPattern, - pub _4: _10yTo12yPattern, +pub struct CatalogTree_Computed_Supply_Inflation { + pub indexes: MetricPattern4, } -impl CatalogTree_Computed_Stateful_UtxoCohorts_Epoch { - pub fn new(client: Arc, base_path: &str) -> Self { +impl CatalogTree_Computed_Supply_Inflation { + pub fn new(client: Arc, base_path: String) -> Self { Self { - _0: _10yTo12yPattern::new(client.clone(), &format!("{base_path}__0")), - _1: _10yTo12yPattern::new(client.clone(), &format!("{base_path}__1")), - _2: _10yTo12yPattern::new(client.clone(), &format!("{base_path}__2")), - _3: _10yTo12yPattern::new(client.clone(), &format!("{base_path}__3")), - _4: _10yTo12yPattern::new(client.clone(), &format!("{base_path}__4")), + indexes: MetricPattern4::new(client.clone(), format!("{base_path}_indexes")), } } } /// Catalog tree node. 
-pub struct CatalogTree_Computed_Stateful_UtxoCohorts_GeAmount { - pub _100btc: _0satsPattern2, - pub _100k_sats: _0satsPattern2, - pub _100sats: _0satsPattern2, - pub _10btc: _0satsPattern2, - pub _10k_btc: _0satsPattern2, - pub _10k_sats: _0satsPattern2, - pub _10m_sats: _0satsPattern2, - pub _10sats: _0satsPattern2, - pub _1btc: _0satsPattern2, - pub _1k_btc: _0satsPattern2, - pub _1k_sats: _0satsPattern2, - pub _1m_sats: _0satsPattern2, - pub _1sat: _0satsPattern2, +pub struct CatalogTree_Computed_Supply_MarketCap { + pub height: MetricPattern25, + pub indexes: MetricPattern4, } -impl CatalogTree_Computed_Stateful_UtxoCohorts_GeAmount { - pub fn new(client: Arc, base_path: &str) -> Self { +impl CatalogTree_Computed_Supply_MarketCap { + pub fn new(client: Arc, base_path: String) -> Self { Self { - _100btc: _0satsPattern2::new(client.clone(), &format!("{base_path}__100btc")), - _100k_sats: _0satsPattern2::new(client.clone(), &format!("{base_path}__100k_sats")), - _100sats: _0satsPattern2::new(client.clone(), &format!("{base_path}__100sats")), - _10btc: _0satsPattern2::new(client.clone(), &format!("{base_path}__10btc")), - _10k_btc: _0satsPattern2::new(client.clone(), &format!("{base_path}__10k_btc")), - _10k_sats: _0satsPattern2::new(client.clone(), &format!("{base_path}__10k_sats")), - _10m_sats: _0satsPattern2::new(client.clone(), &format!("{base_path}__10m_sats")), - _10sats: _0satsPattern2::new(client.clone(), &format!("{base_path}__10sats")), - _1btc: _0satsPattern2::new(client.clone(), &format!("{base_path}__1btc")), - _1k_btc: _0satsPattern2::new(client.clone(), &format!("{base_path}__1k_btc")), - _1k_sats: _0satsPattern2::new(client.clone(), &format!("{base_path}__1k_sats")), - _1m_sats: _0satsPattern2::new(client.clone(), &format!("{base_path}__1m_sats")), - _1sat: _0satsPattern2::new(client.clone(), &format!("{base_path}__1sat")), + height: MetricPattern25::new(client.clone(), format!("{base_path}_height")), + indexes: 
MetricPattern4::new(client.clone(), format!("{base_path}_indexes")), } } } /// Catalog tree node. -pub struct CatalogTree_Computed_Stateful_UtxoCohorts_LtAmount { - pub _100btc: _0satsPattern2, - pub _100k_btc: _0satsPattern2, - pub _100k_sats: _0satsPattern2, - pub _100sats: _0satsPattern2, - pub _10btc: _0satsPattern2, - pub _10k_btc: _0satsPattern2, - pub _10k_sats: _0satsPattern2, - pub _10m_sats: _0satsPattern2, - pub _10sats: _0satsPattern2, - pub _1btc: _0satsPattern2, - pub _1k_btc: _0satsPattern2, - pub _1k_sats: _0satsPattern2, - pub _1m_sats: _0satsPattern2, +pub struct CatalogTree_Computed_Supply_Velocity { + pub btc: MetricPattern4, + pub usd: MetricPattern4, } -impl CatalogTree_Computed_Stateful_UtxoCohorts_LtAmount { - pub fn new(client: Arc, base_path: &str) -> Self { +impl CatalogTree_Computed_Supply_Velocity { + pub fn new(client: Arc, base_path: String) -> Self { Self { - _100btc: _0satsPattern2::new(client.clone(), &format!("{base_path}__100btc")), - _100k_btc: _0satsPattern2::new(client.clone(), &format!("{base_path}__100k_btc")), - _100k_sats: _0satsPattern2::new(client.clone(), &format!("{base_path}__100k_sats")), - _100sats: _0satsPattern2::new(client.clone(), &format!("{base_path}__100sats")), - _10btc: _0satsPattern2::new(client.clone(), &format!("{base_path}__10btc")), - _10k_btc: _0satsPattern2::new(client.clone(), &format!("{base_path}__10k_btc")), - _10k_sats: _0satsPattern2::new(client.clone(), &format!("{base_path}__10k_sats")), - _10m_sats: _0satsPattern2::new(client.clone(), &format!("{base_path}__10m_sats")), - _10sats: _0satsPattern2::new(client.clone(), &format!("{base_path}__10sats")), - _1btc: _0satsPattern2::new(client.clone(), &format!("{base_path}__1btc")), - _1k_btc: _0satsPattern2::new(client.clone(), &format!("{base_path}__1k_btc")), - _1k_sats: _0satsPattern2::new(client.clone(), &format!("{base_path}__1k_sats")), - _1m_sats: _0satsPattern2::new(client.clone(), &format!("{base_path}__1m_sats")), + btc: 
MetricPattern4::new(client.clone(), format!("{base_path}_btc")), + usd: MetricPattern4::new(client.clone(), format!("{base_path}_usd")), } } } /// Catalog tree node. -pub struct CatalogTree_Computed_Stateful_UtxoCohorts_MaxAge { - pub _10y: UpTo1dPattern, - pub _12y: UpTo1dPattern, - pub _15y: UpTo1dPattern, - pub _1m: UpTo1dPattern, - pub _1w: UpTo1dPattern, - pub _1y: UpTo1dPattern, - pub _2m: UpTo1dPattern, - pub _2y: UpTo1dPattern, - pub _3m: UpTo1dPattern, - pub _3y: UpTo1dPattern, - pub _4m: UpTo1dPattern, - pub _4y: UpTo1dPattern, - pub _5m: UpTo1dPattern, - pub _5y: UpTo1dPattern, - pub _6m: UpTo1dPattern, - pub _6y: UpTo1dPattern, - pub _7y: UpTo1dPattern, - pub _8y: UpTo1dPattern, +pub struct CatalogTree_Computed_Transactions { + pub count: CatalogTree_Computed_Transactions_Count, + pub fees: CatalogTree_Computed_Transactions_Fees, + pub size: CatalogTree_Computed_Transactions_Size, + pub versions: CatalogTree_Computed_Transactions_Versions, + pub volume: CatalogTree_Computed_Transactions_Volume, } -impl CatalogTree_Computed_Stateful_UtxoCohorts_MaxAge { - pub fn new(client: Arc, base_path: &str) -> Self { +impl CatalogTree_Computed_Transactions { + pub fn new(client: Arc, base_path: String) -> Self { Self { - _10y: UpTo1dPattern::new(client.clone(), &format!("{base_path}__10y")), - _12y: UpTo1dPattern::new(client.clone(), &format!("{base_path}__12y")), - _15y: UpTo1dPattern::new(client.clone(), &format!("{base_path}__15y")), - _1m: UpTo1dPattern::new(client.clone(), &format!("{base_path}__1m")), - _1w: UpTo1dPattern::new(client.clone(), &format!("{base_path}__1w")), - _1y: UpTo1dPattern::new(client.clone(), &format!("{base_path}__1y")), - _2m: UpTo1dPattern::new(client.clone(), &format!("{base_path}__2m")), - _2y: UpTo1dPattern::new(client.clone(), &format!("{base_path}__2y")), - _3m: UpTo1dPattern::new(client.clone(), &format!("{base_path}__3m")), - _3y: UpTo1dPattern::new(client.clone(), &format!("{base_path}__3y")), - _4m: 
UpTo1dPattern::new(client.clone(), &format!("{base_path}__4m")), - _4y: UpTo1dPattern::new(client.clone(), &format!("{base_path}__4y")), - _5m: UpTo1dPattern::new(client.clone(), &format!("{base_path}__5m")), - _5y: UpTo1dPattern::new(client.clone(), &format!("{base_path}__5y")), - _6m: UpTo1dPattern::new(client.clone(), &format!("{base_path}__6m")), - _6y: UpTo1dPattern::new(client.clone(), &format!("{base_path}__6y")), - _7y: UpTo1dPattern::new(client.clone(), &format!("{base_path}__7y")), - _8y: UpTo1dPattern::new(client.clone(), &format!("{base_path}__8y")), + count: CatalogTree_Computed_Transactions_Count::new(client.clone(), format!("{base_path}_count")), + fees: CatalogTree_Computed_Transactions_Fees::new(client.clone(), format!("{base_path}_fees")), + size: CatalogTree_Computed_Transactions_Size::new(client.clone(), format!("{base_path}_size")), + versions: CatalogTree_Computed_Transactions_Versions::new(client.clone(), format!("{base_path}_versions")), + volume: CatalogTree_Computed_Transactions_Volume::new(client.clone(), format!("{base_path}_volume")), } } } /// Catalog tree node. 
-pub struct CatalogTree_Computed_Stateful_UtxoCohorts_MinAge { - pub _10y: _10yTo12yPattern, - pub _12y: _10yTo12yPattern, - pub _1d: _10yTo12yPattern, - pub _1m: _10yTo12yPattern, - pub _1w: _10yTo12yPattern, - pub _1y: _10yTo12yPattern, - pub _2m: _10yTo12yPattern, - pub _2y: _10yTo12yPattern, - pub _3m: _10yTo12yPattern, - pub _3y: _10yTo12yPattern, - pub _4m: _10yTo12yPattern, - pub _4y: _10yTo12yPattern, - pub _5m: _10yTo12yPattern, - pub _5y: _10yTo12yPattern, - pub _6m: _10yTo12yPattern, - pub _6y: _10yTo12yPattern, - pub _7y: _10yTo12yPattern, - pub _8y: _10yTo12yPattern, +pub struct CatalogTree_Computed_Transactions_Count { + pub is_coinbase: MetricPattern41, + pub tx_count: BitcoinPattern, } -impl CatalogTree_Computed_Stateful_UtxoCohorts_MinAge { - pub fn new(client: Arc, base_path: &str) -> Self { +impl CatalogTree_Computed_Transactions_Count { + pub fn new(client: Arc, base_path: String) -> Self { Self { - _10y: _10yTo12yPattern::new(client.clone(), &format!("{base_path}__10y")), - _12y: _10yTo12yPattern::new(client.clone(), &format!("{base_path}__12y")), - _1d: _10yTo12yPattern::new(client.clone(), &format!("{base_path}__1d")), - _1m: _10yTo12yPattern::new(client.clone(), &format!("{base_path}__1m")), - _1w: _10yTo12yPattern::new(client.clone(), &format!("{base_path}__1w")), - _1y: _10yTo12yPattern::new(client.clone(), &format!("{base_path}__1y")), - _2m: _10yTo12yPattern::new(client.clone(), &format!("{base_path}__2m")), - _2y: _10yTo12yPattern::new(client.clone(), &format!("{base_path}__2y")), - _3m: _10yTo12yPattern::new(client.clone(), &format!("{base_path}__3m")), - _3y: _10yTo12yPattern::new(client.clone(), &format!("{base_path}__3y")), - _4m: _10yTo12yPattern::new(client.clone(), &format!("{base_path}__4m")), - _4y: _10yTo12yPattern::new(client.clone(), &format!("{base_path}__4y")), - _5m: _10yTo12yPattern::new(client.clone(), &format!("{base_path}__5m")), - _5y: _10yTo12yPattern::new(client.clone(), &format!("{base_path}__5y")), - _6m: 
_10yTo12yPattern::new(client.clone(), &format!("{base_path}__6m")), - _6y: _10yTo12yPattern::new(client.clone(), &format!("{base_path}__6y")), - _7y: _10yTo12yPattern::new(client.clone(), &format!("{base_path}__7y")), - _8y: _10yTo12yPattern::new(client.clone(), &format!("{base_path}__8y")), + is_coinbase: MetricPattern41::new(client.clone(), format!("{base_path}_is_coinbase")), + tx_count: BitcoinPattern::new(client.clone(), "tx_count".to_string()), } } } /// Catalog tree node. -pub struct CatalogTree_Computed_Stateful_UtxoCohorts_Term { - pub long: UpTo1dPattern, - pub short: UpTo1dPattern, +pub struct CatalogTree_Computed_Transactions_Fees { + pub fee: CatalogTree_Computed_Transactions_Fees_Fee, + pub fee_rate: CatalogTree_Computed_Transactions_Fees_FeeRate, + pub input_value: MetricPattern41, + pub output_value: MetricPattern41, } -impl CatalogTree_Computed_Stateful_UtxoCohorts_Term { - pub fn new(client: Arc, base_path: &str) -> Self { +impl CatalogTree_Computed_Transactions_Fees { + pub fn new(client: Arc, base_path: String) -> Self { Self { - long: UpTo1dPattern::new(client.clone(), &format!("{base_path}_long")), - short: UpTo1dPattern::new(client.clone(), &format!("{base_path}_short")), + fee: CatalogTree_Computed_Transactions_Fees_Fee::new(client.clone(), format!("{base_path}_fee")), + fee_rate: CatalogTree_Computed_Transactions_Fees_FeeRate::new(client.clone(), format!("{base_path}_fee_rate")), + input_value: MetricPattern41::new(client.clone(), format!("{base_path}_input_value")), + output_value: MetricPattern41::new(client.clone(), format!("{base_path}_output_value")), } } } /// Catalog tree node. 
-pub struct CatalogTree_Computed_Stateful_UtxoCohorts_Type { - pub empty: _0satsPattern2, - pub p2a: _0satsPattern2, - pub p2ms: _0satsPattern2, - pub p2pk33: _0satsPattern2, - pub p2pk65: _0satsPattern2, - pub p2pkh: _0satsPattern2, - pub p2sh: _0satsPattern2, - pub p2tr: _0satsPattern2, - pub p2wpkh: _0satsPattern2, - pub p2wsh: _0satsPattern2, - pub unknown: _0satsPattern2, +pub struct CatalogTree_Computed_Transactions_Fees_Fee { + pub base: MetricPattern41, + pub bitcoin: BlockSizePattern, + pub bitcoin_txindex: MetricPattern41, + pub dollars: BlockSizePattern, + pub dollars_txindex: MetricPattern41, + pub sats: BlockSizePattern, } -impl CatalogTree_Computed_Stateful_UtxoCohorts_Type { - pub fn new(client: Arc, base_path: &str) -> Self { +impl CatalogTree_Computed_Transactions_Fees_Fee { + pub fn new(client: Arc, base_path: String) -> Self { Self { - empty: _0satsPattern2::new(client.clone(), &format!("{base_path}_empty")), - p2a: _0satsPattern2::new(client.clone(), &format!("{base_path}_p2a")), - p2ms: _0satsPattern2::new(client.clone(), &format!("{base_path}_p2ms")), - p2pk33: _0satsPattern2::new(client.clone(), &format!("{base_path}_p2pk33")), - p2pk65: _0satsPattern2::new(client.clone(), &format!("{base_path}_p2pk65")), - p2pkh: _0satsPattern2::new(client.clone(), &format!("{base_path}_p2pkh")), - p2sh: _0satsPattern2::new(client.clone(), &format!("{base_path}_p2sh")), - p2tr: _0satsPattern2::new(client.clone(), &format!("{base_path}_p2tr")), - p2wpkh: _0satsPattern2::new(client.clone(), &format!("{base_path}_p2wpkh")), - p2wsh: _0satsPattern2::new(client.clone(), &format!("{base_path}_p2wsh")), - unknown: _0satsPattern2::new(client.clone(), &format!("{base_path}_unknown")), + base: MetricPattern41::new(client.clone(), format!("{base_path}_base")), + bitcoin: BlockSizePattern::new(client.clone(), "fee_btc".to_string()), + bitcoin_txindex: MetricPattern41::new(client.clone(), format!("{base_path}_bitcoin_txindex")), + dollars: 
BlockSizePattern::new(client.clone(), "fee_usd".to_string()), + dollars_txindex: MetricPattern41::new(client.clone(), format!("{base_path}_dollars_txindex")), + sats: BlockSizePattern::new(client.clone(), "fee".to_string()), } } } /// Catalog tree node. -pub struct CatalogTree_Computed_Stateful_UtxoCohorts_Year { - pub _2009: _10yTo12yPattern, - pub _2010: _10yTo12yPattern, - pub _2011: _10yTo12yPattern, - pub _2012: _10yTo12yPattern, - pub _2013: _10yTo12yPattern, - pub _2014: _10yTo12yPattern, - pub _2015: _10yTo12yPattern, - pub _2016: _10yTo12yPattern, - pub _2017: _10yTo12yPattern, - pub _2018: _10yTo12yPattern, - pub _2019: _10yTo12yPattern, - pub _2020: _10yTo12yPattern, - pub _2021: _10yTo12yPattern, - pub _2022: _10yTo12yPattern, - pub _2023: _10yTo12yPattern, - pub _2024: _10yTo12yPattern, - pub _2025: _10yTo12yPattern, - pub _2026: _10yTo12yPattern, +pub struct CatalogTree_Computed_Transactions_Fees_FeeRate { + pub average: MetricPattern1, + pub base: MetricPattern41, + pub max: MetricPattern1, + pub median: MetricPattern25, + pub min: MetricPattern1, + pub pct10: MetricPattern25, + pub pct25: MetricPattern25, + pub pct75: MetricPattern25, + pub pct90: MetricPattern25, } -impl CatalogTree_Computed_Stateful_UtxoCohorts_Year { - pub fn new(client: Arc, base_path: &str) -> Self { +impl CatalogTree_Computed_Transactions_Fees_FeeRate { + pub fn new(client: Arc, base_path: String) -> Self { Self { - _2009: _10yTo12yPattern::new(client.clone(), &format!("{base_path}__2009")), - _2010: _10yTo12yPattern::new(client.clone(), &format!("{base_path}__2010")), - _2011: _10yTo12yPattern::new(client.clone(), &format!("{base_path}__2011")), - _2012: _10yTo12yPattern::new(client.clone(), &format!("{base_path}__2012")), - _2013: _10yTo12yPattern::new(client.clone(), &format!("{base_path}__2013")), - _2014: _10yTo12yPattern::new(client.clone(), &format!("{base_path}__2014")), - _2015: _10yTo12yPattern::new(client.clone(), &format!("{base_path}__2015")), - _2016: 
_10yTo12yPattern::new(client.clone(), &format!("{base_path}__2016")), - _2017: _10yTo12yPattern::new(client.clone(), &format!("{base_path}__2017")), - _2018: _10yTo12yPattern::new(client.clone(), &format!("{base_path}__2018")), - _2019: _10yTo12yPattern::new(client.clone(), &format!("{base_path}__2019")), - _2020: _10yTo12yPattern::new(client.clone(), &format!("{base_path}__2020")), - _2021: _10yTo12yPattern::new(client.clone(), &format!("{base_path}__2021")), - _2022: _10yTo12yPattern::new(client.clone(), &format!("{base_path}__2022")), - _2023: _10yTo12yPattern::new(client.clone(), &format!("{base_path}__2023")), - _2024: _10yTo12yPattern::new(client.clone(), &format!("{base_path}__2024")), - _2025: _10yTo12yPattern::new(client.clone(), &format!("{base_path}__2025")), - _2026: _10yTo12yPattern::new(client.clone(), &format!("{base_path}__2026")), + average: MetricPattern1::new(client.clone(), format!("{base_path}_average")), + base: MetricPattern41::new(client.clone(), format!("{base_path}_base")), + max: MetricPattern1::new(client.clone(), format!("{base_path}_max")), + median: MetricPattern25::new(client.clone(), format!("{base_path}_median")), + min: MetricPattern1::new(client.clone(), format!("{base_path}_min")), + pct10: MetricPattern25::new(client.clone(), format!("{base_path}_pct10")), + pct25: MetricPattern25::new(client.clone(), format!("{base_path}_pct25")), + pct75: MetricPattern25::new(client.clone(), format!("{base_path}_pct75")), + pct90: MetricPattern25::new(client.clone(), format!("{base_path}_pct90")), } } } /// Catalog tree node. 
-pub struct CatalogTree_Computed_Txins { - pub txoutindex: Indexes24, - pub value: Indexes24, +pub struct CatalogTree_Computed_Transactions_Size { + pub tx_vsize: BlockIntervalPattern, + pub tx_weight: BlockIntervalPattern, + pub vsize: MetricPattern41, + pub weight: MetricPattern41, } -impl CatalogTree_Computed_Txins { - pub fn new(client: Arc, base_path: &str) -> Self { +impl CatalogTree_Computed_Transactions_Size { + pub fn new(client: Arc, base_path: String) -> Self { Self { - txoutindex: Indexes24::new(client.clone(), &format!("{base_path}_txoutindex")), - value: Indexes24::new(client.clone(), &format!("{base_path}_value")), + tx_vsize: BlockIntervalPattern::new(client.clone(), "tx_vsize".to_string()), + tx_weight: BlockIntervalPattern::new(client.clone(), "tx_weight".to_string()), + vsize: MetricPattern41::new(client.clone(), format!("{base_path}_vsize")), + weight: MetricPattern41::new(client.clone(), format!("{base_path}_weight")), } } } /// Catalog tree node. -pub struct CatalogTree_Computed_Txouts { - pub txinindex: Indexes25, +pub struct CatalogTree_Computed_Transactions_Versions { + pub tx_v1: BlockCountPattern, + pub tx_v2: BlockCountPattern, + pub tx_v3: BlockCountPattern, } -impl CatalogTree_Computed_Txouts { - pub fn new(client: Arc, base_path: &str) -> Self { +impl CatalogTree_Computed_Transactions_Versions { + pub fn new(client: Arc, base_path: String) -> Self { Self { - txinindex: Indexes25::new(client.clone(), &format!("{base_path}_txinindex")), + tx_v1: BlockCountPattern::new(client.clone(), "tx_v1".to_string()), + tx_v2: BlockCountPattern::new(client.clone(), "tx_v2".to_string()), + tx_v3: BlockCountPattern::new(client.clone(), "tx_v3".to_string()), + } + } +} + +/// Catalog tree node. 
+pub struct CatalogTree_Computed_Transactions_Volume { + pub annualized_volume: MetricPattern4, + pub annualized_volume_btc: MetricPattern4, + pub annualized_volume_usd: MetricPattern4, + pub inputs_per_sec: MetricPattern4, + pub outputs_per_sec: MetricPattern4, + pub sent_sum: CatalogTree_Computed_Transactions_Volume_SentSum, + pub tx_per_sec: MetricPattern4, +} + +impl CatalogTree_Computed_Transactions_Volume { + pub fn new(client: Arc, base_path: String) -> Self { + Self { + annualized_volume: MetricPattern4::new(client.clone(), format!("{base_path}_annualized_volume")), + annualized_volume_btc: MetricPattern4::new(client.clone(), format!("{base_path}_annualized_volume_btc")), + annualized_volume_usd: MetricPattern4::new(client.clone(), format!("{base_path}_annualized_volume_usd")), + inputs_per_sec: MetricPattern4::new(client.clone(), format!("{base_path}_inputs_per_sec")), + outputs_per_sec: MetricPattern4::new(client.clone(), format!("{base_path}_outputs_per_sec")), + sent_sum: CatalogTree_Computed_Transactions_Volume_SentSum::new(client.clone(), format!("{base_path}_sent_sum")), + tx_per_sec: MetricPattern4::new(client.clone(), format!("{base_path}_tx_per_sec")), + } + } +} + +/// Catalog tree node. 
+pub struct CatalogTree_Computed_Transactions_Volume_SentSum { + pub bitcoin: TotalRealizedPnlPattern, + pub dollars: MetricPattern1, + pub sats: MetricPattern1, +} + +impl CatalogTree_Computed_Transactions_Volume_SentSum { + pub fn new(client: Arc, base_path: String) -> Self { + Self { + bitcoin: TotalRealizedPnlPattern::new(client.clone(), "sent_sum_btc".to_string()), + dollars: MetricPattern1::new(client.clone(), format!("{base_path}_dollars")), + sats: MetricPattern1::new(client.clone(), format!("{base_path}_sats")), } } } @@ -3798,172 +7087,172 @@ pub struct CatalogTree_Indexed { } impl CatalogTree_Indexed { - pub fn new(client: Arc, base_path: &str) -> Self { + pub fn new(client: Arc, base_path: String) -> Self { Self { - address: CatalogTree_Indexed_Address::new(client.clone(), &format!("{base_path}_address")), - block: CatalogTree_Indexed_Block::new(client.clone(), &format!("{base_path}_block")), - output: CatalogTree_Indexed_Output::new(client.clone(), &format!("{base_path}_output")), - tx: CatalogTree_Indexed_Tx::new(client.clone(), &format!("{base_path}_tx")), - txin: CatalogTree_Indexed_Txin::new(client.clone(), &format!("{base_path}_txin")), - txout: CatalogTree_Indexed_Txout::new(client.clone(), &format!("{base_path}_txout")), + address: CatalogTree_Indexed_Address::new(client.clone(), format!("{base_path}_address")), + block: CatalogTree_Indexed_Block::new(client.clone(), format!("{base_path}_block")), + output: CatalogTree_Indexed_Output::new(client.clone(), format!("{base_path}_output")), + tx: CatalogTree_Indexed_Tx::new(client.clone(), format!("{base_path}_tx")), + txin: CatalogTree_Indexed_Txin::new(client.clone(), format!("{base_path}_txin")), + txout: CatalogTree_Indexed_Txout::new(client.clone(), format!("{base_path}_txout")), } } } /// Catalog tree node. 
pub struct CatalogTree_Indexed_Address { - pub first_p2aaddressindex: Indexes2, - pub first_p2pk33addressindex: Indexes2, - pub first_p2pk65addressindex: Indexes2, - pub first_p2pkhaddressindex: Indexes2, - pub first_p2shaddressindex: Indexes2, - pub first_p2traddressindex: Indexes2, - pub first_p2wpkhaddressindex: Indexes2, - pub first_p2wshaddressindex: Indexes2, - pub p2abytes: Indexes14, - pub p2pk33bytes: Indexes15, - pub p2pk65bytes: Indexes16, - pub p2pkhbytes: Indexes17, - pub p2shbytes: Indexes18, - pub p2trbytes: Indexes19, - pub p2wpkhbytes: Indexes20, - pub p2wshbytes: Indexes21, + pub first_p2aaddressindex: MetricPattern25, + pub first_p2pk33addressindex: MetricPattern25, + pub first_p2pk65addressindex: MetricPattern25, + pub first_p2pkhaddressindex: MetricPattern25, + pub first_p2shaddressindex: MetricPattern25, + pub first_p2traddressindex: MetricPattern25, + pub first_p2wpkhaddressindex: MetricPattern25, + pub first_p2wshaddressindex: MetricPattern25, + pub p2abytes: MetricPattern30, + pub p2pk33bytes: MetricPattern32, + pub p2pk65bytes: MetricPattern33, + pub p2pkhbytes: MetricPattern34, + pub p2shbytes: MetricPattern35, + pub p2trbytes: MetricPattern36, + pub p2wpkhbytes: MetricPattern37, + pub p2wshbytes: MetricPattern38, } impl CatalogTree_Indexed_Address { - pub fn new(client: Arc, base_path: &str) -> Self { + pub fn new(client: Arc, base_path: String) -> Self { Self { - first_p2aaddressindex: Indexes2::new(client.clone(), &format!("{base_path}_first_p2aaddressindex")), - first_p2pk33addressindex: Indexes2::new(client.clone(), &format!("{base_path}_first_p2pk33addressindex")), - first_p2pk65addressindex: Indexes2::new(client.clone(), &format!("{base_path}_first_p2pk65addressindex")), - first_p2pkhaddressindex: Indexes2::new(client.clone(), &format!("{base_path}_first_p2pkhaddressindex")), - first_p2shaddressindex: Indexes2::new(client.clone(), &format!("{base_path}_first_p2shaddressindex")), - first_p2traddressindex: 
Indexes2::new(client.clone(), &format!("{base_path}_first_p2traddressindex")), - first_p2wpkhaddressindex: Indexes2::new(client.clone(), &format!("{base_path}_first_p2wpkhaddressindex")), - first_p2wshaddressindex: Indexes2::new(client.clone(), &format!("{base_path}_first_p2wshaddressindex")), - p2abytes: Indexes14::new(client.clone(), &format!("{base_path}_p2abytes")), - p2pk33bytes: Indexes15::new(client.clone(), &format!("{base_path}_p2pk33bytes")), - p2pk65bytes: Indexes16::new(client.clone(), &format!("{base_path}_p2pk65bytes")), - p2pkhbytes: Indexes17::new(client.clone(), &format!("{base_path}_p2pkhbytes")), - p2shbytes: Indexes18::new(client.clone(), &format!("{base_path}_p2shbytes")), - p2trbytes: Indexes19::new(client.clone(), &format!("{base_path}_p2trbytes")), - p2wpkhbytes: Indexes20::new(client.clone(), &format!("{base_path}_p2wpkhbytes")), - p2wshbytes: Indexes21::new(client.clone(), &format!("{base_path}_p2wshbytes")), + first_p2aaddressindex: MetricPattern25::new(client.clone(), format!("{base_path}_first_p2aaddressindex")), + first_p2pk33addressindex: MetricPattern25::new(client.clone(), format!("{base_path}_first_p2pk33addressindex")), + first_p2pk65addressindex: MetricPattern25::new(client.clone(), format!("{base_path}_first_p2pk65addressindex")), + first_p2pkhaddressindex: MetricPattern25::new(client.clone(), format!("{base_path}_first_p2pkhaddressindex")), + first_p2shaddressindex: MetricPattern25::new(client.clone(), format!("{base_path}_first_p2shaddressindex")), + first_p2traddressindex: MetricPattern25::new(client.clone(), format!("{base_path}_first_p2traddressindex")), + first_p2wpkhaddressindex: MetricPattern25::new(client.clone(), format!("{base_path}_first_p2wpkhaddressindex")), + first_p2wshaddressindex: MetricPattern25::new(client.clone(), format!("{base_path}_first_p2wshaddressindex")), + p2abytes: MetricPattern30::new(client.clone(), format!("{base_path}_p2abytes")), + p2pk33bytes: MetricPattern32::new(client.clone(), 
format!("{base_path}_p2pk33bytes")), + p2pk65bytes: MetricPattern33::new(client.clone(), format!("{base_path}_p2pk65bytes")), + p2pkhbytes: MetricPattern34::new(client.clone(), format!("{base_path}_p2pkhbytes")), + p2shbytes: MetricPattern35::new(client.clone(), format!("{base_path}_p2shbytes")), + p2trbytes: MetricPattern36::new(client.clone(), format!("{base_path}_p2trbytes")), + p2wpkhbytes: MetricPattern37::new(client.clone(), format!("{base_path}_p2wpkhbytes")), + p2wshbytes: MetricPattern38::new(client.clone(), format!("{base_path}_p2wshbytes")), } } } /// Catalog tree node. pub struct CatalogTree_Indexed_Block { - pub blockhash: Indexes2, - pub difficulty: Indexes2, - pub timestamp: Indexes2, - pub total_size: Indexes2, - pub weight: Indexes2, + pub blockhash: MetricPattern25, + pub difficulty: MetricPattern25, + pub timestamp: MetricPattern25, + pub total_size: MetricPattern25, + pub weight: MetricPattern25, } impl CatalogTree_Indexed_Block { - pub fn new(client: Arc, base_path: &str) -> Self { + pub fn new(client: Arc, base_path: String) -> Self { Self { - blockhash: Indexes2::new(client.clone(), &format!("{base_path}_blockhash")), - difficulty: Indexes2::new(client.clone(), &format!("{base_path}_difficulty")), - timestamp: Indexes2::new(client.clone(), &format!("{base_path}_timestamp")), - total_size: Indexes2::new(client.clone(), &format!("{base_path}_total_size")), - weight: Indexes2::new(client.clone(), &format!("{base_path}_weight")), + blockhash: MetricPattern25::new(client.clone(), format!("{base_path}_blockhash")), + difficulty: MetricPattern25::new(client.clone(), format!("{base_path}_difficulty")), + timestamp: MetricPattern25::new(client.clone(), format!("{base_path}_timestamp")), + total_size: MetricPattern25::new(client.clone(), format!("{base_path}_total_size")), + weight: MetricPattern25::new(client.clone(), format!("{base_path}_weight")), } } } /// Catalog tree node. 
pub struct CatalogTree_Indexed_Output { - pub first_emptyoutputindex: Indexes2, - pub first_opreturnindex: Indexes2, - pub first_p2msoutputindex: Indexes2, - pub first_unknownoutputindex: Indexes2, - pub txindex: MetricNode, + pub first_emptyoutputindex: MetricPattern25, + pub first_opreturnindex: MetricPattern25, + pub first_p2msoutputindex: MetricPattern25, + pub first_unknownoutputindex: MetricPattern25, + pub txindex: MetricPattern7, } impl CatalogTree_Indexed_Output { - pub fn new(client: Arc, base_path: &str) -> Self { + pub fn new(client: Arc, base_path: String) -> Self { Self { - first_emptyoutputindex: Indexes2::new(client.clone(), &format!("{base_path}_first_emptyoutputindex")), - first_opreturnindex: Indexes2::new(client.clone(), &format!("{base_path}_first_opreturnindex")), - first_p2msoutputindex: Indexes2::new(client.clone(), &format!("{base_path}_first_p2msoutputindex")), - first_unknownoutputindex: Indexes2::new(client.clone(), &format!("{base_path}_first_unknownoutputindex")), - txindex: MetricNode::new(client.clone(), format!("{base_path}_txindex")), + first_emptyoutputindex: MetricPattern25::new(client.clone(), format!("{base_path}_first_emptyoutputindex")), + first_opreturnindex: MetricPattern25::new(client.clone(), format!("{base_path}_first_opreturnindex")), + first_p2msoutputindex: MetricPattern25::new(client.clone(), format!("{base_path}_first_p2msoutputindex")), + first_unknownoutputindex: MetricPattern25::new(client.clone(), format!("{base_path}_first_unknownoutputindex")), + txindex: MetricPattern7::new(client.clone(), format!("{base_path}_txindex")), } } } /// Catalog tree node. 
pub struct CatalogTree_Indexed_Tx { - pub base_size: Indexes6, - pub first_txindex: Indexes2, - pub first_txinindex: Indexes6, - pub first_txoutindex: Indexes6, - pub height: Indexes6, - pub is_explicitly_rbf: Indexes6, - pub rawlocktime: Indexes6, - pub total_size: Indexes6, - pub txid: Indexes6, - pub txversion: Indexes6, + pub base_size: MetricPattern41, + pub first_txindex: MetricPattern25, + pub first_txinindex: MetricPattern41, + pub first_txoutindex: MetricPattern41, + pub height: MetricPattern41, + pub is_explicitly_rbf: MetricPattern41, + pub rawlocktime: MetricPattern41, + pub total_size: MetricPattern41, + pub txid: MetricPattern41, + pub txversion: MetricPattern41, } impl CatalogTree_Indexed_Tx { - pub fn new(client: Arc, base_path: &str) -> Self { + pub fn new(client: Arc, base_path: String) -> Self { Self { - base_size: Indexes6::new(client.clone(), &format!("{base_path}_base_size")), - first_txindex: Indexes2::new(client.clone(), &format!("{base_path}_first_txindex")), - first_txinindex: Indexes6::new(client.clone(), &format!("{base_path}_first_txinindex")), - first_txoutindex: Indexes6::new(client.clone(), &format!("{base_path}_first_txoutindex")), - height: Indexes6::new(client.clone(), &format!("{base_path}_height")), - is_explicitly_rbf: Indexes6::new(client.clone(), &format!("{base_path}_is_explicitly_rbf")), - rawlocktime: Indexes6::new(client.clone(), &format!("{base_path}_rawlocktime")), - total_size: Indexes6::new(client.clone(), &format!("{base_path}_total_size")), - txid: Indexes6::new(client.clone(), &format!("{base_path}_txid")), - txversion: Indexes6::new(client.clone(), &format!("{base_path}_txversion")), + base_size: MetricPattern41::new(client.clone(), format!("{base_path}_base_size")), + first_txindex: MetricPattern25::new(client.clone(), format!("{base_path}_first_txindex")), + first_txinindex: MetricPattern41::new(client.clone(), format!("{base_path}_first_txinindex")), + first_txoutindex: MetricPattern41::new(client.clone(), 
format!("{base_path}_first_txoutindex")), + height: MetricPattern41::new(client.clone(), format!("{base_path}_height")), + is_explicitly_rbf: MetricPattern41::new(client.clone(), format!("{base_path}_is_explicitly_rbf")), + rawlocktime: MetricPattern41::new(client.clone(), format!("{base_path}_rawlocktime")), + total_size: MetricPattern41::new(client.clone(), format!("{base_path}_total_size")), + txid: MetricPattern41::new(client.clone(), format!("{base_path}_txid")), + txversion: MetricPattern41::new(client.clone(), format!("{base_path}_txversion")), } } } /// Catalog tree node. pub struct CatalogTree_Indexed_Txin { - pub first_txinindex: Indexes2, - pub outpoint: Indexes24, - pub outputtype: Indexes24, - pub txindex: Indexes24, - pub typeindex: Indexes24, + pub first_txinindex: MetricPattern25, + pub outpoint: MetricPattern26, + pub outputtype: MetricPattern26, + pub txindex: MetricPattern26, + pub typeindex: MetricPattern26, } impl CatalogTree_Indexed_Txin { - pub fn new(client: Arc, base_path: &str) -> Self { + pub fn new(client: Arc, base_path: String) -> Self { Self { - first_txinindex: Indexes2::new(client.clone(), &format!("{base_path}_first_txinindex")), - outpoint: Indexes24::new(client.clone(), &format!("{base_path}_outpoint")), - outputtype: Indexes24::new(client.clone(), &format!("{base_path}_outputtype")), - txindex: Indexes24::new(client.clone(), &format!("{base_path}_txindex")), - typeindex: Indexes24::new(client.clone(), &format!("{base_path}_typeindex")), + first_txinindex: MetricPattern25::new(client.clone(), format!("{base_path}_first_txinindex")), + outpoint: MetricPattern26::new(client.clone(), format!("{base_path}_outpoint")), + outputtype: MetricPattern26::new(client.clone(), format!("{base_path}_outputtype")), + txindex: MetricPattern26::new(client.clone(), format!("{base_path}_txindex")), + typeindex: MetricPattern26::new(client.clone(), format!("{base_path}_typeindex")), } } } /// Catalog tree node. 
pub struct CatalogTree_Indexed_Txout { - pub first_txoutindex: Indexes2, - pub outputtype: Indexes25, - pub txindex: Indexes25, - pub typeindex: Indexes25, - pub value: Indexes25, + pub first_txoutindex: MetricPattern25, + pub outputtype: MetricPattern29, + pub txindex: MetricPattern29, + pub typeindex: MetricPattern29, + pub value: MetricPattern29, } impl CatalogTree_Indexed_Txout { - pub fn new(client: Arc, base_path: &str) -> Self { + pub fn new(client: Arc, base_path: String) -> Self { Self { - first_txoutindex: Indexes2::new(client.clone(), &format!("{base_path}_first_txoutindex")), - outputtype: Indexes25::new(client.clone(), &format!("{base_path}_outputtype")), - txindex: Indexes25::new(client.clone(), &format!("{base_path}_txindex")), - typeindex: Indexes25::new(client.clone(), &format!("{base_path}_typeindex")), - value: Indexes25::new(client.clone(), &format!("{base_path}_value")), + first_txoutindex: MetricPattern25::new(client.clone(), format!("{base_path}_first_txoutindex")), + outputtype: MetricPattern29::new(client.clone(), format!("{base_path}_outputtype")), + txindex: MetricPattern29::new(client.clone(), format!("{base_path}_txindex")), + typeindex: MetricPattern29::new(client.clone(), format!("{base_path}_typeindex")), + value: MetricPattern29::new(client.clone(), format!("{base_path}_value")), } } } @@ -3981,14 +7270,14 @@ impl BrkClient { /// Create a new client with the given base URL. pub fn new(base_url: impl Into) -> Self { let base = Arc::new(BrkClientBase::new(base_url)); - let tree = CatalogTree::new(base.clone(), ""); + let tree = CatalogTree::new(base.clone(), String::new()); Self { base, tree } } /// Create a new client with options. 
pub fn with_options(options: BrkClientOptions) -> Self { let base = Arc::new(BrkClientBase::with_options(options)); - let tree = CatalogTree::new(base.clone(), ""); + let tree = CatalogTree::new(base.clone(), String::new()); Self { base, tree } } diff --git a/crates/brk_computer/Cargo.toml b/crates/brk_computer/Cargo.toml index 0a9c3668d..d6a9b266e 100644 --- a/crates/brk_computer/Cargo.toml +++ b/crates/brk_computer/Cargo.toml @@ -23,7 +23,7 @@ brk_traversable = { workspace = true } brk_types = { workspace = true } derive_deref = { workspace = true } log = { workspace = true } -pco = "0.4.7" +pco = "0.4.9" rayon = { workspace = true } rustc-hash = { workspace = true } schemars = { workspace = true } diff --git a/crates/brk_computer/README.md b/crates/brk_computer/README.md index d6a05e924..ad8b1c3d1 100644 --- a/crates/brk_computer/README.md +++ b/crates/brk_computer/README.md @@ -25,16 +25,18 @@ let mut computer = Computer::forced_import(&outputs_path, &indexer, fetcher)?; computer.compute(&indexer, starting_indexes, &reader, &exit)?; // Access computed data -let supply = computer.chain.height_to_supply.get(height)?; -let realized_cap = computer.stateful.utxo.all.height_to_realized_cap.get(height)?; +let supply = computer.distribution.utxo_cohorts.all.metrics.supply.height_to_supply.get(height)?; +let realized_cap = computer.distribution.utxo_cohorts.all.metrics.realized.height_to_realized_cap.get(height)?; ``` ## Metric Categories | Module | Examples | |--------|----------| -| `chain` | Supply, subsidy, fees, transaction counts | -| `stateful` | Realized cap, MVRV, SOPR, unrealized P&L | +| `blocks` | Block count, interval, size, mining metrics, rewards | +| `transactions` | Transaction count, fee, size, volume | +| `scripts` | Output type counts | +| `distribution` | Realized cap, MVRV, SOPR, unrealized P&L, supply | | `cointime` | Liveliness, vaultedness, true market mean | | `pools` | Per-pool block counts, rewards, fees | | `market` | Market cap, NVT, Puell 
multiple | diff --git a/crates/brk_computer/examples/computer_read.rs b/crates/brk_computer/examples/computer_read.rs index 08f1cef44..c959a67a1 100644 --- a/crates/brk_computer/examples/computer_read.rs +++ b/crates/brk_computer/examples/computer_read.rs @@ -30,7 +30,7 @@ fn run() -> Result<()> { let computer = Computer::forced_import(&outputs_dir, &indexer, Some(fetcher))?; - let _a = dbg!(computer.chain.transaction.txindex_to_fee.region().meta()); + let _a = dbg!(computer.transactions.fees.txindex_to_fee.region().meta()); Ok(()) } diff --git a/crates/brk_computer/src/blocks/compute.rs b/crates/brk_computer/src/blocks/compute.rs new file mode 100644 index 000000000..62fb63781 --- /dev/null +++ b/crates/brk_computer/src/blocks/compute.rs @@ -0,0 +1,60 @@ +use brk_error::Result; +use brk_indexer::Indexer; +use vecdb::Exit; + +use crate::{ComputeIndexes, indexes, price, transactions}; + +use super::Vecs; + +impl Vecs { + pub fn compute( + &mut self, + indexer: &Indexer, + indexes: &indexes::Vecs, + transactions: &transactions::Vecs, + starting_indexes: &ComputeIndexes, + price: Option<&price::Vecs>, + exit: &Exit, + ) -> Result<()> { + // Core block metrics + self.count + .compute(indexer, indexes, &self.time, starting_indexes, exit)?; + self.interval.compute(indexes, starting_indexes, exit)?; + self.size + .compute(indexer, indexes, starting_indexes, exit)?; + self.weight + .compute(indexer, indexes, starting_indexes, exit)?; + + // Time metrics (timestamps) + self.time.compute(indexes, starting_indexes, exit)?; + + // Epoch metrics + self.difficulty.compute(indexes, starting_indexes, exit)?; + self.halving.compute(indexes, starting_indexes, exit)?; + + // Rewards depends on count and transactions fees + self.rewards.compute( + indexer, + indexes, + &self.count, + &transactions.fees, + starting_indexes, + price, + exit, + )?; + + // Mining depends on count and rewards + self.mining.compute( + indexer, + indexes, + &self.count, + &self.rewards, + starting_indexes, + 
exit, + )?; + + let _lock = exit.lock(); + self.db.compact()?; + Ok(()) + } +} diff --git a/crates/brk_computer/src/blocks/count/compute.rs b/crates/brk_computer/src/blocks/count/compute.rs new file mode 100644 index 000000000..857c25240 --- /dev/null +++ b/crates/brk_computer/src/blocks/count/compute.rs @@ -0,0 +1,85 @@ +use brk_error::Result; +use brk_indexer::Indexer; +use brk_types::{Height, StoredU32}; +use vecdb::{Exit, TypedVecIterator}; + +use super::super::time; +use super::Vecs; +use crate::{indexes, ComputeIndexes}; + +impl Vecs { + pub fn compute( + &mut self, + indexer: &Indexer, + indexes: &indexes::Vecs, + time: &time::Vecs, + starting_indexes: &ComputeIndexes, + exit: &Exit, + ) -> Result<()> { + let mut height_to_timestamp_fixed_iter = + time.height_to_timestamp_fixed.into_iter(); + let mut prev = Height::ZERO; + self.height_to_24h_block_count.compute_transform( + starting_indexes.height, + &time.height_to_timestamp_fixed, + |(h, t, ..)| { + while t.difference_in_days_between(height_to_timestamp_fixed_iter.get_unwrap(prev)) + > 0 + { + prev.increment(); + if prev > h { + unreachable!() + } + } + (h, StoredU32::from(*h + 1 - *prev)) + }, + exit, + )?; + + self.indexes_to_block_count + .compute_all(indexes, starting_indexes, exit, |v| { + v.compute_range( + starting_indexes.height, + &indexer.vecs.block.height_to_weight, + |h| (h, StoredU32::from(1_u32)), + exit, + )?; + Ok(()) + })?; + + self.indexes_to_1w_block_count + .compute_all(starting_indexes, exit, |v| { + v.compute_sum( + starting_indexes.dateindex, + self.indexes_to_block_count.dateindex.unwrap_sum(), + 7, + exit, + )?; + Ok(()) + })?; + + self.indexes_to_1m_block_count + .compute_all(starting_indexes, exit, |v| { + v.compute_sum( + starting_indexes.dateindex, + self.indexes_to_block_count.dateindex.unwrap_sum(), + 30, + exit, + )?; + Ok(()) + })?; + + self.indexes_to_1y_block_count + .compute_all(starting_indexes, exit, |v| { + v.compute_sum( + starting_indexes.dateindex, + 
self.indexes_to_block_count.dateindex.unwrap_sum(), + 365, + exit, + )?; + Ok(()) + })?; + + Ok(()) + } +} diff --git a/crates/brk_computer/src/blocks/count/import.rs b/crates/brk_computer/src/blocks/count/import.rs new file mode 100644 index 000000000..0eeeff77e --- /dev/null +++ b/crates/brk_computer/src/blocks/count/import.rs @@ -0,0 +1,108 @@ +use brk_error::Result; +use brk_types::{StoredU64, Version}; +use vecdb::{Database, EagerVec, ImportableVec, IterableCloneableVec, LazyVecFrom1}; + +use super::Vecs; +use crate::{ + blocks::{ + TARGET_BLOCKS_PER_DAY, TARGET_BLOCKS_PER_DECADE, TARGET_BLOCKS_PER_MONTH, + TARGET_BLOCKS_PER_QUARTER, TARGET_BLOCKS_PER_SEMESTER, TARGET_BLOCKS_PER_WEEK, + TARGET_BLOCKS_PER_YEAR, + }, + indexes, + internal::{ComputedVecsFromDateIndex, ComputedVecsFromHeight, Source, VecBuilderOptions}, +}; + +impl Vecs { + pub fn forced_import( + db: &Database, + version: Version, + indexes: &indexes::Vecs, + ) -> Result { + let v0 = Version::ZERO; + let last = || VecBuilderOptions::default().add_last(); + let sum_cum = || VecBuilderOptions::default().add_sum().add_cumulative(); + + Ok(Self { + dateindex_to_block_count_target: LazyVecFrom1::init( + "block_count_target", + version + v0, + indexes.time.dateindex_to_dateindex.boxed_clone(), + |_, _| Some(StoredU64::from(TARGET_BLOCKS_PER_DAY)), + ), + weekindex_to_block_count_target: LazyVecFrom1::init( + "block_count_target", + version + v0, + indexes.time.weekindex_to_weekindex.boxed_clone(), + |_, _| Some(StoredU64::from(TARGET_BLOCKS_PER_WEEK)), + ), + monthindex_to_block_count_target: LazyVecFrom1::init( + "block_count_target", + version + v0, + indexes.time.monthindex_to_monthindex.boxed_clone(), + |_, _| Some(StoredU64::from(TARGET_BLOCKS_PER_MONTH)), + ), + quarterindex_to_block_count_target: LazyVecFrom1::init( + "block_count_target", + version + v0, + indexes.time.quarterindex_to_quarterindex.boxed_clone(), + |_, _| Some(StoredU64::from(TARGET_BLOCKS_PER_QUARTER)), + ), + 
semesterindex_to_block_count_target: LazyVecFrom1::init( + "block_count_target", + version + v0, + indexes.time.semesterindex_to_semesterindex.boxed_clone(), + |_, _| Some(StoredU64::from(TARGET_BLOCKS_PER_SEMESTER)), + ), + yearindex_to_block_count_target: LazyVecFrom1::init( + "block_count_target", + version + v0, + indexes.time.yearindex_to_yearindex.boxed_clone(), + |_, _| Some(StoredU64::from(TARGET_BLOCKS_PER_YEAR)), + ), + decadeindex_to_block_count_target: LazyVecFrom1::init( + "block_count_target", + version + v0, + indexes.time.decadeindex_to_decadeindex.boxed_clone(), + |_, _| Some(StoredU64::from(TARGET_BLOCKS_PER_DECADE)), + ), + height_to_24h_block_count: EagerVec::forced_import( + db, + "24h_block_count", + version + v0, + )?, + indexes_to_block_count: ComputedVecsFromHeight::forced_import( + db, + "block_count", + Source::Compute, + version + v0, + indexes, + sum_cum(), + )?, + indexes_to_1w_block_count: ComputedVecsFromDateIndex::forced_import( + db, + "1w_block_count", + Source::Compute, + version + v0, + indexes, + last(), + )?, + indexes_to_1m_block_count: ComputedVecsFromDateIndex::forced_import( + db, + "1m_block_count", + Source::Compute, + version + v0, + indexes, + last(), + )?, + indexes_to_1y_block_count: ComputedVecsFromDateIndex::forced_import( + db, + "1y_block_count", + Source::Compute, + version + v0, + indexes, + last(), + )?, + }) + } +} diff --git a/crates/brk_computer/src/chain/block/mod.rs b/crates/brk_computer/src/blocks/count/mod.rs similarity index 100% rename from crates/brk_computer/src/chain/block/mod.rs rename to crates/brk_computer/src/blocks/count/mod.rs diff --git a/crates/brk_computer/src/blocks/count/vecs.rs b/crates/brk_computer/src/blocks/count/vecs.rs new file mode 100644 index 000000000..c67a71ec4 --- /dev/null +++ b/crates/brk_computer/src/blocks/count/vecs.rs @@ -0,0 +1,24 @@ +use brk_traversable::Traversable; +use brk_types::{ + DateIndex, DecadeIndex, MonthIndex, QuarterIndex, SemesterIndex, + StoredU32, 
StoredU64, WeekIndex, YearIndex, +}; +use vecdb::LazyVecFrom1; + +use crate::internal::{ComputedVecsFromDateIndex, ComputedVecsFromHeight}; + +#[derive(Clone, Traversable)] +pub struct Vecs { + pub dateindex_to_block_count_target: LazyVecFrom1, + pub weekindex_to_block_count_target: LazyVecFrom1, + pub monthindex_to_block_count_target: LazyVecFrom1, + pub quarterindex_to_block_count_target: LazyVecFrom1, + pub semesterindex_to_block_count_target: LazyVecFrom1, + pub yearindex_to_block_count_target: LazyVecFrom1, + pub decadeindex_to_block_count_target: LazyVecFrom1, + pub height_to_24h_block_count: vecdb::EagerVec>, + pub indexes_to_block_count: ComputedVecsFromHeight, + pub indexes_to_1w_block_count: ComputedVecsFromDateIndex, + pub indexes_to_1m_block_count: ComputedVecsFromDateIndex, + pub indexes_to_1y_block_count: ComputedVecsFromDateIndex, +} diff --git a/crates/brk_computer/src/blocks/difficulty/compute.rs b/crates/brk_computer/src/blocks/difficulty/compute.rs new file mode 100644 index 000000000..36b3b5cdb --- /dev/null +++ b/crates/brk_computer/src/blocks/difficulty/compute.rs @@ -0,0 +1,63 @@ +use brk_error::Result; +use brk_types::StoredU32; +use vecdb::{Exit, TypedVecIterator}; + +use super::Vecs; +use super::super::TARGET_BLOCKS_PER_DAY_F32; +use crate::{indexes, ComputeIndexes}; + +impl Vecs { + pub fn compute( + &mut self, + indexes: &indexes::Vecs, + starting_indexes: &ComputeIndexes, + exit: &Exit, + ) -> Result<()> { + let mut height_to_difficultyepoch_iter = + indexes.block.height_to_difficultyepoch.into_iter(); + self.indexes_to_difficultyepoch + .compute_all(starting_indexes, exit, |vec| { + let mut height_count_iter = indexes.time.dateindex_to_height_count.into_iter(); + vec.compute_transform( + starting_indexes.dateindex, + &indexes.time.dateindex_to_first_height, + |(di, height, ..)| { + ( + di, + height_to_difficultyepoch_iter + .get_unwrap(height + (*height_count_iter.get_unwrap(di) - 1)), + ) + }, + exit, + )?; + Ok(()) + })?; + + 
self.indexes_to_blocks_before_next_difficulty_adjustment + .compute_all(indexes, starting_indexes, exit, |v| { + v.compute_transform( + starting_indexes.height, + &indexes.block.height_to_height, + |(h, ..)| (h, StoredU32::from(h.left_before_next_diff_adj())), + exit, + )?; + Ok(()) + })?; + + self.indexes_to_days_before_next_difficulty_adjustment + .compute_all(indexes, starting_indexes, exit, |v| { + v.compute_transform( + starting_indexes.height, + self.indexes_to_blocks_before_next_difficulty_adjustment + .height + .as_ref() + .unwrap(), + |(h, blocks, ..)| (h, (*blocks as f32 / TARGET_BLOCKS_PER_DAY_F32).into()), + exit, + )?; + Ok(()) + })?; + + Ok(()) + } +} diff --git a/crates/brk_computer/src/chain/epoch/import.rs b/crates/brk_computer/src/blocks/difficulty/import.rs similarity index 58% rename from crates/brk_computer/src/chain/epoch/import.rs rename to crates/brk_computer/src/blocks/difficulty/import.rs index 9c3cdac4a..848ccd234 100644 --- a/crates/brk_computer/src/chain/epoch/import.rs +++ b/crates/brk_computer/src/blocks/difficulty/import.rs @@ -4,8 +4,8 @@ use vecdb::Database; use super::Vecs; use crate::{ - grouped::{ComputedVecsFromDateIndex, ComputedVecsFromHeight, Source, VecBuilderOptions}, indexes, + internal::{ComputedVecsFromDateIndex, ComputedVecsFromHeight, Source, VecBuilderOptions}, }; impl Vecs { @@ -23,15 +23,6 @@ impl Vecs { indexes, last(), )?, - indexes_to_halvingepoch: ComputedVecsFromDateIndex::forced_import( - db, - "halvingepoch", - Source::Compute, - version + v0, - indexes, - last(), - )?, - // Countdown metrics (moved from mining) indexes_to_blocks_before_next_difficulty_adjustment: ComputedVecsFromHeight::forced_import( db, @@ -50,22 +41,6 @@ impl Vecs { indexes, last(), )?, - indexes_to_blocks_before_next_halving: ComputedVecsFromHeight::forced_import( - db, - "blocks_before_next_halving", - Source::Compute, - version + v2, - indexes, - last(), - )?, - indexes_to_days_before_next_halving: 
ComputedVecsFromHeight::forced_import( - db, - "days_before_next_halving", - Source::Compute, - version + v2, - indexes, - last(), - )?, }) } } diff --git a/crates/brk_computer/src/chain/coinbase/mod.rs b/crates/brk_computer/src/blocks/difficulty/mod.rs similarity index 100% rename from crates/brk_computer/src/chain/coinbase/mod.rs rename to crates/brk_computer/src/blocks/difficulty/mod.rs diff --git a/crates/brk_computer/src/blocks/difficulty/vecs.rs b/crates/brk_computer/src/blocks/difficulty/vecs.rs new file mode 100644 index 000000000..0fc429ba0 --- /dev/null +++ b/crates/brk_computer/src/blocks/difficulty/vecs.rs @@ -0,0 +1,12 @@ +use brk_traversable::Traversable; +use brk_types::{DifficultyEpoch, StoredF32, StoredU32}; + +use crate::internal::{ComputedVecsFromDateIndex, ComputedVecsFromHeight}; + +/// Difficulty epoch metrics and countdown +#[derive(Clone, Traversable)] +pub struct Vecs { + pub indexes_to_difficultyepoch: ComputedVecsFromDateIndex, + pub indexes_to_blocks_before_next_difficulty_adjustment: ComputedVecsFromHeight, + pub indexes_to_days_before_next_difficulty_adjustment: ComputedVecsFromHeight, +} diff --git a/crates/brk_computer/src/chain/epoch/compute.rs b/crates/brk_computer/src/blocks/halving/compute.rs similarity index 52% rename from crates/brk_computer/src/chain/epoch/compute.rs rename to crates/brk_computer/src/blocks/halving/compute.rs index b3bb30719..fa8ce7a45 100644 --- a/crates/brk_computer/src/chain/epoch/compute.rs +++ b/crates/brk_computer/src/blocks/halving/compute.rs @@ -3,7 +3,8 @@ use brk_types::StoredU32; use vecdb::{Exit, TypedVecIterator}; use super::Vecs; -use crate::{chain::TARGET_BLOCKS_PER_DAY_F32, indexes, ComputeIndexes}; +use super::super::TARGET_BLOCKS_PER_DAY_F32; +use crate::{indexes, ComputeIndexes}; impl Vecs { pub fn compute( @@ -12,26 +13,6 @@ impl Vecs { starting_indexes: &ComputeIndexes, exit: &Exit, ) -> Result<()> { - let mut height_to_difficultyepoch_iter = - 
indexes.block.height_to_difficultyepoch.into_iter(); - self.indexes_to_difficultyepoch - .compute_all(starting_indexes, exit, |vec| { - let mut height_count_iter = indexes.time.dateindex_to_height_count.into_iter(); - vec.compute_transform( - starting_indexes.dateindex, - &indexes.time.dateindex_to_first_height, - |(di, height, ..)| { - ( - di, - height_to_difficultyepoch_iter - .get_unwrap(height + (*height_count_iter.get_unwrap(di) - 1)), - ) - }, - exit, - )?; - Ok(()) - })?; - let mut height_to_halvingepoch_iter = indexes.block.height_to_halvingepoch.into_iter(); self.indexes_to_halvingepoch .compute_all(starting_indexes, exit, |vec| { @@ -51,32 +32,6 @@ impl Vecs { Ok(()) })?; - // Countdown metrics (moved from mining) - self.indexes_to_blocks_before_next_difficulty_adjustment - .compute_all(indexes, starting_indexes, exit, |v| { - v.compute_transform( - starting_indexes.height, - &indexes.block.height_to_height, - |(h, ..)| (h, StoredU32::from(h.left_before_next_diff_adj())), - exit, - )?; - Ok(()) - })?; - - self.indexes_to_days_before_next_difficulty_adjustment - .compute_all(indexes, starting_indexes, exit, |v| { - v.compute_transform( - starting_indexes.height, - self.indexes_to_blocks_before_next_difficulty_adjustment - .height - .as_ref() - .unwrap(), - |(h, blocks, ..)| (h, (*blocks as f32 / TARGET_BLOCKS_PER_DAY_F32).into()), - exit, - )?; - Ok(()) - })?; - self.indexes_to_blocks_before_next_halving.compute_all( indexes, starting_indexes, diff --git a/crates/brk_computer/src/blocks/halving/import.rs b/crates/brk_computer/src/blocks/halving/import.rs new file mode 100644 index 000000000..5e849dcd1 --- /dev/null +++ b/crates/brk_computer/src/blocks/halving/import.rs @@ -0,0 +1,44 @@ +use brk_error::Result; +use brk_types::Version; +use vecdb::Database; + +use super::Vecs; +use crate::{ + indexes, + internal::{ComputedVecsFromDateIndex, ComputedVecsFromHeight, Source, VecBuilderOptions}, +}; + +impl Vecs { + pub fn forced_import(db: &Database, version: 
Version, indexes: &indexes::Vecs) -> Result { + let v0 = Version::ZERO; + let v2 = Version::TWO; + let last = || VecBuilderOptions::default().add_last(); + + Ok(Self { + indexes_to_halvingepoch: ComputedVecsFromDateIndex::forced_import( + db, + "halvingepoch", + Source::Compute, + version + v0, + indexes, + last(), + )?, + indexes_to_blocks_before_next_halving: ComputedVecsFromHeight::forced_import( + db, + "blocks_before_next_halving", + Source::Compute, + version + v2, + indexes, + last(), + )?, + indexes_to_days_before_next_halving: ComputedVecsFromHeight::forced_import( + db, + "days_before_next_halving", + Source::Compute, + version + v2, + indexes, + last(), + )?, + }) + } +} diff --git a/crates/brk_computer/src/chain/epoch/mod.rs b/crates/brk_computer/src/blocks/halving/mod.rs similarity index 100% rename from crates/brk_computer/src/chain/epoch/mod.rs rename to crates/brk_computer/src/blocks/halving/mod.rs diff --git a/crates/brk_computer/src/blocks/halving/vecs.rs b/crates/brk_computer/src/blocks/halving/vecs.rs new file mode 100644 index 000000000..950de887a --- /dev/null +++ b/crates/brk_computer/src/blocks/halving/vecs.rs @@ -0,0 +1,12 @@ +use brk_traversable::Traversable; +use brk_types::{HalvingEpoch, StoredF32, StoredU32}; + +use crate::internal::{ComputedVecsFromDateIndex, ComputedVecsFromHeight}; + +/// Halving epoch metrics and countdown +#[derive(Clone, Traversable)] +pub struct Vecs { + pub indexes_to_halvingepoch: ComputedVecsFromDateIndex, + pub indexes_to_blocks_before_next_halving: ComputedVecsFromHeight, + pub indexes_to_days_before_next_halving: ComputedVecsFromHeight, +} diff --git a/crates/brk_computer/src/blocks/import.rs b/crates/brk_computer/src/blocks/import.rs new file mode 100644 index 000000000..d33d3dd56 --- /dev/null +++ b/crates/brk_computer/src/blocks/import.rs @@ -0,0 +1,62 @@ +use std::path::Path; + +use brk_error::Result; +use brk_indexer::Indexer; +use brk_traversable::Traversable; +use brk_types::Version; +use 
vecdb::{Database, PAGE_SIZE}; + +use crate::{indexes, price}; + +use super::{ + CountVecs, DifficultyVecs, HalvingVecs, IntervalVecs, MiningVecs, + RewardsVecs, SizeVecs, TimeVecs, Vecs, WeightVecs, +}; + +impl Vecs { + pub fn forced_import( + parent_path: &Path, + parent_version: Version, + indexer: &Indexer, + indexes: &indexes::Vecs, + price: Option<&price::Vecs>, + ) -> Result { + let db = Database::open(&parent_path.join(super::DB_NAME))?; + db.set_min_len(PAGE_SIZE * 50_000_000)?; + + let version = parent_version + Version::ZERO; + let compute_dollars = price.is_some(); + + let count = CountVecs::forced_import(&db, version, indexes)?; + let interval = IntervalVecs::forced_import(&db, version, indexer, indexes)?; + let size = SizeVecs::forced_import(&db, version, indexer, indexes)?; + let weight = WeightVecs::forced_import(&db, version, indexer, indexes)?; + let time = TimeVecs::forced_import(&db, version, indexer, indexes)?; + let mining = MiningVecs::forced_import(&db, version, indexer, indexes)?; + let rewards = RewardsVecs::forced_import(&db, version, indexes, compute_dollars)?; + let difficulty = DifficultyVecs::forced_import(&db, version, indexes)?; + let halving = HalvingVecs::forced_import(&db, version, indexes)?; + + let this = Self { + db, + count, + interval, + size, + weight, + time, + mining, + rewards, + difficulty, + halving, + }; + + this.db.retain_regions( + this.iter_any_exportable() + .flat_map(|v| v.region_names()) + .collect(), + )?; + this.db.compact()?; + + Ok(this) + } +} diff --git a/crates/brk_computer/src/blocks/interval/compute.rs b/crates/brk_computer/src/blocks/interval/compute.rs new file mode 100644 index 000000000..19b5965de --- /dev/null +++ b/crates/brk_computer/src/blocks/interval/compute.rs @@ -0,0 +1,23 @@ +use brk_error::Result; +use vecdb::Exit; + +use super::Vecs; +use crate::{ComputeIndexes, indexes}; + +impl Vecs { + pub fn compute( + &mut self, + indexes: &indexes::Vecs, + starting_indexes: &ComputeIndexes, + exit: 
&Exit, + ) -> Result<()> { + self.indexes_to_block_interval.compute_rest( + indexes, + starting_indexes, + exit, + Some(&self.height_to_interval), + )?; + + Ok(()) + } +} diff --git a/crates/brk_computer/src/blocks/interval/import.rs b/crates/brk_computer/src/blocks/interval/import.rs new file mode 100644 index 000000000..583363e1b --- /dev/null +++ b/crates/brk_computer/src/blocks/interval/import.rs @@ -0,0 +1,56 @@ +use brk_error::Result; +use brk_indexer::Indexer; +use brk_types::{CheckedSub, Height, Timestamp, Version}; +use vecdb::{Database, IterableCloneableVec, LazyVecFrom1}; + +use super::Vecs; +use crate::{ + indexes, + internal::{ComputedVecsFromHeight, Source, VecBuilderOptions}, +}; + +impl Vecs { + pub fn forced_import( + db: &Database, + version: Version, + indexer: &Indexer, + indexes: &indexes::Vecs, + ) -> Result { + let v0 = Version::ZERO; + let stats = || { + VecBuilderOptions::default() + .add_average() + .add_minmax() + .add_percentiles() + }; + + let height_to_interval = LazyVecFrom1::init( + "interval", + version + v0, + indexer.vecs.block.height_to_timestamp.boxed_clone(), + |height: Height, timestamp_iter| { + let timestamp = timestamp_iter.get(height)?; + let interval = height.decremented().map_or(Timestamp::ZERO, |prev_h| { + timestamp_iter + .get(prev_h) + .map_or(Timestamp::ZERO, |prev_t| { + timestamp.checked_sub(prev_t).unwrap_or(Timestamp::ZERO) + }) + }); + Some(interval) + }, + ); + + Ok(Self { + indexes_to_block_interval: ComputedVecsFromHeight::forced_import( + db, + "block_interval", + Source::Vec(height_to_interval.boxed_clone()), + version + v0, + indexes, + stats(), + )?, + height_to_interval, + }) + } +} diff --git a/crates/brk_computer/src/chain/mining/mod.rs b/crates/brk_computer/src/blocks/interval/mod.rs similarity index 100% rename from crates/brk_computer/src/chain/mining/mod.rs rename to crates/brk_computer/src/blocks/interval/mod.rs diff --git a/crates/brk_computer/src/blocks/interval/vecs.rs 
b/crates/brk_computer/src/blocks/interval/vecs.rs new file mode 100644 index 000000000..64c36d2cb --- /dev/null +++ b/crates/brk_computer/src/blocks/interval/vecs.rs @@ -0,0 +1,11 @@ +use brk_traversable::Traversable; +use brk_types::{Height, Timestamp}; +use vecdb::LazyVecFrom1; + +use crate::internal::ComputedVecsFromHeight; + +#[derive(Clone, Traversable)] +pub struct Vecs { + pub height_to_interval: LazyVecFrom1, + pub indexes_to_block_interval: ComputedVecsFromHeight, +} diff --git a/crates/brk_computer/src/chain/mining/compute.rs b/crates/brk_computer/src/blocks/mining/compute.rs similarity index 88% rename from crates/brk_computer/src/chain/mining/compute.rs rename to crates/brk_computer/src/blocks/mining/compute.rs index 10b1d90ed..02f5b315c 100644 --- a/crates/brk_computer/src/chain/mining/compute.rs +++ b/crates/brk_computer/src/blocks/mining/compute.rs @@ -4,8 +4,8 @@ use brk_types::{StoredF32, StoredF64}; use vecdb::Exit; use super::Vecs; +use super::super::{count, rewards, ONE_TERA_HASH, TARGET_BLOCKS_PER_DAY_F64}; use crate::{ - chain::{block, coinbase, ONE_TERA_HASH, TARGET_BLOCKS_PER_DAY_F64}, indexes, utils::OptionExt, ComputeIndexes, @@ -16,8 +16,8 @@ impl Vecs { &mut self, indexer: &Indexer, indexes: &indexes::Vecs, - block_vecs: &block::Vecs, - coinbase_vecs: &coinbase::Vecs, + count_vecs: &count::Vecs, + rewards_vecs: &rewards::Vecs, starting_indexes: &ComputeIndexes, exit: &Exit, ) -> Result<()> { @@ -44,7 +44,7 @@ impl Vecs { .compute_all(indexes, starting_indexes, exit, |v| { v.compute_transform2( starting_indexes.height, - &block_vecs.height_to_24h_block_count, + &count_vecs.height_to_24h_block_count, self.indexes_to_difficulty_as_hash.height.u(), |(i, block_count_sum, difficulty_as_hash, ..)| { ( @@ -123,10 +123,16 @@ impl Vecs { .compute_all(indexes, starting_indexes, exit, |v| { v.compute_transform2( starting_indexes.height, - &coinbase_vecs.height_to_24h_coinbase_usd_sum, + &rewards_vecs.height_to_24h_coinbase_usd_sum, 
self.indexes_to_hash_rate.height.u(), |(i, coinbase_sum, hashrate, ..)| { - (i, (*coinbase_sum / (*hashrate / ONE_TERA_HASH)).into()) + let hashrate_ths = *hashrate / ONE_TERA_HASH; + let price = if hashrate_ths == 0.0 { + StoredF32::NAN + } else { + (*coinbase_sum / hashrate_ths).into() + }; + (i, price) }, exit, )?; @@ -148,13 +154,16 @@ impl Vecs { .compute_all(indexes, starting_indexes, exit, |v| { v.compute_transform2( starting_indexes.height, - &coinbase_vecs.height_to_24h_coinbase_sum, + &rewards_vecs.height_to_24h_coinbase_sum, self.indexes_to_hash_rate.height.u(), |(i, coinbase_sum, hashrate, ..)| { - ( - i, - (*coinbase_sum as f64 / (*hashrate / ONE_TERA_HASH)).into(), - ) + let hashrate_ths = *hashrate / ONE_TERA_HASH; + let value = if hashrate_ths == 0.0 { + StoredF32::NAN + } else { + StoredF32::from(*coinbase_sum as f64 / hashrate_ths) + }; + (i, value) }, exit, )?; diff --git a/crates/brk_computer/src/chain/mining/import.rs b/crates/brk_computer/src/blocks/mining/import.rs similarity index 98% rename from crates/brk_computer/src/chain/mining/import.rs rename to crates/brk_computer/src/blocks/mining/import.rs index 643be627c..315ebe407 100644 --- a/crates/brk_computer/src/chain/mining/import.rs +++ b/crates/brk_computer/src/blocks/mining/import.rs @@ -5,7 +5,7 @@ use vecdb::{Database, IterableCloneableVec}; use super::Vecs; use crate::{ - grouped::{ComputedVecsFromDateIndex, ComputedVecsFromHeight, Source, VecBuilderOptions}, + internal::{ComputedVecsFromDateIndex, ComputedVecsFromHeight, Source, VecBuilderOptions}, indexes, }; diff --git a/crates/brk_computer/src/chain/output_type/mod.rs b/crates/brk_computer/src/blocks/mining/mod.rs similarity index 100% rename from crates/brk_computer/src/chain/output_type/mod.rs rename to crates/brk_computer/src/blocks/mining/mod.rs diff --git a/crates/brk_computer/src/chain/mining/vecs.rs b/crates/brk_computer/src/blocks/mining/vecs.rs similarity index 95% rename from crates/brk_computer/src/chain/mining/vecs.rs 
rename to crates/brk_computer/src/blocks/mining/vecs.rs index 825c4f905..c14077999 100644 --- a/crates/brk_computer/src/chain/mining/vecs.rs +++ b/crates/brk_computer/src/blocks/mining/vecs.rs @@ -1,7 +1,7 @@ use brk_traversable::Traversable; use brk_types::{StoredF32, StoredF64}; -use crate::grouped::{ComputedVecsFromDateIndex, ComputedVecsFromHeight}; +use crate::internal::{ComputedVecsFromDateIndex, ComputedVecsFromHeight}; /// Mining-related metrics: hash rate, hash price, hash value, difficulty #[derive(Clone, Traversable)] diff --git a/crates/brk_computer/src/chain/mod.rs b/crates/brk_computer/src/blocks/mod.rs similarity index 58% rename from crates/brk_computer/src/chain/mod.rs rename to crates/brk_computer/src/blocks/mod.rs index 116107a62..420a80754 100644 --- a/crates/brk_computer/src/chain/mod.rs +++ b/crates/brk_computer/src/blocks/mod.rs @@ -1,25 +1,30 @@ -pub mod block; -pub mod coinbase; -mod compute; -pub mod epoch; -mod import; +pub mod count; +pub mod difficulty; +pub mod halving; +pub mod interval; pub mod mining; -pub mod output_type; -pub mod transaction; -pub mod volume; +pub mod rewards; +pub mod size; +pub mod time; +pub mod weight; + +mod compute; +mod import; use brk_traversable::Traversable; use vecdb::Database; -pub use block::Vecs as BlockVecs; -pub use coinbase::Vecs as CoinbaseVecs; -pub use epoch::Vecs as EpochVecs; +pub use count::Vecs as CountVecs; +pub use difficulty::Vecs as DifficultyVecs; +pub use halving::Vecs as HalvingVecs; +pub use interval::Vecs as IntervalVecs; pub use mining::Vecs as MiningVecs; -pub use output_type::Vecs as OutputTypeVecs; -pub use transaction::Vecs as TransactionVecs; -pub use volume::Vecs as VolumeVecs; +pub use rewards::Vecs as RewardsVecs; +pub use size::Vecs as SizeVecs; +pub use time::Vecs as TimeVecs; +pub use weight::Vecs as WeightVecs; -pub const DB_NAME: &str = "chain"; +pub const DB_NAME: &str = "blocks"; pub(crate) const TARGET_BLOCKS_PER_DAY_F64: f64 = 144.0; pub(crate) const 
TARGET_BLOCKS_PER_DAY_F32: f32 = 144.0; @@ -32,16 +37,18 @@ pub(crate) const TARGET_BLOCKS_PER_YEAR: u64 = 2 * TARGET_BLOCKS_PER_SEMESTER; pub(crate) const TARGET_BLOCKS_PER_DECADE: u64 = 10 * TARGET_BLOCKS_PER_YEAR; pub(crate) const ONE_TERA_HASH: f64 = 1_000_000_000_000.0; -/// Main chain metrics struct composed of sub-modules #[derive(Clone, Traversable)] pub struct Vecs { #[traversable(skip)] pub(crate) db: Database, - pub block: BlockVecs, - pub epoch: EpochVecs, + + pub count: CountVecs, + pub interval: IntervalVecs, + pub size: SizeVecs, + pub weight: WeightVecs, + pub time: TimeVecs, pub mining: MiningVecs, - pub coinbase: CoinbaseVecs, - pub transaction: TransactionVecs, - pub output_type: OutputTypeVecs, - pub volume: VolumeVecs, + pub rewards: RewardsVecs, + pub difficulty: DifficultyVecs, + pub halving: HalvingVecs, } diff --git a/crates/brk_computer/src/chain/coinbase/compute.rs b/crates/brk_computer/src/blocks/rewards/compute.rs similarity index 71% rename from crates/brk_computer/src/chain/coinbase/compute.rs rename to crates/brk_computer/src/blocks/rewards/compute.rs index f0627f616..c1e3041ba 100644 --- a/crates/brk_computer/src/chain/coinbase/compute.rs +++ b/crates/brk_computer/src/blocks/rewards/compute.rs @@ -5,11 +5,12 @@ use vecdb::{Exit, IterableVec, TypedVecIterator, VecIndex}; use super::Vecs; use crate::{ + transactions, ComputeIndexes, - chain::{block, transaction}, indexes, price, utils::OptionExt, }; +use super::super::count; impl Vecs { #[allow(clippy::too_many_arguments)] @@ -17,8 +18,8 @@ impl Vecs { &mut self, indexer: &Indexer, indexes: &indexes::Vecs, - block_vecs: &block::Vecs, - transaction_vecs: &transaction::Vecs, + count_vecs: &count::Vecs, + transactions_fees: &transactions::FeesVecs, starting_indexes: &ComputeIndexes, price: Option<&price::Vecs>, exit: &Exit, @@ -61,7 +62,7 @@ impl Vecs { .into_iter(); self.height_to_24h_coinbase_sum.compute_transform( starting_indexes.height, - &block_vecs.height_to_24h_block_count, + 
&count_vecs.height_to_24h_block_count, |(h, count, ..)| { let range = *h - (*count - 1)..=*h; let sum = range @@ -82,7 +83,7 @@ impl Vecs { { self.height_to_24h_coinbase_usd_sum.compute_transform( starting_indexes.height, - &block_vecs.height_to_24h_block_count, + &count_vecs.height_to_24h_block_count, |(h, count, ..)| { let range = *h - (*count - 1)..=*h; let sum = range @@ -100,7 +101,7 @@ impl Vecs { vec.compute_transform2( starting_indexes.height, self.indexes_to_coinbase.sats.height.u(), - transaction_vecs.indexes_to_fee.sats.height.unwrap_sum(), + transactions_fees.indexes_to_fee.sats.height.unwrap_sum(), |(height, coinbase, fees, ..)| { ( height, @@ -135,33 +136,18 @@ impl Vecs { }, )?; - self.indexes_to_inflation_rate - .compute_all(starting_indexes, exit, |v| { - v.compute_transform2( - starting_indexes.dateindex, - self.indexes_to_subsidy.sats.dateindex.unwrap_sum(), - self.indexes_to_subsidy.sats.dateindex.unwrap_cumulative(), - |(i, subsidy_1d_sum, subsidy_cumulative, ..)| { - ( - i, - (365.0 * *subsidy_1d_sum as f64 / *subsidy_cumulative as f64 * 100.0) - .into(), - ) - }, - exit, - )?; - Ok(()) - })?; - self.dateindex_to_fee_dominance.compute_transform2( starting_indexes.dateindex, - transaction_vecs.indexes_to_fee.sats.dateindex.unwrap_sum(), + transactions_fees.indexes_to_fee.sats.dateindex.unwrap_sum(), self.indexes_to_coinbase.sats.dateindex.unwrap_sum(), |(i, fee, coinbase, ..)| { - ( - i, - StoredF32::from(u64::from(fee) as f64 / u64::from(coinbase) as f64 * 100.0), - ) + let coinbase_f64 = u64::from(coinbase) as f64; + let dominance = if coinbase_f64 == 0.0 { + StoredF32::NAN + } else { + StoredF32::from(u64::from(fee) as f64 / coinbase_f64 * 100.0) + }; + (i, dominance) }, exit, )?; @@ -171,15 +157,18 @@ impl Vecs { self.indexes_to_subsidy.sats.dateindex.unwrap_sum(), self.indexes_to_coinbase.sats.dateindex.unwrap_sum(), |(i, subsidy, coinbase, ..)| { - ( - i, - StoredF32::from(u64::from(subsidy) as f64 / u64::from(coinbase) as f64 * 100.0), - 
) + let coinbase_f64 = u64::from(coinbase) as f64; + let dominance = if coinbase_f64 == 0.0 { + StoredF32::NAN + } else { + StoredF32::from(u64::from(subsidy) as f64 / coinbase_f64 * 100.0) + }; + (i, dominance) }, exit, )?; - if self.indexes_to_subsidy_usd_1y_sma.is_some() { + if let Some(sma) = self.indexes_to_subsidy_usd_1y_sma.as_mut() { let date_to_coinbase_usd_sum = self .indexes_to_coinbase .dollars @@ -188,36 +177,15 @@ impl Vecs { .dateindex .unwrap_sum(); - self.indexes_to_subsidy_usd_1y_sma - .as_mut() - .unwrap() - .compute_all(starting_indexes, exit, |v| { - v.compute_sma( - starting_indexes.dateindex, - date_to_coinbase_usd_sum, - 365, - exit, - )?; - Ok(()) - })?; - - self.indexes_to_puell_multiple - .as_mut() - .unwrap() - .compute_all(starting_indexes, exit, |v| { - v.compute_divide( - starting_indexes.dateindex, - date_to_coinbase_usd_sum, - self.indexes_to_subsidy_usd_1y_sma - .as_ref() - .unwrap() - .dateindex - .as_ref() - .unwrap(), - exit, - )?; - Ok(()) - })?; + sma.compute_all(starting_indexes, exit, |v| { + v.compute_sma( + starting_indexes.dateindex, + date_to_coinbase_usd_sum, + 365, + exit, + )?; + Ok(()) + })?; } Ok(()) diff --git a/crates/brk_computer/src/chain/coinbase/import.rs b/crates/brk_computer/src/blocks/rewards/import.rs similarity index 78% rename from crates/brk_computer/src/chain/coinbase/import.rs rename to crates/brk_computer/src/blocks/rewards/import.rs index 36e619aa5..3eda32bf2 100644 --- a/crates/brk_computer/src/chain/coinbase/import.rs +++ b/crates/brk_computer/src/blocks/rewards/import.rs @@ -4,7 +4,7 @@ use vecdb::{Database, EagerVec, ImportableVec}; use super::Vecs; use crate::{ - grouped::{ComputedValueVecsFromHeight, ComputedVecsFromDateIndex, Source, VecBuilderOptions}, + internal::{ComputedValueVecsFromHeight, ComputedVecsFromDateIndex, Source, VecBuilderOptions}, indexes, }; @@ -84,26 +84,6 @@ impl Vecs { ) }) .transpose()?, - indexes_to_puell_multiple: compute_dollars - .then(|| { - 
ComputedVecsFromDateIndex::forced_import( - db, - "puell_multiple", - Source::Compute, - version + v0, - indexes, - last(), - ) - }) - .transpose()?, - indexes_to_inflation_rate: ComputedVecsFromDateIndex::forced_import( - db, - "inflation_rate", - Source::Compute, - version + v0, - indexes, - last(), - )?, }) } } diff --git a/crates/brk_computer/src/chain/transaction/mod.rs b/crates/brk_computer/src/blocks/rewards/mod.rs similarity index 100% rename from crates/brk_computer/src/chain/transaction/mod.rs rename to crates/brk_computer/src/blocks/rewards/mod.rs diff --git a/crates/brk_computer/src/chain/coinbase/vecs.rs b/crates/brk_computer/src/blocks/rewards/vecs.rs similarity index 77% rename from crates/brk_computer/src/chain/coinbase/vecs.rs rename to crates/brk_computer/src/blocks/rewards/vecs.rs index 150df5c82..e1a0188ea 100644 --- a/crates/brk_computer/src/chain/coinbase/vecs.rs +++ b/crates/brk_computer/src/blocks/rewards/vecs.rs @@ -2,7 +2,7 @@ use brk_traversable::Traversable; use brk_types::{DateIndex, Dollars, Height, Sats, StoredF32}; use vecdb::{EagerVec, PcoVec}; -use crate::grouped::{ComputedValueVecsFromHeight, ComputedVecsFromDateIndex}; +use crate::internal::{ComputedValueVecsFromHeight, ComputedVecsFromDateIndex}; /// Coinbase/subsidy/rewards metrics #[derive(Clone, Traversable)] @@ -15,6 +15,4 @@ pub struct Vecs { pub dateindex_to_fee_dominance: EagerVec>, pub dateindex_to_subsidy_dominance: EagerVec>, pub indexes_to_subsidy_usd_1y_sma: Option>, - pub indexes_to_puell_multiple: Option>, - pub indexes_to_inflation_rate: ComputedVecsFromDateIndex, } diff --git a/crates/brk_computer/src/blocks/size/compute.rs b/crates/brk_computer/src/blocks/size/compute.rs new file mode 100644 index 000000000..ec33f0cdd --- /dev/null +++ b/crates/brk_computer/src/blocks/size/compute.rs @@ -0,0 +1,32 @@ +use brk_error::Result; +use brk_indexer::Indexer; +use vecdb::Exit; + +use super::Vecs; +use crate::{indexes, ComputeIndexes}; + +impl Vecs { + pub fn compute( + 
&mut self, + indexer: &Indexer, + indexes: &indexes::Vecs, + starting_indexes: &ComputeIndexes, + exit: &Exit, + ) -> Result<()> { + self.indexes_to_block_size.compute_rest( + indexes, + starting_indexes, + exit, + Some(&indexer.vecs.block.height_to_total_size), + )?; + + self.indexes_to_block_vbytes.compute_rest( + indexes, + starting_indexes, + exit, + Some(&self.height_to_vbytes), + )?; + + Ok(()) + } +} diff --git a/crates/brk_computer/src/blocks/size/import.rs b/crates/brk_computer/src/blocks/size/import.rs new file mode 100644 index 000000000..499132c08 --- /dev/null +++ b/crates/brk_computer/src/blocks/size/import.rs @@ -0,0 +1,60 @@ +use brk_error::Result; +use brk_indexer::Indexer; +use brk_types::{Height, StoredU64, Version}; +use vecdb::{Database, IterableCloneableVec, LazyVecFrom1, VecIndex}; + +use super::Vecs; +use crate::{ + indexes, + internal::{ComputedVecsFromHeight, Source, VecBuilderOptions}, +}; + +impl Vecs { + pub fn forced_import( + db: &Database, + version: Version, + indexer: &Indexer, + indexes: &indexes::Vecs, + ) -> Result { + let v0 = Version::ZERO; + let full_stats = || { + VecBuilderOptions::default() + .add_average() + .add_minmax() + .add_percentiles() + .add_sum() + .add_cumulative() + }; + + let height_to_vbytes = LazyVecFrom1::init( + "vbytes", + version + v0, + indexer.vecs.block.height_to_weight.boxed_clone(), + |height: Height, weight_iter| { + weight_iter + .get_at(height.to_usize()) + .map(|w| StoredU64::from(w.to_vbytes_floor())) + }, + ); + + Ok(Self { + indexes_to_block_size: ComputedVecsFromHeight::forced_import( + db, + "block_size", + Source::Vec(indexer.vecs.block.height_to_total_size.boxed_clone()), + version + v0, + indexes, + full_stats(), + )?, + indexes_to_block_vbytes: ComputedVecsFromHeight::forced_import( + db, + "block_vbytes", + Source::Vec(height_to_vbytes.boxed_clone()), + version + v0, + indexes, + full_stats(), + )?, + height_to_vbytes, + }) + } +} diff --git 
a/crates/brk_computer/src/chain/volume/mod.rs b/crates/brk_computer/src/blocks/size/mod.rs similarity index 100% rename from crates/brk_computer/src/chain/volume/mod.rs rename to crates/brk_computer/src/blocks/size/mod.rs diff --git a/crates/brk_computer/src/blocks/size/vecs.rs b/crates/brk_computer/src/blocks/size/vecs.rs new file mode 100644 index 000000000..85a240afd --- /dev/null +++ b/crates/brk_computer/src/blocks/size/vecs.rs @@ -0,0 +1,12 @@ +use brk_traversable::Traversable; +use brk_types::{Height, StoredU64, Weight}; +use vecdb::LazyVecFrom1; + +use crate::internal::ComputedVecsFromHeight; + +#[derive(Clone, Traversable)] +pub struct Vecs { + pub height_to_vbytes: LazyVecFrom1, + pub indexes_to_block_size: ComputedVecsFromHeight, + pub indexes_to_block_vbytes: ComputedVecsFromHeight, +} diff --git a/crates/brk_computer/src/blocks/time/compute.rs b/crates/brk_computer/src/blocks/time/compute.rs new file mode 100644 index 000000000..3fc4f4a4a --- /dev/null +++ b/crates/brk_computer/src/blocks/time/compute.rs @@ -0,0 +1,62 @@ +use brk_error::Result; +use brk_indexer::Indexer; +use brk_types::Timestamp; +use vecdb::{Exit, TypedVecIterator}; + +use super::Vecs; +use crate::{indexes, ComputeIndexes}; + +impl Vecs { + /// Compute height-to-time fields early, before indexes are computed. + /// These are needed by indexes::block to compute height_to_dateindex. 
+ pub fn compute_early( + &mut self, + indexer: &Indexer, + starting_height: brk_types::Height, + exit: &Exit, + ) -> Result<()> { + let mut prev_timestamp_fixed = None; + self.height_to_timestamp_fixed.compute_transform( + starting_height, + &indexer.vecs.block.height_to_timestamp, + |(h, timestamp, height_to_timestamp_fixed_iter)| { + if prev_timestamp_fixed.is_none() + && let Some(prev_h) = h.decremented() + { + prev_timestamp_fixed.replace( + height_to_timestamp_fixed_iter + .into_iter() + .get_unwrap(prev_h), + ); + } + let timestamp_fixed = + prev_timestamp_fixed.map_or(timestamp, |prev_d| prev_d.max(timestamp)); + prev_timestamp_fixed.replace(timestamp_fixed); + (h, timestamp_fixed) + }, + exit, + )?; + + Ok(()) + } + + pub fn compute( + &mut self, + indexes: &indexes::Vecs, + starting_indexes: &ComputeIndexes, + exit: &Exit, + ) -> Result<()> { + self.timeindexes_to_timestamp + .compute_all(starting_indexes, exit, |vec| { + vec.compute_transform( + starting_indexes.dateindex, + &indexes.time.dateindex_to_date, + |(di, d, ..)| (di, Timestamp::from(d)), + exit, + )?; + Ok(()) + })?; + + Ok(()) + } +} diff --git a/crates/brk_computer/src/blocks/time/import.rs b/crates/brk_computer/src/blocks/time/import.rs new file mode 100644 index 000000000..107b4722e --- /dev/null +++ b/crates/brk_computer/src/blocks/time/import.rs @@ -0,0 +1,61 @@ +use brk_error::Result; +use brk_indexer::Indexer; +use brk_types::{Date, DifficultyEpoch, Height, Version}; +use vecdb::{ + Database, EagerVec, ImportableVec, IterableCloneableVec, LazyVecFrom1, LazyVecFrom2, VecIndex, +}; + +use super::Vecs; +use crate::{ + indexes, + internal::{ComputedVecsFromDateIndex, Source, VecBuilderOptions}, +}; + +impl Vecs { + pub fn forced_import( + db: &Database, + version: Version, + indexer: &Indexer, + indexes: &indexes::Vecs, + ) -> Result { + let height_to_timestamp_fixed = + EagerVec::forced_import(db, "timestamp_fixed", version + Version::ZERO)?; + + Ok(Self { + height_to_date: 
LazyVecFrom1::init( + "date", + version + Version::ZERO, + indexer.vecs.block.height_to_timestamp.boxed_clone(), + |height: Height, timestamp_iter| { + timestamp_iter.get_at(height.to_usize()).map(Date::from) + }, + ), + height_to_date_fixed: LazyVecFrom1::init( + "date_fixed", + version + Version::ZERO, + height_to_timestamp_fixed.boxed_clone(), + |height: Height, timestamp_iter| timestamp_iter.get(height).map(Date::from), + ), + height_to_timestamp_fixed, + difficultyepoch_to_timestamp: LazyVecFrom2::init( + "timestamp", + version + Version::ZERO, + indexes.block.difficultyepoch_to_first_height.boxed_clone(), + indexer.vecs.block.height_to_timestamp.boxed_clone(), + |di: DifficultyEpoch, first_height_iter, timestamp_iter| { + first_height_iter + .get(di) + .and_then(|h: Height| timestamp_iter.get(h)) + }, + ), + timeindexes_to_timestamp: ComputedVecsFromDateIndex::forced_import( + db, + "timestamp", + Source::Compute, + version + Version::ZERO, + indexes, + VecBuilderOptions::default().add_first(), + )?, + }) + } +} diff --git a/crates/brk_computer/src/market/history/mod.rs b/crates/brk_computer/src/blocks/time/mod.rs similarity index 100% rename from crates/brk_computer/src/market/history/mod.rs rename to crates/brk_computer/src/blocks/time/mod.rs diff --git a/crates/brk_computer/src/blocks/time/vecs.rs b/crates/brk_computer/src/blocks/time/vecs.rs new file mode 100644 index 000000000..58f8d64fd --- /dev/null +++ b/crates/brk_computer/src/blocks/time/vecs.rs @@ -0,0 +1,16 @@ +use brk_traversable::Traversable; +use brk_types::{Date, DifficultyEpoch, Height, Timestamp}; +use vecdb::{EagerVec, LazyVecFrom1, LazyVecFrom2, PcoVec}; + +use crate::internal::ComputedVecsFromDateIndex; + +/// Timestamp and date metrics for blocks +#[derive(Clone, Traversable)] +pub struct Vecs { + pub height_to_date: LazyVecFrom1, + pub height_to_date_fixed: LazyVecFrom1, + pub height_to_timestamp_fixed: EagerVec>, + pub difficultyepoch_to_timestamp: + LazyVecFrom2, + pub 
timeindexes_to_timestamp: ComputedVecsFromDateIndex, +} diff --git a/crates/brk_computer/src/blocks/weight/compute.rs b/crates/brk_computer/src/blocks/weight/compute.rs new file mode 100644 index 000000000..5caa10600 --- /dev/null +++ b/crates/brk_computer/src/blocks/weight/compute.rs @@ -0,0 +1,25 @@ +use brk_error::Result; +use brk_indexer::Indexer; +use vecdb::Exit; + +use super::Vecs; +use crate::{indexes, ComputeIndexes}; + +impl Vecs { + pub fn compute( + &mut self, + indexer: &Indexer, + indexes: &indexes::Vecs, + starting_indexes: &ComputeIndexes, + exit: &Exit, + ) -> Result<()> { + self.indexes_to_block_weight.compute_rest( + indexes, + starting_indexes, + exit, + Some(&indexer.vecs.block.height_to_weight), + )?; + + Ok(()) + } +} diff --git a/crates/brk_computer/src/blocks/weight/import.rs b/crates/brk_computer/src/blocks/weight/import.rs new file mode 100644 index 000000000..49a0a5af4 --- /dev/null +++ b/crates/brk_computer/src/blocks/weight/import.rs @@ -0,0 +1,50 @@ +use brk_error::Result; +use brk_indexer::Indexer; +use brk_types::Version; +use vecdb::{Database, IterableCloneableVec}; + +use super::Vecs; +use crate::{ + indexes, + internal::{ComputedVecsFromHeight, LazyVecsFromHeight, Source, VecBuilderOptions, WeightToFullness}, +}; + +impl Vecs { + pub fn forced_import( + db: &Database, + version: Version, + indexer: &Indexer, + indexes: &indexes::Vecs, + ) -> Result { + let v0 = Version::ZERO; + let full_stats = || { + VecBuilderOptions::default() + .add_average() + .add_minmax() + .add_percentiles() + .add_sum() + .add_cumulative() + }; + + let indexes_to_block_weight = ComputedVecsFromHeight::forced_import( + db, + "block_weight", + Source::Vec(indexer.vecs.block.height_to_weight.boxed_clone()), + version + v0, + indexes, + full_stats(), + )?; + + let indexes_to_block_fullness = LazyVecsFromHeight::from_computed::( + "block_fullness", + version + v0, + indexer.vecs.block.height_to_weight.boxed_clone(), + &indexes_to_block_weight, + ); + + 
Ok(Self { + indexes_to_block_weight, + indexes_to_block_fullness, + }) + } +} diff --git a/crates/brk_computer/src/blocks/weight/mod.rs b/crates/brk_computer/src/blocks/weight/mod.rs new file mode 100644 index 000000000..1136f9ebd --- /dev/null +++ b/crates/brk_computer/src/blocks/weight/mod.rs @@ -0,0 +1,5 @@ +mod compute; +mod import; +mod vecs; + +pub use vecs::Vecs; diff --git a/crates/brk_computer/src/blocks/weight/vecs.rs b/crates/brk_computer/src/blocks/weight/vecs.rs new file mode 100644 index 000000000..409b62762 --- /dev/null +++ b/crates/brk_computer/src/blocks/weight/vecs.rs @@ -0,0 +1,11 @@ +use brk_traversable::Traversable; +use brk_types::{StoredF32, Weight}; + +use crate::internal::{ComputedVecsFromHeight, LazyVecsFromHeight}; + +#[derive(Clone, Traversable)] +pub struct Vecs { + pub indexes_to_block_weight: ComputedVecsFromHeight, + /// Block fullness as percentage of max block weight (0-100%) + pub indexes_to_block_fullness: LazyVecsFromHeight, +} diff --git a/crates/brk_computer/src/chain/block/compute.rs b/crates/brk_computer/src/chain/block/compute.rs deleted file mode 100644 index dca690c7c..000000000 --- a/crates/brk_computer/src/chain/block/compute.rs +++ /dev/null @@ -1,160 +0,0 @@ -use brk_error::Result; -use brk_indexer::Indexer; -use brk_types::{CheckedSub, Height, StoredU32, StoredU64, Timestamp}; -use vecdb::{Exit, TypedVecIterator}; - -use super::Vecs; -use crate::{ComputeIndexes, indexes}; - -impl Vecs { - pub fn compute( - &mut self, - indexer: &Indexer, - indexes: &indexes::Vecs, - starting_indexes: &ComputeIndexes, - exit: &Exit, - ) -> Result<()> { - let mut height_to_timestamp_fixed_iter = - indexes.block.height_to_timestamp_fixed.into_iter(); - let mut prev = Height::ZERO; - self.height_to_24h_block_count.compute_transform( - starting_indexes.height, - &indexes.block.height_to_timestamp_fixed, - |(h, t, ..)| { - while t.difference_in_days_between(height_to_timestamp_fixed_iter.get_unwrap(prev)) - > 0 - { - prev.increment(); - 
if prev > h { - unreachable!() - } - } - (h, StoredU32::from(*h + 1 - *prev)) - }, - exit, - )?; - - self.indexes_to_block_count - .compute_all(indexes, starting_indexes, exit, |v| { - v.compute_range( - starting_indexes.height, - &indexer.vecs.block.height_to_weight, - |h| (h, StoredU32::from(1_u32)), - exit, - )?; - Ok(()) - })?; - - self.indexes_to_1w_block_count - .compute_all(starting_indexes, exit, |v| { - v.compute_sum( - starting_indexes.dateindex, - self.indexes_to_block_count.dateindex.unwrap_sum(), - 7, - exit, - )?; - Ok(()) - })?; - - self.indexes_to_1m_block_count - .compute_all(starting_indexes, exit, |v| { - v.compute_sum( - starting_indexes.dateindex, - self.indexes_to_block_count.dateindex.unwrap_sum(), - 30, - exit, - )?; - Ok(()) - })?; - - self.indexes_to_1y_block_count - .compute_all(starting_indexes, exit, |v| { - v.compute_sum( - starting_indexes.dateindex, - self.indexes_to_block_count.dateindex.unwrap_sum(), - 365, - exit, - )?; - Ok(()) - })?; - - let mut height_to_timestamp_iter = indexer.vecs.block.height_to_timestamp.iter()?; - self.height_to_interval.compute_transform( - starting_indexes.height, - &indexer.vecs.block.height_to_timestamp, - |(height, timestamp, ..)| { - let interval = height.decremented().map_or(Timestamp::ZERO, |prev_h| { - let prev_timestamp = height_to_timestamp_iter.get_unwrap(prev_h); - timestamp - .checked_sub(prev_timestamp) - .unwrap_or(Timestamp::ZERO) - }); - (height, interval) - }, - exit, - )?; - - self.indexes_to_block_interval.compute_rest( - indexes, - starting_indexes, - exit, - Some(&self.height_to_interval), - )?; - - self.indexes_to_block_weight.compute_rest( - indexes, - starting_indexes, - exit, - Some(&indexer.vecs.block.height_to_weight), - )?; - - self.indexes_to_block_size.compute_rest( - indexes, - starting_indexes, - exit, - Some(&indexer.vecs.block.height_to_total_size), - )?; - - self.height_to_vbytes.compute_transform( - starting_indexes.height, - &indexer.vecs.block.height_to_weight, - 
|(h, w, ..)| { - ( - h, - StoredU64::from(bitcoin::Weight::from(w).to_vbytes_floor()), - ) - }, - exit, - )?; - - self.indexes_to_block_vbytes.compute_rest( - indexes, - starting_indexes, - exit, - Some(&self.height_to_vbytes), - )?; - - // Timestamp metrics (moved from epoch) - self.timeindexes_to_timestamp - .compute_all(starting_indexes, exit, |vec| { - vec.compute_transform( - starting_indexes.dateindex, - &indexes.time.dateindex_to_date, - |(di, d, ..)| (di, Timestamp::from(d)), - exit, - )?; - Ok(()) - })?; - - let mut height_to_timestamp_iter = indexer.vecs.block.height_to_timestamp.iter()?; - - self.difficultyepoch_to_timestamp.compute_transform( - starting_indexes.difficultyepoch, - &indexes.block.difficultyepoch_to_first_height, - |(i, h, ..)| (i, height_to_timestamp_iter.get_unwrap(h)), - exit, - )?; - - Ok(()) - } -} diff --git a/crates/brk_computer/src/chain/block/import.rs b/crates/brk_computer/src/chain/block/import.rs deleted file mode 100644 index 1f7540517..000000000 --- a/crates/brk_computer/src/chain/block/import.rs +++ /dev/null @@ -1,180 +0,0 @@ -use brk_error::Result; -use brk_indexer::Indexer; -use brk_types::{StoredU64, Version}; -use vecdb::{Database, EagerVec, ImportableVec, IterableCloneableVec, LazyVecFrom1}; - -use super::Vecs; -use crate::{ - chain::{ - TARGET_BLOCKS_PER_DAY, TARGET_BLOCKS_PER_DECADE, TARGET_BLOCKS_PER_MONTH, - TARGET_BLOCKS_PER_QUARTER, TARGET_BLOCKS_PER_SEMESTER, TARGET_BLOCKS_PER_WEEK, - TARGET_BLOCKS_PER_YEAR, - }, - grouped::{ComputedVecsFromDateIndex, ComputedVecsFromHeight, Source, VecBuilderOptions}, - indexes, -}; - -impl Vecs { - pub fn forced_import( - db: &Database, - version: Version, - indexer: &Indexer, - indexes: &indexes::Vecs, - ) -> Result { - let v0 = Version::ZERO; - - let last = || VecBuilderOptions::default().add_last(); - let sum_cum = || VecBuilderOptions::default().add_sum().add_cumulative(); - let stats = || { - VecBuilderOptions::default() - .add_average() - .add_minmax() - 
.add_percentiles() - }; - let full_stats = || { - VecBuilderOptions::default() - .add_average() - .add_minmax() - .add_percentiles() - .add_sum() - .add_cumulative() - }; - - let dateindex_to_block_count_target = LazyVecFrom1::init( - "block_count_target", - version + v0, - indexes.time.dateindex_to_dateindex.boxed_clone(), - |_, _| Some(StoredU64::from(TARGET_BLOCKS_PER_DAY)), - ); - let weekindex_to_block_count_target = LazyVecFrom1::init( - "block_count_target", - version + v0, - indexes.time.weekindex_to_weekindex.boxed_clone(), - |_, _| Some(StoredU64::from(TARGET_BLOCKS_PER_WEEK)), - ); - let monthindex_to_block_count_target = LazyVecFrom1::init( - "block_count_target", - version + v0, - indexes.time.monthindex_to_monthindex.boxed_clone(), - |_, _| Some(StoredU64::from(TARGET_BLOCKS_PER_MONTH)), - ); - let quarterindex_to_block_count_target = LazyVecFrom1::init( - "block_count_target", - version + v0, - indexes.time.quarterindex_to_quarterindex.boxed_clone(), - |_, _| Some(StoredU64::from(TARGET_BLOCKS_PER_QUARTER)), - ); - let semesterindex_to_block_count_target = LazyVecFrom1::init( - "block_count_target", - version + v0, - indexes.time.semesterindex_to_semesterindex.boxed_clone(), - |_, _| Some(StoredU64::from(TARGET_BLOCKS_PER_SEMESTER)), - ); - let yearindex_to_block_count_target = LazyVecFrom1::init( - "block_count_target", - version + v0, - indexes.time.yearindex_to_yearindex.boxed_clone(), - |_, _| Some(StoredU64::from(TARGET_BLOCKS_PER_YEAR)), - ); - let decadeindex_to_block_count_target = LazyVecFrom1::init( - "block_count_target", - version + v0, - indexes.time.decadeindex_to_decadeindex.boxed_clone(), - |_, _| Some(StoredU64::from(TARGET_BLOCKS_PER_DECADE)), - ); - - let height_to_interval = EagerVec::forced_import(db, "interval", version + v0)?; - let height_to_vbytes = EagerVec::forced_import(db, "vbytes", version + v0)?; - - Ok(Self { - dateindex_to_block_count_target, - weekindex_to_block_count_target, - monthindex_to_block_count_target, - 
quarterindex_to_block_count_target, - semesterindex_to_block_count_target, - yearindex_to_block_count_target, - decadeindex_to_block_count_target, - height_to_interval: height_to_interval.clone(), - height_to_24h_block_count: EagerVec::forced_import( - db, - "24h_block_count", - version + v0, - )?, - height_to_vbytes: height_to_vbytes.clone(), - indexes_to_block_count: ComputedVecsFromHeight::forced_import( - db, - "block_count", - Source::Compute, - version + v0, - indexes, - sum_cum(), - )?, - indexes_to_1w_block_count: ComputedVecsFromDateIndex::forced_import( - db, - "1w_block_count", - Source::Compute, - version + v0, - indexes, - last(), - )?, - indexes_to_1m_block_count: ComputedVecsFromDateIndex::forced_import( - db, - "1m_block_count", - Source::Compute, - version + v0, - indexes, - last(), - )?, - indexes_to_1y_block_count: ComputedVecsFromDateIndex::forced_import( - db, - "1y_block_count", - Source::Compute, - version + v0, - indexes, - last(), - )?, - indexes_to_block_interval: ComputedVecsFromHeight::forced_import( - db, - "block_interval", - Source::Vec(height_to_interval.boxed_clone()), - version + v0, - indexes, - stats(), - )?, - indexes_to_block_size: ComputedVecsFromHeight::forced_import( - db, - "block_size", - Source::Vec(indexer.vecs.block.height_to_total_size.boxed_clone()), - version + v0, - indexes, - full_stats(), - )?, - indexes_to_block_vbytes: ComputedVecsFromHeight::forced_import( - db, - "block_vbytes", - Source::Vec(height_to_vbytes.boxed_clone()), - version + v0, - indexes, - full_stats(), - )?, - indexes_to_block_weight: ComputedVecsFromHeight::forced_import( - db, - "block_weight", - Source::Vec(indexer.vecs.block.height_to_weight.boxed_clone()), - version + v0, - indexes, - full_stats(), - )?, - // Timestamp metrics (moved from epoch) - difficultyepoch_to_timestamp: EagerVec::forced_import(db, "timestamp", version + v0)?, - timeindexes_to_timestamp: ComputedVecsFromDateIndex::forced_import( - db, - "timestamp", - Source::Compute, 
- version + v0, - indexes, - VecBuilderOptions::default().add_first(), - )?, - }) - } -} diff --git a/crates/brk_computer/src/chain/block/vecs.rs b/crates/brk_computer/src/chain/block/vecs.rs deleted file mode 100644 index b0e41f94b..000000000 --- a/crates/brk_computer/src/chain/block/vecs.rs +++ /dev/null @@ -1,37 +0,0 @@ -use brk_traversable::Traversable; -use brk_types::{ - DateIndex, DecadeIndex, DifficultyEpoch, Height, MonthIndex, QuarterIndex, SemesterIndex, - StoredU32, StoredU64, Timestamp, WeekIndex, Weight, YearIndex, -}; -use vecdb::{EagerVec, LazyVecFrom1, PcoVec}; - -use crate::grouped::{ComputedVecsFromDateIndex, ComputedVecsFromHeight}; - -/// Block-related metrics: count, interval, size, weight, vbytes, timestamps -#[derive(Clone, Traversable)] -pub struct Vecs { - pub dateindex_to_block_count_target: LazyVecFrom1, - pub weekindex_to_block_count_target: LazyVecFrom1, - pub monthindex_to_block_count_target: - LazyVecFrom1, - pub quarterindex_to_block_count_target: - LazyVecFrom1, - pub semesterindex_to_block_count_target: - LazyVecFrom1, - pub yearindex_to_block_count_target: LazyVecFrom1, - pub decadeindex_to_block_count_target: - LazyVecFrom1, - pub height_to_interval: EagerVec>, - pub height_to_24h_block_count: EagerVec>, - pub height_to_vbytes: EagerVec>, - pub indexes_to_block_count: ComputedVecsFromHeight, - pub indexes_to_1w_block_count: ComputedVecsFromDateIndex, - pub indexes_to_1m_block_count: ComputedVecsFromDateIndex, - pub indexes_to_1y_block_count: ComputedVecsFromDateIndex, - pub indexes_to_block_interval: ComputedVecsFromHeight, - pub indexes_to_block_size: ComputedVecsFromHeight, - pub indexes_to_block_vbytes: ComputedVecsFromHeight, - pub indexes_to_block_weight: ComputedVecsFromHeight, - pub difficultyepoch_to_timestamp: EagerVec>, - pub timeindexes_to_timestamp: ComputedVecsFromDateIndex, -} diff --git a/crates/brk_computer/src/chain/compute.rs b/crates/brk_computer/src/chain/compute.rs deleted file mode 100644 index 
fcf6f53be..000000000 --- a/crates/brk_computer/src/chain/compute.rs +++ /dev/null @@ -1,63 +0,0 @@ -use brk_error::Result; -use brk_indexer::Indexer; -use vecdb::Exit; - -use crate::{indexes, price, txins, ComputeIndexes}; - -use super::Vecs; - -impl Vecs { - pub fn compute( - &mut self, - indexer: &Indexer, - indexes: &indexes::Vecs, - txins: &txins::Vecs, - starting_indexes: &ComputeIndexes, - price: Option<&price::Vecs>, - exit: &Exit, - ) -> Result<()> { - // Independent computations first - self.block.compute(indexer, indexes, starting_indexes, exit)?; - self.epoch.compute(indexes, starting_indexes, exit)?; - self.transaction.compute(indexer, indexes, txins, starting_indexes, price, exit)?; - - // Coinbase depends on block and transaction - self.coinbase.compute( - indexer, - indexes, - &self.block, - &self.transaction, - starting_indexes, - price, - exit, - )?; - - // Output type depends on transaction - self.output_type.compute(indexer, indexes, &self.transaction, starting_indexes, exit)?; - - // Volume depends on transaction and coinbase - self.volume.compute( - indexer, - indexes, - &self.transaction, - &self.coinbase, - starting_indexes, - price, - exit, - )?; - - // Mining depends on block and coinbase - self.mining.compute( - indexer, - indexes, - &self.block, - &self.coinbase, - starting_indexes, - exit, - )?; - - let _lock = exit.lock(); - self.db.compact()?; - Ok(()) - } -} diff --git a/crates/brk_computer/src/chain/epoch/vecs.rs b/crates/brk_computer/src/chain/epoch/vecs.rs deleted file mode 100644 index 507b4364e..000000000 --- a/crates/brk_computer/src/chain/epoch/vecs.rs +++ /dev/null @@ -1,16 +0,0 @@ -use brk_traversable::Traversable; -use brk_types::{DifficultyEpoch, HalvingEpoch, StoredF32, StoredU32}; - -use crate::grouped::{ComputedVecsFromDateIndex, ComputedVecsFromHeight}; - -/// Epoch metrics: difficulty epochs, halving epochs, and countdown to next epoch -#[derive(Clone, Traversable)] -pub struct Vecs { - pub indexes_to_difficultyepoch: 
ComputedVecsFromDateIndex, - pub indexes_to_halvingepoch: ComputedVecsFromDateIndex, - // Countdown metrics (moved from mining) - pub indexes_to_blocks_before_next_difficulty_adjustment: ComputedVecsFromHeight, - pub indexes_to_days_before_next_difficulty_adjustment: ComputedVecsFromHeight, - pub indexes_to_blocks_before_next_halving: ComputedVecsFromHeight, - pub indexes_to_days_before_next_halving: ComputedVecsFromHeight, -} diff --git a/crates/brk_computer/src/chain/output_type/import.rs b/crates/brk_computer/src/chain/output_type/import.rs deleted file mode 100644 index 2d42843b3..000000000 --- a/crates/brk_computer/src/chain/output_type/import.rs +++ /dev/null @@ -1,131 +0,0 @@ -use brk_error::Result; -use brk_types::Version; -use vecdb::Database; - -use super::Vecs; -use crate::{ - grouped::{ComputedVecsFromHeight, Source, VecBuilderOptions}, - indexes, -}; - -impl Vecs { - pub fn forced_import(db: &Database, version: Version, indexes: &indexes::Vecs) -> Result { - let v0 = Version::ZERO; - let last = || VecBuilderOptions::default().add_last(); - let full_stats = || { - VecBuilderOptions::default() - .add_average() - .add_minmax() - .add_percentiles() - .add_sum() - .add_cumulative() - }; - - Ok(Self { - indexes_to_p2a_count: ComputedVecsFromHeight::forced_import( - db, - "p2a_count", - Source::Compute, - version + v0, - indexes, - full_stats(), - )?, - indexes_to_p2ms_count: ComputedVecsFromHeight::forced_import( - db, - "p2ms_count", - Source::Compute, - version + v0, - indexes, - full_stats(), - )?, - indexes_to_p2pk33_count: ComputedVecsFromHeight::forced_import( - db, - "p2pk33_count", - Source::Compute, - version + v0, - indexes, - full_stats(), - )?, - indexes_to_p2pk65_count: ComputedVecsFromHeight::forced_import( - db, - "p2pk65_count", - Source::Compute, - version + v0, - indexes, - full_stats(), - )?, - indexes_to_p2pkh_count: ComputedVecsFromHeight::forced_import( - db, - "p2pkh_count", - Source::Compute, - version + v0, - indexes, - full_stats(), 
- )?, - indexes_to_p2sh_count: ComputedVecsFromHeight::forced_import( - db, - "p2sh_count", - Source::Compute, - version + v0, - indexes, - full_stats(), - )?, - indexes_to_p2tr_count: ComputedVecsFromHeight::forced_import( - db, - "p2tr_count", - Source::Compute, - version + v0, - indexes, - full_stats(), - )?, - indexes_to_p2wpkh_count: ComputedVecsFromHeight::forced_import( - db, - "p2wpkh_count", - Source::Compute, - version + v0, - indexes, - full_stats(), - )?, - indexes_to_p2wsh_count: ComputedVecsFromHeight::forced_import( - db, - "p2wsh_count", - Source::Compute, - version + v0, - indexes, - full_stats(), - )?, - indexes_to_opreturn_count: ComputedVecsFromHeight::forced_import( - db, - "opreturn_count", - Source::Compute, - version + v0, - indexes, - full_stats(), - )?, - indexes_to_emptyoutput_count: ComputedVecsFromHeight::forced_import( - db, - "emptyoutput_count", - Source::Compute, - version + v0, - indexes, - full_stats(), - )?, - indexes_to_unknownoutput_count: ComputedVecsFromHeight::forced_import( - db, - "unknownoutput_count", - Source::Compute, - version + v0, - indexes, - full_stats(), - )?, - indexes_to_exact_utxo_count: ComputedVecsFromHeight::forced_import( - db, - "exact_utxo_count", - Source::Compute, - version + v0, - indexes, - last(), - )?, - }) - } -} diff --git a/crates/brk_computer/src/chain/transaction/compute.rs b/crates/brk_computer/src/chain/transaction/compute.rs deleted file mode 100644 index 66fe64298..000000000 --- a/crates/brk_computer/src/chain/transaction/compute.rs +++ /dev/null @@ -1,141 +0,0 @@ -use brk_error::Result; -use brk_indexer::Indexer; -use brk_types::{FeeRate, Sats, StoredU64, TxVersion}; -use vecdb::{Exit, TypedVecIterator, unlikely}; - -use super::Vecs; -use crate::{ComputeIndexes, grouped::ComputedVecsFromHeight, indexes, price, txins}; - -impl Vecs { - pub fn compute( - &mut self, - indexer: &Indexer, - indexes: &indexes::Vecs, - txins: &txins::Vecs, - starting_indexes: &ComputeIndexes, - price: 
Option<&price::Vecs>, - exit: &Exit, - ) -> Result<()> { - self.indexes_to_tx_count - .compute_all(indexes, starting_indexes, exit, |v| { - v.compute_count_from_indexes( - starting_indexes.height, - &indexer.vecs.tx.height_to_first_txindex, - &indexer.vecs.tx.txindex_to_txid, - exit, - )?; - Ok(()) - })?; - - self.indexes_to_input_count.compute_rest( - indexer, - indexes, - starting_indexes, - exit, - Some(&indexes.transaction.txindex_to_input_count), - )?; - - self.indexes_to_output_count.compute_rest( - indexer, - indexes, - starting_indexes, - exit, - Some(&indexes.transaction.txindex_to_output_count), - )?; - - let compute_indexes_to_tx_vany = - |indexes_to_tx_vany: &mut ComputedVecsFromHeight, txversion: TxVersion| { - let mut txindex_to_txversion_iter = indexer.vecs.tx.txindex_to_txversion.iter()?; - indexes_to_tx_vany.compute_all(indexes, starting_indexes, exit, |vec| { - vec.compute_filtered_count_from_indexes( - starting_indexes.height, - &indexer.vecs.tx.height_to_first_txindex, - &indexer.vecs.tx.txindex_to_txid, - |txindex| { - let v = txindex_to_txversion_iter.get_unwrap(txindex); - v == txversion - }, - exit, - )?; - Ok(()) - }) - }; - compute_indexes_to_tx_vany(&mut self.indexes_to_tx_v1, TxVersion::ONE)?; - compute_indexes_to_tx_vany(&mut self.indexes_to_tx_v2, TxVersion::TWO)?; - compute_indexes_to_tx_vany(&mut self.indexes_to_tx_v3, TxVersion::THREE)?; - - self.txindex_to_input_value.compute_sum_from_indexes( - starting_indexes.txindex, - &indexer.vecs.tx.txindex_to_first_txinindex, - &indexes.transaction.txindex_to_input_count, - &txins.txinindex_to_value, - exit, - )?; - - self.txindex_to_output_value.compute_sum_from_indexes( - starting_indexes.txindex, - &indexer.vecs.tx.txindex_to_first_txoutindex, - &indexes.transaction.txindex_to_output_count, - &indexer.vecs.txout.txoutindex_to_value, - exit, - )?; - - self.txindex_to_fee.compute_transform2( - starting_indexes.txindex, - &self.txindex_to_input_value, - &self.txindex_to_output_value, - |(i, 
input, output, ..)| { - let fee = if unlikely(input.is_max()) { - Sats::ZERO - } else { - input - output - }; - (i, fee) - }, - exit, - )?; - - self.txindex_to_fee_rate.compute_transform2( - starting_indexes.txindex, - &self.txindex_to_fee, - &self.txindex_to_vsize, - |(txindex, fee, vsize, ..)| (txindex, FeeRate::from((fee, vsize))), - exit, - )?; - - self.indexes_to_fee.compute_rest( - indexer, - indexes, - starting_indexes, - exit, - Some(&self.txindex_to_fee), - price, - )?; - - self.indexes_to_fee_rate.compute_rest( - indexer, - indexes, - starting_indexes, - exit, - Some(&self.txindex_to_fee_rate), - )?; - - self.indexes_to_tx_weight.compute_rest( - indexer, - indexes, - starting_indexes, - exit, - Some(&self.txindex_to_weight), - )?; - - self.indexes_to_tx_vsize.compute_rest( - indexer, - indexes, - starting_indexes, - exit, - Some(&self.txindex_to_vsize), - )?; - - Ok(()) - } -} diff --git a/crates/brk_computer/src/chain/transaction/import.rs b/crates/brk_computer/src/chain/transaction/import.rs deleted file mode 100644 index aab812bbe..000000000 --- a/crates/brk_computer/src/chain/transaction/import.rs +++ /dev/null @@ -1,180 +0,0 @@ -use brk_error::Result; -use brk_indexer::Indexer; -use brk_types::{StoredBool, TxIndex, VSize, Version, Weight}; -use vecdb::{ - Database, EagerVec, ImportableVec, IterableCloneableVec, LazyVecFrom1, LazyVecFrom2, VecIndex, -}; - -use super::Vecs; -use crate::{ - grouped::{ - ComputedValueVecsFromTxindex, ComputedVecsFromHeight, ComputedVecsFromTxindex, Source, - VecBuilderOptions, - }, - indexes, price, -}; - -impl Vecs { - pub fn forced_import( - db: &Database, - version: Version, - indexer: &Indexer, - indexes: &indexes::Vecs, - price: Option<&price::Vecs>, - ) -> Result { - let v0 = Version::ZERO; - - let stats = || { - VecBuilderOptions::default() - .add_average() - .add_minmax() - .add_percentiles() - }; - let full_stats = || { - VecBuilderOptions::default() - .add_average() - .add_minmax() - .add_percentiles() - 
.add_sum() - .add_cumulative() - }; - let sum_cum = || VecBuilderOptions::default().add_sum().add_cumulative(); - - let txindex_to_weight = LazyVecFrom2::init( - "weight", - version + v0, - indexer.vecs.tx.txindex_to_base_size.boxed_clone(), - indexer.vecs.tx.txindex_to_total_size.boxed_clone(), - |index: TxIndex, txindex_to_base_size_iter, txindex_to_total_size_iter| { - let index = index.to_usize(); - txindex_to_base_size_iter.get_at(index).map(|base_size| { - let total_size = txindex_to_total_size_iter.get_at_unwrap(index); - let wu = usize::from(base_size) * 3 + usize::from(total_size); - Weight::from(bitcoin::Weight::from_wu_usize(wu)) - }) - }, - ); - - let txindex_to_vsize = LazyVecFrom1::init( - "vsize", - version + v0, - txindex_to_weight.boxed_clone(), - |index: TxIndex, iter| iter.get(index).map(VSize::from), - ); - - let txindex_to_is_coinbase = LazyVecFrom2::init( - "is_coinbase", - version + v0, - indexer.vecs.tx.txindex_to_height.boxed_clone(), - indexer.vecs.tx.height_to_first_txindex.boxed_clone(), - |index: TxIndex, txindex_to_height_iter, height_to_first_txindex_iter| { - txindex_to_height_iter.get(index).map(|height| { - let txindex = height_to_first_txindex_iter.get_unwrap(height); - StoredBool::from(index == txindex) - }) - }, - ); - - let txindex_to_input_value = EagerVec::forced_import(db, "input_value", version + v0)?; - let txindex_to_output_value = EagerVec::forced_import(db, "output_value", version + v0)?; - let txindex_to_fee = EagerVec::forced_import(db, "fee", version + v0)?; - let txindex_to_fee_rate = EagerVec::forced_import(db, "fee_rate", version + v0)?; - - Ok(Self { - indexes_to_tx_count: ComputedVecsFromHeight::forced_import( - db, - "tx_count", - Source::Compute, - version + v0, - indexes, - full_stats(), - )?, - indexes_to_tx_v1: ComputedVecsFromHeight::forced_import( - db, - "tx_v1", - Source::Compute, - version + v0, - indexes, - sum_cum(), - )?, - indexes_to_tx_v2: ComputedVecsFromHeight::forced_import( - db, - "tx_v2", - 
Source::Compute, - version + v0, - indexes, - sum_cum(), - )?, - indexes_to_tx_v3: ComputedVecsFromHeight::forced_import( - db, - "tx_v3", - Source::Compute, - version + v0, - indexes, - sum_cum(), - )?, - indexes_to_tx_vsize: ComputedVecsFromTxindex::forced_import( - db, - "tx_vsize", - Source::Vec(txindex_to_vsize.boxed_clone()), - version + v0, - indexes, - stats(), - )?, - indexes_to_tx_weight: ComputedVecsFromTxindex::forced_import( - db, - "tx_weight", - Source::Vec(txindex_to_weight.boxed_clone()), - version + v0, - indexes, - stats(), - )?, - indexes_to_input_count: ComputedVecsFromTxindex::forced_import( - db, - "input_count", - Source::Vec(indexes.transaction.txindex_to_input_count.boxed_clone()), - version + v0, - indexes, - full_stats(), - )?, - indexes_to_output_count: ComputedVecsFromTxindex::forced_import( - db, - "output_count", - Source::Vec(indexes.transaction.txindex_to_output_count.boxed_clone()), - version + v0, - indexes, - full_stats(), - )?, - txindex_to_is_coinbase, - txindex_to_vsize, - txindex_to_weight, - txindex_to_input_value, - txindex_to_output_value, - txindex_to_fee: txindex_to_fee.clone(), - txindex_to_fee_rate: txindex_to_fee_rate.clone(), - indexes_to_fee: ComputedValueVecsFromTxindex::forced_import( - db, - "fee", - indexer, - indexes, - Source::Vec(txindex_to_fee.boxed_clone()), - version + v0, - price, - VecBuilderOptions::default() - .add_sum() - .add_cumulative() - .add_percentiles() - .add_minmax() - .add_average(), - )?, - indexes_to_fee_rate: ComputedVecsFromTxindex::forced_import( - db, - "fee_rate", - Source::Vec(txindex_to_fee_rate.boxed_clone()), - version + v0, - indexes, - stats(), - )?, - }) - } -} diff --git a/crates/brk_computer/src/chain/transaction/vecs.rs b/crates/brk_computer/src/chain/transaction/vecs.rs deleted file mode 100644 index 9d3607670..000000000 --- a/crates/brk_computer/src/chain/transaction/vecs.rs +++ /dev/null @@ -1,28 +0,0 @@ -use brk_traversable::Traversable; -use brk_types::{FeeRate, 
Height, Sats, StoredBool, StoredU32, StoredU64, TxIndex, VSize, Weight}; -use vecdb::{EagerVec, LazyVecFrom1, LazyVecFrom2, PcoVec}; - -use crate::grouped::{ComputedValueVecsFromTxindex, ComputedVecsFromHeight, ComputedVecsFromTxindex}; - -/// Transaction-related metrics -#[derive(Clone, Traversable)] -pub struct Vecs { - pub indexes_to_tx_count: ComputedVecsFromHeight, - pub indexes_to_tx_v1: ComputedVecsFromHeight, - pub indexes_to_tx_v2: ComputedVecsFromHeight, - pub indexes_to_tx_v3: ComputedVecsFromHeight, - pub indexes_to_tx_vsize: ComputedVecsFromTxindex, - pub indexes_to_tx_weight: ComputedVecsFromTxindex, - pub indexes_to_input_count: ComputedVecsFromTxindex, - pub indexes_to_output_count: ComputedVecsFromTxindex, - pub txindex_to_is_coinbase: LazyVecFrom2, - pub txindex_to_vsize: LazyVecFrom1, - pub txindex_to_weight: LazyVecFrom2, - /// Value == 0 when Coinbase - pub txindex_to_input_value: EagerVec>, - pub txindex_to_output_value: EagerVec>, - pub txindex_to_fee: EagerVec>, - pub txindex_to_fee_rate: EagerVec>, - pub indexes_to_fee: ComputedValueVecsFromTxindex, - pub indexes_to_fee_rate: ComputedVecsFromTxindex, -} diff --git a/crates/brk_computer/src/cointime.rs b/crates/brk_computer/src/cointime.rs deleted file mode 100644 index 07d93fe3a..000000000 --- a/crates/brk_computer/src/cointime.rs +++ /dev/null @@ -1,588 +0,0 @@ -use std::path::Path; - -use brk_error::Result; -use brk_traversable::Traversable; -use brk_types::{Bitcoin, CheckedSub, Dollars, StoredF32, StoredF64, Version}; -use vecdb::{Database, Exit, PAGE_SIZE, TypedVecIterator}; - -use crate::{grouped::ComputedVecsFromDateIndex, utils::OptionExt}; - -use super::{ - ComputeIndexes, chain, - grouped::{ - ComputedRatioVecsFromDateIndex, ComputedValueVecsFromHeight, ComputedVecsFromHeight, - Source, VecBuilderOptions, - }, - indexes, price, stateful, -}; - -pub const DB_NAME: &str = "cointime"; - -#[derive(Clone, Traversable)] -pub struct Vecs { - db: Database, - - pub 
indexes_to_coinblocks_created: ComputedVecsFromHeight, - pub indexes_to_coinblocks_stored: ComputedVecsFromHeight, - pub indexes_to_liveliness: ComputedVecsFromHeight, - pub indexes_to_vaultedness: ComputedVecsFromHeight, - pub indexes_to_activity_to_vaultedness_ratio: ComputedVecsFromHeight, - pub indexes_to_vaulted_supply: ComputedValueVecsFromHeight, - pub indexes_to_active_supply: ComputedValueVecsFromHeight, - pub indexes_to_thermo_cap: ComputedVecsFromHeight, - pub indexes_to_investor_cap: ComputedVecsFromHeight, - pub indexes_to_vaulted_cap: ComputedVecsFromHeight, - pub indexes_to_active_cap: ComputedVecsFromHeight, - pub indexes_to_vaulted_price: ComputedVecsFromHeight, - pub indexes_to_vaulted_price_ratio: ComputedRatioVecsFromDateIndex, - pub indexes_to_active_price: ComputedVecsFromHeight, - pub indexes_to_active_price_ratio: ComputedRatioVecsFromDateIndex, - pub indexes_to_true_market_mean: ComputedVecsFromHeight, - pub indexes_to_true_market_mean_ratio: ComputedRatioVecsFromDateIndex, - pub indexes_to_cointime_value_destroyed: ComputedVecsFromHeight, - pub indexes_to_cointime_value_created: ComputedVecsFromHeight, - pub indexes_to_cointime_value_stored: ComputedVecsFromHeight, - pub indexes_to_cointime_price: ComputedVecsFromHeight, - pub indexes_to_cointime_cap: ComputedVecsFromHeight, - pub indexes_to_cointime_price_ratio: ComputedRatioVecsFromDateIndex, - pub indexes_to_cointime_adj_inflation_rate: ComputedVecsFromDateIndex, - pub indexes_to_cointime_adj_tx_btc_velocity: ComputedVecsFromDateIndex, - pub indexes_to_cointime_adj_tx_usd_velocity: ComputedVecsFromDateIndex, - // pub indexes_to_thermo_cap_rel_to_investor_cap: ComputedValueVecsFromHeight, -} - -impl Vecs { - pub fn forced_import( - parent_path: &Path, - parent_version: Version, - indexes: &indexes::Vecs, - price: Option<&price::Vecs>, - ) -> Result { - let db = Database::open(&parent_path.join(DB_NAME))?; - db.set_min_len(PAGE_SIZE * 1_000_000)?; - - let compute_dollars = 
price.is_some(); - let v0 = parent_version; - let v1 = parent_version + Version::ONE; - - let last = || VecBuilderOptions::default().add_last(); - let sum_cum = || VecBuilderOptions::default().add_sum().add_cumulative(); - - macro_rules! computed_h { - ($name:expr, $opts:expr) => { - ComputedVecsFromHeight::forced_import( - &db, - $name, - Source::Compute, - v0, - indexes, - $opts, - )? - }; - ($name:expr, $v:expr, $opts:expr) => { - ComputedVecsFromHeight::forced_import( - &db, - $name, - Source::Compute, - $v, - indexes, - $opts, - )? - }; - } - macro_rules! computed_di { - ($name:expr, $opts:expr) => { - ComputedVecsFromDateIndex::forced_import( - &db, - $name, - Source::Compute, - v0, - indexes, - $opts, - )? - }; - } - macro_rules! ratio_di { - ($name:expr, $source:expr) => { - ComputedRatioVecsFromDateIndex::forced_import( - &db, - $name, - Some($source), - v0, - indexes, - true, - price, - )? - }; - } - macro_rules! value_h { - ($name:expr) => { - ComputedValueVecsFromHeight::forced_import( - &db, - $name, - Source::Compute, - v1, - last(), - compute_dollars, - indexes, - )? 
- }; - } - - // Extract price vecs before struct literal so they can be used as sources for ratios - let indexes_to_vaulted_price = computed_h!("vaulted_price", last()); - let indexes_to_active_price = computed_h!("active_price", last()); - let indexes_to_true_market_mean = computed_h!("true_market_mean", last()); - let indexes_to_cointime_price = computed_h!("cointime_price", last()); - - let this = Self { - indexes_to_coinblocks_created: computed_h!("coinblocks_created", sum_cum()), - indexes_to_coinblocks_stored: computed_h!("coinblocks_stored", sum_cum()), - indexes_to_liveliness: computed_h!("liveliness", last()), - indexes_to_vaultedness: computed_h!("vaultedness", last()), - indexes_to_activity_to_vaultedness_ratio: computed_h!( - "activity_to_vaultedness_ratio", - last() - ), - indexes_to_vaulted_supply: value_h!("vaulted_supply"), - indexes_to_active_supply: value_h!("active_supply"), - indexes_to_thermo_cap: computed_h!("thermo_cap", v1, last()), - indexes_to_investor_cap: computed_h!("investor_cap", v1, last()), - indexes_to_vaulted_cap: computed_h!("vaulted_cap", v1, last()), - indexes_to_active_cap: computed_h!("active_cap", v1, last()), - indexes_to_vaulted_price_ratio: ratio_di!("vaulted_price", &indexes_to_vaulted_price), - indexes_to_vaulted_price, - indexes_to_active_price_ratio: ratio_di!("active_price", &indexes_to_active_price), - indexes_to_active_price, - indexes_to_true_market_mean_ratio: ratio_di!( - "true_market_mean", - &indexes_to_true_market_mean - ), - indexes_to_true_market_mean, - indexes_to_cointime_value_destroyed: computed_h!("cointime_value_destroyed", sum_cum()), - indexes_to_cointime_value_created: computed_h!("cointime_value_created", sum_cum()), - indexes_to_cointime_value_stored: computed_h!("cointime_value_stored", sum_cum()), - indexes_to_cointime_cap: computed_h!("cointime_cap", last()), - indexes_to_cointime_price_ratio: ratio_di!( - "cointime_price", - &indexes_to_cointime_price - ), - indexes_to_cointime_price, - 
indexes_to_cointime_adj_inflation_rate: computed_di!( - "cointime_adj_inflation_rate", - last() - ), - indexes_to_cointime_adj_tx_btc_velocity: computed_di!( - "cointime_adj_tx_btc_velocity", - last() - ), - indexes_to_cointime_adj_tx_usd_velocity: computed_di!( - "cointime_adj_tx_usd_velocity", - last() - ), - - db, - }; - - this.db.retain_regions( - this.iter_any_exportable() - .flat_map(|v| v.region_names()) - .collect(), - )?; - this.db.compact()?; - - Ok(this) - } - - #[allow(clippy::too_many_arguments)] - pub fn compute( - &mut self, - indexes: &indexes::Vecs, - starting_indexes: &ComputeIndexes, - price: Option<&price::Vecs>, - chain: &chain::Vecs, - stateful: &stateful::Vecs, - exit: &Exit, - ) -> Result<()> { - self.compute_(indexes, starting_indexes, price, chain, stateful, exit)?; - let _lock = exit.lock(); - self.db.compact()?; - Ok(()) - } - - #[allow(clippy::too_many_arguments)] - fn compute_( - &mut self, - indexes: &indexes::Vecs, - starting_indexes: &ComputeIndexes, - price: Option<&price::Vecs>, - chain: &chain::Vecs, - stateful: &stateful::Vecs, - exit: &Exit, - ) -> Result<()> { - let circulating_supply = &stateful.utxo_cohorts.all.metrics.supply.height_to_supply; - - self.indexes_to_coinblocks_created - .compute_all(indexes, starting_indexes, exit, |vec| { - vec.compute_transform( - starting_indexes.height, - circulating_supply, - |(i, v, ..)| (i, StoredF64::from(Bitcoin::from(v))), - exit, - )?; - Ok(()) - })?; - - let indexes_to_coinblocks_destroyed = &stateful - .utxo_cohorts - .all - .metrics - .activity - .indexes_to_coinblocks_destroyed; - - self.indexes_to_coinblocks_stored - .compute_all(indexes, starting_indexes, exit, |vec| { - let mut coinblocks_destroyed_iter = indexes_to_coinblocks_destroyed - .height - .as_ref() - .unwrap() - .into_iter(); - vec.compute_transform( - starting_indexes.height, - self.indexes_to_coinblocks_created.height.u(), - |(i, created, ..)| { - let destroyed = coinblocks_destroyed_iter.get_unwrap(i); - (i, 
created.checked_sub(destroyed).unwrap()) - }, - exit, - )?; - Ok(()) - })?; - - self.indexes_to_liveliness - .compute_all(indexes, starting_indexes, exit, |vec| { - vec.compute_divide( - starting_indexes.height, - indexes_to_coinblocks_destroyed - .height_extra - .unwrap_cumulative(), - self.indexes_to_coinblocks_created - .height_extra - .unwrap_cumulative(), - exit, - )?; - Ok(()) - })?; - let liveliness = &self.indexes_to_liveliness; - - self.indexes_to_vaultedness - .compute_all(indexes, starting_indexes, exit, |vec| { - vec.compute_transform( - starting_indexes.height, - liveliness.height.u(), - |(i, v, ..)| (i, StoredF64::from(1.0).checked_sub(v).unwrap()), - exit, - )?; - Ok(()) - })?; - let vaultedness = &self.indexes_to_vaultedness; - - self.indexes_to_activity_to_vaultedness_ratio.compute_all( - indexes, - starting_indexes, - exit, - |vec| { - vec.compute_divide( - starting_indexes.height, - liveliness.height.u(), - vaultedness.height.u(), - exit, - )?; - Ok(()) - }, - )?; - - self.indexes_to_vaulted_supply.compute_all( - indexes, - price, - starting_indexes, - exit, - |vec| { - vec.compute_multiply( - starting_indexes.height, - circulating_supply, - vaultedness.height.u(), - exit, - )?; - Ok(()) - }, - )?; - - self.indexes_to_active_supply.compute_all( - indexes, - price, - starting_indexes, - exit, - |vec| { - vec.compute_multiply( - starting_indexes.height, - circulating_supply, - liveliness.height.u(), - exit, - )?; - Ok(()) - }, - )?; - - self.indexes_to_cointime_adj_inflation_rate - .compute_all(starting_indexes, exit, |v| { - v.compute_multiply( - starting_indexes.dateindex, - self.indexes_to_activity_to_vaultedness_ratio - .dateindex - .unwrap_last(), - chain.coinbase.indexes_to_inflation_rate.dateindex.u(), - exit, - )?; - Ok(()) - })?; - - self.indexes_to_cointime_adj_tx_btc_velocity - .compute_all(starting_indexes, exit, |v| { - v.compute_multiply( - starting_indexes.dateindex, - self.indexes_to_activity_to_vaultedness_ratio - .dateindex - 
.unwrap_last(), - chain.volume.indexes_to_tx_btc_velocity.dateindex.u(), - exit, - )?; - Ok(()) - })?; - - if let Some(price) = price { - let realized_cap = &stateful - .utxo_cohorts - .all - .metrics - .realized - .u() - .height_to_realized_cap; - let realized_price = stateful - .utxo_cohorts - .all - .metrics - .realized - .u() - .indexes_to_realized_price - .height - .u(); - - self.indexes_to_thermo_cap - .compute_all(indexes, starting_indexes, exit, |vec| { - vec.compute_transform( - starting_indexes.height, - chain - .coinbase.indexes_to_subsidy - .dollars - .as_ref() - .unwrap() - .height_extra - .unwrap_cumulative(), - |(i, v, ..)| (i, v), - exit, - )?; - Ok(()) - })?; - - self.indexes_to_investor_cap - .compute_all(indexes, starting_indexes, exit, |vec| { - vec.compute_subtract( - starting_indexes.height, - realized_cap, - self.indexes_to_thermo_cap.height.u(), - exit, - )?; - Ok(()) - })?; - - self.indexes_to_vaulted_cap - .compute_all(indexes, starting_indexes, exit, |vec| { - vec.compute_divide( - starting_indexes.height, - realized_cap, - self.indexes_to_vaultedness.height.u(), - exit, - )?; - Ok(()) - })?; - - self.indexes_to_active_cap - .compute_all(indexes, starting_indexes, exit, |vec| { - vec.compute_multiply( - starting_indexes.height, - realized_cap, - self.indexes_to_liveliness.height.u(), - exit, - )?; - Ok(()) - })?; - - self.indexes_to_vaulted_price - .compute_all(indexes, starting_indexes, exit, |vec| { - vec.compute_divide( - starting_indexes.height, - realized_price, - self.indexes_to_vaultedness.height.u(), - exit, - )?; - Ok(()) - })?; - - self.indexes_to_vaulted_price_ratio.compute_rest( - price, - starting_indexes, - exit, - Some(self.indexes_to_vaulted_price.dateindex.unwrap_last()), - )?; - - self.indexes_to_active_price - .compute_all(indexes, starting_indexes, exit, |vec| { - vec.compute_multiply( - starting_indexes.height, - realized_price, - self.indexes_to_liveliness.height.u(), - exit, - )?; - Ok(()) - })?; - - 
self.indexes_to_active_price_ratio.compute_rest( - price, - starting_indexes, - exit, - Some(self.indexes_to_active_price.dateindex.unwrap_last()), - )?; - - self.indexes_to_true_market_mean.compute_all( - indexes, - starting_indexes, - exit, - |vec| { - vec.compute_divide( - starting_indexes.height, - self.indexes_to_investor_cap.height.u(), - &self.indexes_to_active_supply.bitcoin.height, - exit, - )?; - Ok(()) - }, - )?; - - self.indexes_to_true_market_mean_ratio.compute_rest( - price, - starting_indexes, - exit, - Some(self.indexes_to_true_market_mean.dateindex.unwrap_last()), - )?; - - self.indexes_to_cointime_value_destroyed.compute_all( - indexes, - starting_indexes, - exit, - |vec| { - // TODO: Another example when the callback should be applied to each index, instead of to base then merging from more granular to less - // The price taken won't be correct for time based indexes - vec.compute_multiply( - starting_indexes.height, - &price.chainindexes_to_price_close.height, - indexes_to_coinblocks_destroyed.height.u(), - exit, - )?; - Ok(()) - }, - )?; - - self.indexes_to_cointime_value_created.compute_all( - indexes, - starting_indexes, - exit, - |vec| { - vec.compute_multiply( - starting_indexes.height, - &price.chainindexes_to_price_close.height, - self.indexes_to_coinblocks_created.height.u(), - exit, - )?; - Ok(()) - }, - )?; - - self.indexes_to_cointime_value_stored.compute_all( - indexes, - starting_indexes, - exit, - |vec| { - vec.compute_multiply( - starting_indexes.height, - &price.chainindexes_to_price_close.height, - self.indexes_to_coinblocks_stored.height.u(), - exit, - )?; - Ok(()) - }, - )?; - - self.indexes_to_cointime_price - .compute_all(indexes, starting_indexes, exit, |vec| { - vec.compute_divide( - starting_indexes.height, - self.indexes_to_cointime_value_destroyed - .height_extra - .unwrap_cumulative(), - self.indexes_to_coinblocks_stored - .height_extra - .unwrap_cumulative(), - exit, - )?; - Ok(()) - })?; - - 
self.indexes_to_cointime_cap - .compute_all(indexes, starting_indexes, exit, |vec| { - vec.compute_multiply( - starting_indexes.height, - self.indexes_to_cointime_price.height.u(), - circulating_supply, - exit, - )?; - Ok(()) - })?; - - self.indexes_to_cointime_price_ratio.compute_rest( - price, - starting_indexes, - exit, - Some(self.indexes_to_cointime_price.dateindex.unwrap_last()), - )?; - - self.indexes_to_cointime_adj_tx_usd_velocity.compute_all( - starting_indexes, - exit, - |v| { - v.compute_multiply( - starting_indexes.dateindex, - self.indexes_to_activity_to_vaultedness_ratio - .dateindex - .unwrap_last(), - chain.volume.indexes_to_tx_usd_velocity.dateindex.u(), - exit, - )?; - Ok(()) - }, - )?; - } - - Ok(()) - } -} diff --git a/crates/brk_computer/src/cointime/activity/compute.rs b/crates/brk_computer/src/cointime/activity/compute.rs new file mode 100644 index 000000000..95af18156 --- /dev/null +++ b/crates/brk_computer/src/cointime/activity/compute.rs @@ -0,0 +1,98 @@ +use brk_error::Result; +use brk_types::{Bitcoin, CheckedSub, StoredF64}; +use vecdb::{Exit, TypedVecIterator}; + +use super::Vecs; +use crate::{distribution, indexes, utils::OptionExt, ComputeIndexes}; + +impl Vecs { + pub fn compute( + &mut self, + indexes: &indexes::Vecs, + starting_indexes: &ComputeIndexes, + distribution: &distribution::Vecs, + exit: &Exit, + ) -> Result<()> { + let circulating_supply = &distribution.utxo_cohorts.all.metrics.supply.height_to_supply; + + self.indexes_to_coinblocks_created + .compute_all(indexes, starting_indexes, exit, |vec| { + vec.compute_transform( + starting_indexes.height, + circulating_supply, + |(i, v, ..)| (i, StoredF64::from(Bitcoin::from(v))), + exit, + )?; + Ok(()) + })?; + + let indexes_to_coinblocks_destroyed = &distribution + .utxo_cohorts + .all + .metrics + .activity + .indexes_to_coinblocks_destroyed; + + self.indexes_to_coinblocks_stored + .compute_all(indexes, starting_indexes, exit, |vec| { + let mut coinblocks_destroyed_iter = 
indexes_to_coinblocks_destroyed + .height + .as_ref() + .unwrap() + .into_iter(); + vec.compute_transform( + starting_indexes.height, + self.indexes_to_coinblocks_created.height.u(), + |(i, created, ..)| { + let destroyed = coinblocks_destroyed_iter.get_unwrap(i); + (i, created.checked_sub(destroyed).unwrap()) + }, + exit, + )?; + Ok(()) + })?; + + self.indexes_to_liveliness + .compute_all(indexes, starting_indexes, exit, |vec| { + vec.compute_divide( + starting_indexes.height, + indexes_to_coinblocks_destroyed + .height_extra + .unwrap_cumulative(), + self.indexes_to_coinblocks_created + .height_extra + .unwrap_cumulative(), + exit, + )?; + Ok(()) + })?; + + self.indexes_to_vaultedness + .compute_all(indexes, starting_indexes, exit, |vec| { + vec.compute_transform( + starting_indexes.height, + self.indexes_to_liveliness.height.u(), + |(i, v, ..)| (i, StoredF64::from(1.0).checked_sub(v).unwrap()), + exit, + )?; + Ok(()) + })?; + + self.indexes_to_activity_to_vaultedness_ratio.compute_all( + indexes, + starting_indexes, + exit, + |vec| { + vec.compute_divide( + starting_indexes.height, + self.indexes_to_liveliness.height.u(), + self.indexes_to_vaultedness.height.u(), + exit, + )?; + Ok(()) + }, + )?; + + Ok(()) + } +} diff --git a/crates/brk_computer/src/cointime/activity/import.rs b/crates/brk_computer/src/cointime/activity/import.rs new file mode 100644 index 000000000..696088a15 --- /dev/null +++ b/crates/brk_computer/src/cointime/activity/import.rs @@ -0,0 +1,40 @@ +use brk_error::Result; +use brk_types::Version; +use vecdb::Database; + +use super::Vecs; +use crate::{ + indexes, + internal::{ComputedVecsFromHeight, Source, VecBuilderOptions}, +}; + +impl Vecs { + pub fn forced_import(db: &Database, version: Version, indexes: &indexes::Vecs) -> Result { + let last = || VecBuilderOptions::default().add_last(); + let sum_cum = || VecBuilderOptions::default().add_sum().add_cumulative(); + + macro_rules! 
computed_h { + ($name:expr, $opts:expr) => { + ComputedVecsFromHeight::forced_import( + db, + $name, + Source::Compute, + version, + indexes, + $opts, + )? + }; + } + + Ok(Self { + indexes_to_coinblocks_created: computed_h!("coinblocks_created", sum_cum()), + indexes_to_coinblocks_stored: computed_h!("coinblocks_stored", sum_cum()), + indexes_to_liveliness: computed_h!("liveliness", last()), + indexes_to_vaultedness: computed_h!("vaultedness", last()), + indexes_to_activity_to_vaultedness_ratio: computed_h!( + "activity_to_vaultedness_ratio", + last() + ), + }) + } +} diff --git a/crates/brk_computer/src/cointime/activity/mod.rs b/crates/brk_computer/src/cointime/activity/mod.rs new file mode 100644 index 000000000..1136f9ebd --- /dev/null +++ b/crates/brk_computer/src/cointime/activity/mod.rs @@ -0,0 +1,5 @@ +mod compute; +mod import; +mod vecs; + +pub use vecs::Vecs; diff --git a/crates/brk_computer/src/cointime/activity/vecs.rs b/crates/brk_computer/src/cointime/activity/vecs.rs new file mode 100644 index 000000000..a7bddcfb1 --- /dev/null +++ b/crates/brk_computer/src/cointime/activity/vecs.rs @@ -0,0 +1,13 @@ +use brk_traversable::Traversable; +use brk_types::StoredF64; + +use crate::internal::ComputedVecsFromHeight; + +#[derive(Clone, Traversable)] +pub struct Vecs { + pub indexes_to_coinblocks_created: ComputedVecsFromHeight, + pub indexes_to_coinblocks_stored: ComputedVecsFromHeight, + pub indexes_to_liveliness: ComputedVecsFromHeight, + pub indexes_to_vaultedness: ComputedVecsFromHeight, + pub indexes_to_activity_to_vaultedness_ratio: ComputedVecsFromHeight, +} diff --git a/crates/brk_computer/src/cointime/adjusted/compute.rs b/crates/brk_computer/src/cointime/adjusted/compute.rs new file mode 100644 index 000000000..7208b2af3 --- /dev/null +++ b/crates/brk_computer/src/cointime/adjusted/compute.rs @@ -0,0 +1,66 @@ +use brk_error::Result; +use vecdb::Exit; + +use super::Vecs; +use super::super::activity; +use crate::{supply, ComputeIndexes, 
utils::OptionExt}; + +impl Vecs { + pub fn compute( + &mut self, + starting_indexes: &ComputeIndexes, + supply: &supply::Vecs, + activity: &activity::Vecs, + has_price: bool, + exit: &Exit, + ) -> Result<()> { + self.indexes_to_cointime_adj_inflation_rate + .compute_all(starting_indexes, exit, |v| { + v.compute_multiply( + starting_indexes.dateindex, + activity + .indexes_to_activity_to_vaultedness_ratio + .dateindex + .unwrap_last(), + supply.inflation.indexes.dateindex.u(), + exit, + )?; + Ok(()) + })?; + + self.indexes_to_cointime_adj_tx_btc_velocity + .compute_all(starting_indexes, exit, |v| { + v.compute_multiply( + starting_indexes.dateindex, + activity + .indexes_to_activity_to_vaultedness_ratio + .dateindex + .unwrap_last(), + supply.velocity.indexes_to_btc.dateindex.u(), + exit, + )?; + Ok(()) + })?; + + if has_price { + self.indexes_to_cointime_adj_tx_usd_velocity.compute_all( + starting_indexes, + exit, + |v| { + v.compute_multiply( + starting_indexes.dateindex, + activity + .indexes_to_activity_to_vaultedness_ratio + .dateindex + .unwrap_last(), + supply.velocity.indexes_to_usd.u().dateindex.u(), + exit, + )?; + Ok(()) + }, + )?; + } + + Ok(()) + } +} diff --git a/crates/brk_computer/src/cointime/adjusted/import.rs b/crates/brk_computer/src/cointime/adjusted/import.rs new file mode 100644 index 000000000..87c4d9b47 --- /dev/null +++ b/crates/brk_computer/src/cointime/adjusted/import.rs @@ -0,0 +1,34 @@ +use brk_error::Result; +use brk_types::Version; +use vecdb::Database; + +use super::Vecs; +use crate::{ + indexes, + internal::{ComputedVecsFromDateIndex, Source, VecBuilderOptions}, +}; + +impl Vecs { + pub fn forced_import(db: &Database, version: Version, indexes: &indexes::Vecs) -> Result { + let last = || VecBuilderOptions::default().add_last(); + + macro_rules! computed_di { + ($name:expr) => { + ComputedVecsFromDateIndex::forced_import( + db, + $name, + Source::Compute, + version, + indexes, + last(), + )? 
+ }; + } + + Ok(Self { + indexes_to_cointime_adj_inflation_rate: computed_di!("cointime_adj_inflation_rate"), + indexes_to_cointime_adj_tx_btc_velocity: computed_di!("cointime_adj_tx_btc_velocity"), + indexes_to_cointime_adj_tx_usd_velocity: computed_di!("cointime_adj_tx_usd_velocity"), + }) + } +} diff --git a/crates/brk_computer/src/cointime/adjusted/mod.rs b/crates/brk_computer/src/cointime/adjusted/mod.rs new file mode 100644 index 000000000..1136f9ebd --- /dev/null +++ b/crates/brk_computer/src/cointime/adjusted/mod.rs @@ -0,0 +1,5 @@ +mod compute; +mod import; +mod vecs; + +pub use vecs::Vecs; diff --git a/crates/brk_computer/src/cointime/adjusted/vecs.rs b/crates/brk_computer/src/cointime/adjusted/vecs.rs new file mode 100644 index 000000000..c0cc1fc64 --- /dev/null +++ b/crates/brk_computer/src/cointime/adjusted/vecs.rs @@ -0,0 +1,11 @@ +use brk_traversable::Traversable; +use brk_types::{StoredF32, StoredF64}; + +use crate::internal::ComputedVecsFromDateIndex; + +#[derive(Clone, Traversable)] +pub struct Vecs { + pub indexes_to_cointime_adj_inflation_rate: ComputedVecsFromDateIndex, + pub indexes_to_cointime_adj_tx_btc_velocity: ComputedVecsFromDateIndex, + pub indexes_to_cointime_adj_tx_usd_velocity: ComputedVecsFromDateIndex, +} diff --git a/crates/brk_computer/src/cointime/cap/compute.rs b/crates/brk_computer/src/cointime/cap/compute.rs new file mode 100644 index 000000000..ba1f240b6 --- /dev/null +++ b/crates/brk_computer/src/cointime/cap/compute.rs @@ -0,0 +1,115 @@ +use brk_error::Result; +use brk_types::Dollars; +use vecdb::Exit; + +use super::super::{activity, value}; +use super::Vecs; +use crate::{ComputeIndexes, blocks, distribution, indexes, utils::OptionExt}; + +impl Vecs { + #[allow(clippy::too_many_arguments)] + pub fn compute( + &mut self, + indexes: &indexes::Vecs, + starting_indexes: &ComputeIndexes, + blocks: &blocks::Vecs, + distribution: &distribution::Vecs, + activity: &activity::Vecs, + value: &value::Vecs, + exit: &Exit, + ) -> 
Result<()> { + let realized_cap = &distribution + .utxo_cohorts + .all + .metrics + .realized + .u() + .height_to_realized_cap; + + let circulating_supply = &distribution + .utxo_cohorts + .all + .metrics + .supply + .height_to_supply_value + .bitcoin; + + self.indexes_to_thermo_cap + .compute_all(indexes, starting_indexes, exit, |vec| { + vec.compute_transform( + starting_indexes.height, + blocks + .rewards + .indexes_to_subsidy + .dollars + .as_ref() + .unwrap() + .height_extra + .unwrap_cumulative(), + |(i, v, ..)| (i, v), + exit, + )?; + Ok(()) + })?; + + self.indexes_to_investor_cap + .compute_all(indexes, starting_indexes, exit, |vec| { + vec.compute_subtract( + starting_indexes.height, + realized_cap, + self.indexes_to_thermo_cap.height.u(), + exit, + )?; + Ok(()) + })?; + + self.indexes_to_vaulted_cap + .compute_all(indexes, starting_indexes, exit, |vec| { + vec.compute_divide( + starting_indexes.height, + realized_cap, + activity.indexes_to_vaultedness.height.u(), + exit, + )?; + Ok(()) + })?; + + self.indexes_to_active_cap + .compute_all(indexes, starting_indexes, exit, |vec| { + vec.compute_multiply( + starting_indexes.height, + realized_cap, + activity.indexes_to_liveliness.height.u(), + exit, + )?; + Ok(()) + })?; + + // cointime_cap = (cointime_value_destroyed_cumulative * circulating_supply) / coinblocks_stored_cumulative + self.indexes_to_cointime_cap + .compute_all(indexes, starting_indexes, exit, |vec| { + vec.compute_transform3( + starting_indexes.height, + value + .indexes_to_cointime_value_destroyed + .height_extra + .unwrap_cumulative(), + circulating_supply, + activity + .indexes_to_coinblocks_stored + .height_extra + .unwrap_cumulative(), + |(i, destroyed, supply, stored, ..)| { + let destroyed: f64 = *destroyed; + let supply: f64 = supply.into(); + let stored: f64 = *stored; + (i, Dollars::from(destroyed * supply / stored)) + }, + exit, + )?; + Ok(()) + })?; + + Ok(()) + } +} diff --git a/crates/brk_computer/src/cointime/cap/import.rs 
b/crates/brk_computer/src/cointime/cap/import.rs new file mode 100644 index 000000000..d78610831 --- /dev/null +++ b/crates/brk_computer/src/cointime/cap/import.rs @@ -0,0 +1,36 @@ +use brk_error::Result; +use brk_types::Version; +use vecdb::Database; + +use super::Vecs; +use crate::{ + indexes, + internal::{ComputedVecsFromHeight, Source, VecBuilderOptions}, +}; + +impl Vecs { + pub fn forced_import(db: &Database, version: Version, indexes: &indexes::Vecs) -> Result { + let last = || VecBuilderOptions::default().add_last(); + + macro_rules! computed_h { + ($name:expr) => { + ComputedVecsFromHeight::forced_import( + db, + $name, + Source::Compute, + version, + indexes, + last(), + )? + }; + } + + Ok(Self { + indexes_to_thermo_cap: computed_h!("thermo_cap"), + indexes_to_investor_cap: computed_h!("investor_cap"), + indexes_to_vaulted_cap: computed_h!("vaulted_cap"), + indexes_to_active_cap: computed_h!("active_cap"), + indexes_to_cointime_cap: computed_h!("cointime_cap"), + }) + } +} diff --git a/crates/brk_computer/src/cointime/cap/mod.rs b/crates/brk_computer/src/cointime/cap/mod.rs new file mode 100644 index 000000000..1136f9ebd --- /dev/null +++ b/crates/brk_computer/src/cointime/cap/mod.rs @@ -0,0 +1,5 @@ +mod compute; +mod import; +mod vecs; + +pub use vecs::Vecs; diff --git a/crates/brk_computer/src/cointime/cap/vecs.rs b/crates/brk_computer/src/cointime/cap/vecs.rs new file mode 100644 index 000000000..aa091e6e8 --- /dev/null +++ b/crates/brk_computer/src/cointime/cap/vecs.rs @@ -0,0 +1,13 @@ +use brk_traversable::Traversable; +use brk_types::Dollars; + +use crate::internal::ComputedVecsFromHeight; + +#[derive(Clone, Traversable)] +pub struct Vecs { + pub indexes_to_thermo_cap: ComputedVecsFromHeight, + pub indexes_to_investor_cap: ComputedVecsFromHeight, + pub indexes_to_vaulted_cap: ComputedVecsFromHeight, + pub indexes_to_active_cap: ComputedVecsFromHeight, + pub indexes_to_cointime_cap: ComputedVecsFromHeight, +} diff --git 
a/crates/brk_computer/src/cointime/compute.rs b/crates/brk_computer/src/cointime/compute.rs new file mode 100644 index 000000000..6e4970852 --- /dev/null +++ b/crates/brk_computer/src/cointime/compute.rs @@ -0,0 +1,82 @@ +use brk_error::Result; +use vecdb::Exit; + +use super::Vecs; +use crate::{blocks, distribution, indexes, price, supply, ComputeIndexes}; + +impl Vecs { + #[allow(clippy::too_many_arguments)] + pub fn compute( + &mut self, + indexes: &indexes::Vecs, + starting_indexes: &ComputeIndexes, + price: Option<&price::Vecs>, + blocks: &blocks::Vecs, + supply_vecs: &supply::Vecs, + distribution: &distribution::Vecs, + exit: &Exit, + ) -> Result<()> { + // Activity computes first (liveliness, vaultedness, etc.) + self.activity + .compute(indexes, starting_indexes, distribution, exit)?; + + // Supply computes next (depends on activity) + self.supply.compute( + indexes, + starting_indexes, + price, + distribution, + &self.activity, + exit, + )?; + + // Adjusted velocity metrics (BTC) - can compute without price + self.adjusted.compute( + starting_indexes, + supply_vecs, + &self.activity, + price.is_some(), + exit, + )?; + + // Price-dependent metrics + if let Some(price) = price { + // Value computes (cointime value destroyed/created/stored) + self.value.compute( + indexes, + starting_indexes, + price, + distribution, + &self.activity, + exit, + )?; + + // Cap computes (thermo, investor, vaulted, active, cointime caps) + self.cap.compute( + indexes, + starting_indexes, + blocks, + distribution, + &self.activity, + &self.value, + exit, + )?; + + // Pricing computes (all prices derived from caps) + self.pricing.compute( + indexes, + starting_indexes, + price, + distribution, + &self.activity, + &self.supply, + &self.cap, + exit, + )?; + } + + let _lock = exit.lock(); + self.db.compact()?; + Ok(()) + } +} diff --git a/crates/brk_computer/src/cointime/import.rs b/crates/brk_computer/src/cointime/import.rs new file mode 100644 index 000000000..9b9214e00 --- 
/dev/null +++ b/crates/brk_computer/src/cointime/import.rs @@ -0,0 +1,53 @@ +use std::path::Path; + +use brk_error::Result; +use brk_traversable::Traversable; +use brk_types::Version; +use vecdb::{Database, PAGE_SIZE}; + +use super::{ + ActivityVecs, AdjustedVecs, CapVecs, PricingVecs, SupplyVecs, ValueVecs, Vecs, DB_NAME, +}; +use crate::{indexes, price}; + +impl Vecs { + pub fn forced_import( + parent_path: &Path, + parent_version: Version, + indexes: &indexes::Vecs, + price: Option<&price::Vecs>, + ) -> Result { + let db = Database::open(&parent_path.join(DB_NAME))?; + db.set_min_len(PAGE_SIZE * 1_000_000)?; + + let compute_dollars = price.is_some(); + let v0 = parent_version; + let v1 = parent_version + Version::ONE; + + let activity = ActivityVecs::forced_import(&db, v0, indexes)?; + let supply = SupplyVecs::forced_import(&db, v1, indexes, compute_dollars)?; + let value = ValueVecs::forced_import(&db, v1, indexes)?; + let cap = CapVecs::forced_import(&db, v1, indexes)?; + let pricing = PricingVecs::forced_import(&db, v0, indexes, price)?; + let adjusted = AdjustedVecs::forced_import(&db, v0, indexes)?; + + let this = Self { + db, + activity, + supply, + value, + cap, + pricing, + adjusted, + }; + + this.db.retain_regions( + this.iter_any_exportable() + .flat_map(|v| v.region_names()) + .collect(), + )?; + this.db.compact()?; + + Ok(this) + } +} diff --git a/crates/brk_computer/src/cointime/mod.rs b/crates/brk_computer/src/cointime/mod.rs new file mode 100644 index 000000000..377ee45f0 --- /dev/null +++ b/crates/brk_computer/src/cointime/mod.rs @@ -0,0 +1,34 @@ +pub mod activity; +pub mod adjusted; +pub mod cap; +pub mod pricing; +pub mod supply; +pub mod value; + +mod compute; +mod import; + +use brk_traversable::Traversable; +use vecdb::Database; + +pub use activity::Vecs as ActivityVecs; +pub use adjusted::Vecs as AdjustedVecs; +pub use cap::Vecs as CapVecs; +pub use pricing::Vecs as PricingVecs; +pub use supply::Vecs as SupplyVecs; +pub use value::Vecs as 
ValueVecs; + +pub const DB_NAME: &str = "cointime"; + +#[derive(Clone, Traversable)] +pub struct Vecs { + #[traversable(skip)] + pub(crate) db: Database, + + pub activity: ActivityVecs, + pub supply: SupplyVecs, + pub value: ValueVecs, + pub cap: CapVecs, + pub pricing: PricingVecs, + pub adjusted: AdjustedVecs, +} diff --git a/crates/brk_computer/src/cointime/pricing/compute.rs b/crates/brk_computer/src/cointime/pricing/compute.rs new file mode 100644 index 000000000..d98b11832 --- /dev/null +++ b/crates/brk_computer/src/cointime/pricing/compute.rs @@ -0,0 +1,111 @@ +use brk_error::Result; +use vecdb::Exit; + +use super::super::{activity, cap, supply}; +use super::Vecs; +use crate::{distribution, indexes, price, utils::OptionExt, ComputeIndexes}; + +impl Vecs { + #[allow(clippy::too_many_arguments)] + pub fn compute( + &mut self, + indexes: &indexes::Vecs, + starting_indexes: &ComputeIndexes, + price: &price::Vecs, + distribution: &distribution::Vecs, + activity: &activity::Vecs, + supply: &supply::Vecs, + cap: &cap::Vecs, + exit: &Exit, + ) -> Result<()> { + let circulating_supply = &distribution.utxo_cohorts.all.metrics.supply.height_to_supply_value.bitcoin; + let realized_price = distribution + .utxo_cohorts + .all + .metrics + .realized + .u() + .indexes_to_realized_price + .height + .u(); + + self.indexes_to_vaulted_price + .compute_all(indexes, starting_indexes, exit, |vec| { + vec.compute_divide( + starting_indexes.height, + realized_price, + activity.indexes_to_vaultedness.height.u(), + exit, + )?; + Ok(()) + })?; + + self.indexes_to_vaulted_price_ratio.compute_rest( + price, + starting_indexes, + exit, + Some(self.indexes_to_vaulted_price.dateindex.unwrap_last()), + )?; + + self.indexes_to_active_price + .compute_all(indexes, starting_indexes, exit, |vec| { + vec.compute_multiply( + starting_indexes.height, + realized_price, + activity.indexes_to_liveliness.height.u(), + exit, + )?; + Ok(()) + })?; + + self.indexes_to_active_price_ratio.compute_rest( + 
price, + starting_indexes, + exit, + Some(self.indexes_to_active_price.dateindex.unwrap_last()), + )?; + + self.indexes_to_true_market_mean.compute_all( + indexes, + starting_indexes, + exit, + |vec| { + vec.compute_divide( + starting_indexes.height, + cap.indexes_to_investor_cap.height.u(), + &supply.indexes_to_active_supply.bitcoin.height, + exit, + )?; + Ok(()) + }, + )?; + + self.indexes_to_true_market_mean_ratio.compute_rest( + price, + starting_indexes, + exit, + Some(self.indexes_to_true_market_mean.dateindex.unwrap_last()), + )?; + + // cointime_price = cointime_cap / circulating_supply + self.indexes_to_cointime_price + .compute_all(indexes, starting_indexes, exit, |vec| { + vec.compute_divide( + starting_indexes.height, + cap.indexes_to_cointime_cap.height.u(), + circulating_supply, + exit, + )?; + Ok(()) + })?; + + self.indexes_to_cointime_price_ratio.compute_rest( + price, + starting_indexes, + exit, + Some(self.indexes_to_cointime_price.dateindex.unwrap_last()), + )?; + + Ok(()) + } +} diff --git a/crates/brk_computer/src/cointime/pricing/import.rs b/crates/brk_computer/src/cointime/pricing/import.rs new file mode 100644 index 000000000..ef2f4b0eb --- /dev/null +++ b/crates/brk_computer/src/cointime/pricing/import.rs @@ -0,0 +1,70 @@ +use brk_error::Result; +use brk_types::Version; +use vecdb::Database; + +use super::Vecs; +use crate::{ + indexes, price, + internal::{ComputedRatioVecsFromDateIndex, ComputedVecsFromHeight, Source, VecBuilderOptions}, +}; + +impl Vecs { + pub fn forced_import( + db: &Database, + version: Version, + indexes: &indexes::Vecs, + price: Option<&price::Vecs>, + ) -> Result { + let last = || VecBuilderOptions::default().add_last(); + + macro_rules! computed_h { + ($name:expr) => { + ComputedVecsFromHeight::forced_import( + db, + $name, + Source::Compute, + version, + indexes, + last(), + )? 
+ }; + } + + // Extract price vecs before struct literal so they can be used as sources for ratios + let indexes_to_vaulted_price = computed_h!("vaulted_price"); + let indexes_to_active_price = computed_h!("active_price"); + let indexes_to_true_market_mean = computed_h!("true_market_mean"); + let indexes_to_cointime_price = computed_h!("cointime_price"); + + macro_rules! ratio_di { + ($name:expr, $source:expr) => { + ComputedRatioVecsFromDateIndex::forced_import( + db, + $name, + Some($source), + version, + indexes, + true, + price, + )? + }; + } + + Ok(Self { + indexes_to_vaulted_price_ratio: ratio_di!("vaulted_price", &indexes_to_vaulted_price), + indexes_to_vaulted_price, + indexes_to_active_price_ratio: ratio_di!("active_price", &indexes_to_active_price), + indexes_to_active_price, + indexes_to_true_market_mean_ratio: ratio_di!( + "true_market_mean", + &indexes_to_true_market_mean + ), + indexes_to_true_market_mean, + indexes_to_cointime_price_ratio: ratio_di!( + "cointime_price", + &indexes_to_cointime_price + ), + indexes_to_cointime_price, + }) + } +} diff --git a/crates/brk_computer/src/cointime/pricing/mod.rs b/crates/brk_computer/src/cointime/pricing/mod.rs new file mode 100644 index 000000000..1136f9ebd --- /dev/null +++ b/crates/brk_computer/src/cointime/pricing/mod.rs @@ -0,0 +1,5 @@ +mod compute; +mod import; +mod vecs; + +pub use vecs::Vecs; diff --git a/crates/brk_computer/src/cointime/pricing/vecs.rs b/crates/brk_computer/src/cointime/pricing/vecs.rs new file mode 100644 index 000000000..85d641fc7 --- /dev/null +++ b/crates/brk_computer/src/cointime/pricing/vecs.rs @@ -0,0 +1,16 @@ +use brk_traversable::Traversable; +use brk_types::Dollars; + +use crate::internal::{ComputedRatioVecsFromDateIndex, ComputedVecsFromHeight}; + +#[derive(Clone, Traversable)] +pub struct Vecs { + pub indexes_to_vaulted_price: ComputedVecsFromHeight, + pub indexes_to_vaulted_price_ratio: ComputedRatioVecsFromDateIndex, + pub indexes_to_active_price: 
ComputedVecsFromHeight, + pub indexes_to_active_price_ratio: ComputedRatioVecsFromDateIndex, + pub indexes_to_true_market_mean: ComputedVecsFromHeight, + pub indexes_to_true_market_mean_ratio: ComputedRatioVecsFromDateIndex, + pub indexes_to_cointime_price: ComputedVecsFromHeight, + pub indexes_to_cointime_price_ratio: ComputedRatioVecsFromDateIndex, +} diff --git a/crates/brk_computer/src/cointime/supply/compute.rs b/crates/brk_computer/src/cointime/supply/compute.rs new file mode 100644 index 000000000..16fad3974 --- /dev/null +++ b/crates/brk_computer/src/cointime/supply/compute.rs @@ -0,0 +1,54 @@ +use brk_error::Result; +use vecdb::Exit; + +use super::Vecs; +use super::super::activity; +use crate::{distribution, indexes, price, ComputeIndexes, utils::OptionExt}; + +impl Vecs { + pub fn compute( + &mut self, + indexes: &indexes::Vecs, + starting_indexes: &ComputeIndexes, + price: Option<&price::Vecs>, + distribution: &distribution::Vecs, + activity: &activity::Vecs, + exit: &Exit, + ) -> Result<()> { + let circulating_supply = &distribution.utxo_cohorts.all.metrics.supply.height_to_supply; + + self.indexes_to_vaulted_supply.compute_all( + indexes, + price, + starting_indexes, + exit, + |vec| { + vec.compute_multiply( + starting_indexes.height, + circulating_supply, + activity.indexes_to_vaultedness.height.u(), + exit, + )?; + Ok(()) + }, + )?; + + self.indexes_to_active_supply.compute_all( + indexes, + price, + starting_indexes, + exit, + |vec| { + vec.compute_multiply( + starting_indexes.height, + circulating_supply, + activity.indexes_to_liveliness.height.u(), + exit, + )?; + Ok(()) + }, + )?; + + Ok(()) + } +} diff --git a/crates/brk_computer/src/cointime/supply/import.rs b/crates/brk_computer/src/cointime/supply/import.rs new file mode 100644 index 000000000..f7332fad1 --- /dev/null +++ b/crates/brk_computer/src/cointime/supply/import.rs @@ -0,0 +1,39 @@ +use brk_error::Result; +use brk_types::Version; +use vecdb::Database; + +use super::Vecs; +use crate::{ 
+ indexes, + internal::{ComputedValueVecsFromHeight, Source, VecBuilderOptions}, +}; + +impl Vecs { + pub fn forced_import( + db: &Database, + version: Version, + indexes: &indexes::Vecs, + compute_dollars: bool, + ) -> Result { + let last = || VecBuilderOptions::default().add_last(); + + macro_rules! value_h { + ($name:expr) => { + ComputedValueVecsFromHeight::forced_import( + db, + $name, + Source::Compute, + version, + last(), + compute_dollars, + indexes, + )? + }; + } + + Ok(Self { + indexes_to_vaulted_supply: value_h!("vaulted_supply"), + indexes_to_active_supply: value_h!("active_supply"), + }) + } +} diff --git a/crates/brk_computer/src/cointime/supply/mod.rs b/crates/brk_computer/src/cointime/supply/mod.rs new file mode 100644 index 000000000..1136f9ebd --- /dev/null +++ b/crates/brk_computer/src/cointime/supply/mod.rs @@ -0,0 +1,5 @@ +mod compute; +mod import; +mod vecs; + +pub use vecs::Vecs; diff --git a/crates/brk_computer/src/cointime/supply/vecs.rs b/crates/brk_computer/src/cointime/supply/vecs.rs new file mode 100644 index 000000000..83e981ec8 --- /dev/null +++ b/crates/brk_computer/src/cointime/supply/vecs.rs @@ -0,0 +1,9 @@ +use brk_traversable::Traversable; + +use crate::internal::ComputedValueVecsFromHeight; + +#[derive(Clone, Traversable)] +pub struct Vecs { + pub indexes_to_vaulted_supply: ComputedValueVecsFromHeight, + pub indexes_to_active_supply: ComputedValueVecsFromHeight, +} diff --git a/crates/brk_computer/src/cointime/value/compute.rs b/crates/brk_computer/src/cointime/value/compute.rs new file mode 100644 index 000000000..fca533aba --- /dev/null +++ b/crates/brk_computer/src/cointime/value/compute.rs @@ -0,0 +1,72 @@ +use brk_error::Result; +use vecdb::Exit; + +use super::super::activity; +use super::Vecs; +use crate::{distribution, indexes, price, utils::OptionExt, ComputeIndexes}; + +impl Vecs { + pub fn compute( + &mut self, + indexes: &indexes::Vecs, + starting_indexes: &ComputeIndexes, + price: &price::Vecs, + distribution: 
&distribution::Vecs, + activity: &activity::Vecs, + exit: &Exit, + ) -> Result<()> { + let indexes_to_coinblocks_destroyed = &distribution + .utxo_cohorts + .all + .metrics + .activity + .indexes_to_coinblocks_destroyed; + + self.indexes_to_cointime_value_destroyed.compute_all( + indexes, + starting_indexes, + exit, + |vec| { + vec.compute_multiply( + starting_indexes.height, + &price.usd.chainindexes_to_price_close.height, + indexes_to_coinblocks_destroyed.height.u(), + exit, + )?; + Ok(()) + }, + )?; + + self.indexes_to_cointime_value_created.compute_all( + indexes, + starting_indexes, + exit, + |vec| { + vec.compute_multiply( + starting_indexes.height, + &price.usd.chainindexes_to_price_close.height, + activity.indexes_to_coinblocks_created.height.u(), + exit, + )?; + Ok(()) + }, + )?; + + self.indexes_to_cointime_value_stored.compute_all( + indexes, + starting_indexes, + exit, + |vec| { + vec.compute_multiply( + starting_indexes.height, + &price.usd.chainindexes_to_price_close.height, + activity.indexes_to_coinblocks_stored.height.u(), + exit, + )?; + Ok(()) + }, + )?; + + Ok(()) + } +} diff --git a/crates/brk_computer/src/cointime/value/import.rs b/crates/brk_computer/src/cointime/value/import.rs new file mode 100644 index 000000000..4ce53422f --- /dev/null +++ b/crates/brk_computer/src/cointime/value/import.rs @@ -0,0 +1,34 @@ +use brk_error::Result; +use brk_types::Version; +use vecdb::Database; + +use super::Vecs; +use crate::{ + indexes, + internal::{ComputedVecsFromHeight, Source, VecBuilderOptions}, +}; + +impl Vecs { + pub fn forced_import(db: &Database, version: Version, indexes: &indexes::Vecs) -> Result { + let sum_cum = || VecBuilderOptions::default().add_sum().add_cumulative(); + + macro_rules! computed_h { + ($name:expr) => { + ComputedVecsFromHeight::forced_import( + db, + $name, + Source::Compute, + version, + indexes, + sum_cum(), + )? 
+ }; + } + + Ok(Self { + indexes_to_cointime_value_destroyed: computed_h!("cointime_value_destroyed"), + indexes_to_cointime_value_created: computed_h!("cointime_value_created"), + indexes_to_cointime_value_stored: computed_h!("cointime_value_stored"), + }) + } +} diff --git a/crates/brk_computer/src/cointime/value/mod.rs b/crates/brk_computer/src/cointime/value/mod.rs new file mode 100644 index 000000000..1136f9ebd --- /dev/null +++ b/crates/brk_computer/src/cointime/value/mod.rs @@ -0,0 +1,5 @@ +mod compute; +mod import; +mod vecs; + +pub use vecs::Vecs; diff --git a/crates/brk_computer/src/cointime/value/vecs.rs b/crates/brk_computer/src/cointime/value/vecs.rs new file mode 100644 index 000000000..e906e0796 --- /dev/null +++ b/crates/brk_computer/src/cointime/value/vecs.rs @@ -0,0 +1,11 @@ +use brk_traversable::Traversable; +use brk_types::StoredF64; + +use crate::internal::ComputedVecsFromHeight; + +#[derive(Clone, Traversable)] +pub struct Vecs { + pub indexes_to_cointime_value_destroyed: ComputedVecsFromHeight, + pub indexes_to_cointime_value_created: ComputedVecsFromHeight, + pub indexes_to_cointime_value_stored: ComputedVecsFromHeight, +} diff --git a/crates/brk_computer/src/constants.rs b/crates/brk_computer/src/constants.rs index 73b437147..5e1b7e628 100644 --- a/crates/brk_computer/src/constants.rs +++ b/crates/brk_computer/src/constants.rs @@ -2,7 +2,7 @@ use brk_traversable::Traversable; use brk_types::{StoredF32, StoredI16, StoredU16, Version}; use super::{ - grouped::{ConstantVecs, ReturnF32Tenths, ReturnI16, ReturnU16}, + internal::{ConstantVecs, ReturnF32Tenths, ReturnI16, ReturnU16}, indexes, }; diff --git a/crates/brk_computer/src/stateful/address/address_count.rs b/crates/brk_computer/src/distribution/address/address_count.rs similarity index 94% rename from crates/brk_computer/src/stateful/address/address_count.rs rename to crates/brk_computer/src/distribution/address/address_count.rs index 943c020ca..1dc7d0eef 100644 --- 
a/crates/brk_computer/src/stateful/address/address_count.rs +++ b/crates/brk_computer/src/distribution/address/address_count.rs @@ -10,9 +10,8 @@ use vecdb::{ }; use crate::{ - ComputeIndexes, - grouped::{ComputedVecsFromHeight, Source, VecBuilderOptions}, - indexes, + ComputeIndexes, indexes, + internal::{ComputedVecsFromHeight, Source, VecBuilderOptions}, }; /// Address count per address type (runtime state). @@ -75,18 +74,6 @@ impl AddressTypeToHeightToAddressCount { })?)) } - pub fn write(&mut self) -> Result<()> { - self.p2pk65.write()?; - self.p2pk33.write()?; - self.p2pkh.write()?; - self.p2sh.write()?; - self.p2wpkh.write()?; - self.p2wsh.write()?; - self.p2tr.write()?; - self.p2a.write()?; - Ok(()) - } - /// Returns a parallel iterator over all vecs for parallel writing. pub fn par_iter_mut(&mut self) -> impl ParallelIterator { let inner = &mut self.0; @@ -103,15 +90,15 @@ impl AddressTypeToHeightToAddressCount { .into_par_iter() } - pub fn safe_write(&mut self, exit: &Exit) -> Result<()> { - self.p2pk65.safe_write(exit)?; - self.p2pk33.safe_write(exit)?; - self.p2pkh.safe_write(exit)?; - self.p2sh.safe_write(exit)?; - self.p2wpkh.safe_write(exit)?; - self.p2wsh.safe_write(exit)?; - self.p2tr.safe_write(exit)?; - self.p2a.safe_write(exit)?; + pub fn write(&mut self) -> Result<()> { + self.p2pk65.write()?; + self.p2pk33.write()?; + self.p2pkh.write()?; + self.p2sh.write()?; + self.p2wpkh.write()?; + self.p2wsh.write()?; + self.p2tr.write()?; + self.p2a.write()?; Ok(()) } diff --git a/crates/brk_computer/src/stateful/address/data.rs b/crates/brk_computer/src/distribution/address/data.rs similarity index 100% rename from crates/brk_computer/src/stateful/address/data.rs rename to crates/brk_computer/src/distribution/address/data.rs diff --git a/crates/brk_computer/src/stateful/address/indexes/any.rs b/crates/brk_computer/src/distribution/address/indexes/any.rs similarity index 100% rename from crates/brk_computer/src/stateful/address/indexes/any.rs rename to 
crates/brk_computer/src/distribution/address/indexes/any.rs diff --git a/crates/brk_computer/src/stateful/address/indexes/mod.rs b/crates/brk_computer/src/distribution/address/indexes/mod.rs similarity index 100% rename from crates/brk_computer/src/stateful/address/indexes/mod.rs rename to crates/brk_computer/src/distribution/address/indexes/mod.rs diff --git a/crates/brk_computer/src/stateful/address/mod.rs b/crates/brk_computer/src/distribution/address/mod.rs similarity index 100% rename from crates/brk_computer/src/stateful/address/mod.rs rename to crates/brk_computer/src/distribution/address/mod.rs diff --git a/crates/brk_computer/src/stateful/address/type_map/height_vec.rs b/crates/brk_computer/src/distribution/address/type_map/height_vec.rs similarity index 100% rename from crates/brk_computer/src/stateful/address/type_map/height_vec.rs rename to crates/brk_computer/src/distribution/address/type_map/height_vec.rs diff --git a/crates/brk_computer/src/stateful/address/type_map/index_map.rs b/crates/brk_computer/src/distribution/address/type_map/index_map.rs similarity index 100% rename from crates/brk_computer/src/stateful/address/type_map/index_map.rs rename to crates/brk_computer/src/distribution/address/type_map/index_map.rs diff --git a/crates/brk_computer/src/stateful/address/type_map/mod.rs b/crates/brk_computer/src/distribution/address/type_map/mod.rs similarity index 100% rename from crates/brk_computer/src/stateful/address/type_map/mod.rs rename to crates/brk_computer/src/distribution/address/type_map/mod.rs diff --git a/crates/brk_computer/src/stateful/address/type_map/vec.rs b/crates/brk_computer/src/distribution/address/type_map/vec.rs similarity index 100% rename from crates/brk_computer/src/stateful/address/type_map/vec.rs rename to crates/brk_computer/src/distribution/address/type_map/vec.rs diff --git a/crates/brk_computer/src/stateful/block/cache/address.rs b/crates/brk_computer/src/distribution/block/cache/address.rs similarity index 99% 
rename from crates/brk_computer/src/stateful/block/cache/address.rs rename to crates/brk_computer/src/distribution/block/cache/address.rs index 734539fda..b5b6da32f 100644 --- a/crates/brk_computer/src/stateful/block/cache/address.rs +++ b/crates/brk_computer/src/distribution/block/cache/address.rs @@ -2,7 +2,7 @@ use brk_cohort::ByAddressType; use brk_types::{AnyAddressDataIndexEnum, LoadedAddressData, OutputType, TypeIndex}; use vecdb::GenericStoredVec; -use crate::stateful::{ +use crate::distribution::{ address::{AddressTypeToTypeIndexMap, AddressesDataVecs, AnyAddressIndexesVecs}, compute::VecsReaders, }; diff --git a/crates/brk_computer/src/stateful/block/cache/lookup.rs b/crates/brk_computer/src/distribution/block/cache/lookup.rs similarity index 98% rename from crates/brk_computer/src/stateful/block/cache/lookup.rs rename to crates/brk_computer/src/distribution/block/cache/lookup.rs index 00f825193..8f2674c57 100644 --- a/crates/brk_computer/src/stateful/block/cache/lookup.rs +++ b/crates/brk_computer/src/distribution/block/cache/lookup.rs @@ -1,6 +1,6 @@ use brk_types::{LoadedAddressData, OutputType, TypeIndex}; -use crate::stateful::address::AddressTypeToTypeIndexMap; +use crate::distribution::address::AddressTypeToTypeIndexMap; use super::super::cohort::{ EmptyAddressDataWithSource, LoadedAddressDataWithSource, WithAddressDataSource, diff --git a/crates/brk_computer/src/stateful/block/cache/mod.rs b/crates/brk_computer/src/distribution/block/cache/mod.rs similarity index 100% rename from crates/brk_computer/src/stateful/block/cache/mod.rs rename to crates/brk_computer/src/distribution/block/cache/mod.rs diff --git a/crates/brk_computer/src/stateful/block/cohort/address_updates.rs b/crates/brk_computer/src/distribution/block/cohort/address_updates.rs similarity index 98% rename from crates/brk_computer/src/stateful/block/cohort/address_updates.rs rename to crates/brk_computer/src/distribution/block/cohort/address_updates.rs index bea90d700..7b289eb36 
100644 --- a/crates/brk_computer/src/stateful/block/cohort/address_updates.rs +++ b/crates/brk_computer/src/distribution/block/cohort/address_updates.rs @@ -4,7 +4,7 @@ use brk_types::{ OutputType, TypeIndex, }; -use crate::stateful::{AddressTypeToTypeIndexMap, AddressesDataVecs}; +use crate::distribution::{AddressTypeToTypeIndexMap, AddressesDataVecs}; use super::with_source::{EmptyAddressDataWithSource, LoadedAddressDataWithSource}; diff --git a/crates/brk_computer/src/stateful/block/cohort/mod.rs b/crates/brk_computer/src/distribution/block/cohort/mod.rs similarity index 100% rename from crates/brk_computer/src/stateful/block/cohort/mod.rs rename to crates/brk_computer/src/distribution/block/cohort/mod.rs diff --git a/crates/brk_computer/src/stateful/block/cohort/received.rs b/crates/brk_computer/src/distribution/block/cohort/received.rs similarity index 98% rename from crates/brk_computer/src/stateful/block/cohort/received.rs rename to crates/brk_computer/src/distribution/block/cohort/received.rs index 3491c3ed5..903373827 100644 --- a/crates/brk_computer/src/stateful/block/cohort/received.rs +++ b/crates/brk_computer/src/distribution/block/cohort/received.rs @@ -2,7 +2,7 @@ use brk_cohort::{AmountBucket, ByAddressType}; use brk_types::{Dollars, Sats, TypeIndex}; use rustc_hash::FxHashMap; -use crate::stateful::{address::AddressTypeToVec, cohorts::AddressCohorts}; +use crate::distribution::{address::AddressTypeToVec, cohorts::AddressCohorts}; use super::super::cache::{AddressLookup, TrackingStatus}; diff --git a/crates/brk_computer/src/stateful/block/cohort/sent.rs b/crates/brk_computer/src/distribution/block/cohort/sent.rs similarity index 98% rename from crates/brk_computer/src/stateful/block/cohort/sent.rs rename to crates/brk_computer/src/distribution/block/cohort/sent.rs index f0ed7fd0b..d305f132a 100644 --- a/crates/brk_computer/src/stateful/block/cohort/sent.rs +++ b/crates/brk_computer/src/distribution/block/cohort/sent.rs @@ -3,7 +3,7 @@ use 
brk_error::Result; use brk_types::{CheckedSub, Dollars, Height, Sats, Timestamp, TypeIndex}; use vecdb::{VecIndex, unlikely}; -use crate::stateful::{address::HeightToAddressTypeToVec, cohorts::AddressCohorts}; +use crate::distribution::{address::HeightToAddressTypeToVec, cohorts::AddressCohorts}; use super::super::cache::AddressLookup; diff --git a/crates/brk_computer/src/stateful/block/cohort/tx_counts.rs b/crates/brk_computer/src/distribution/block/cohort/tx_counts.rs similarity index 96% rename from crates/brk_computer/src/stateful/block/cohort/tx_counts.rs rename to crates/brk_computer/src/distribution/block/cohort/tx_counts.rs index 0656c2c9c..72838f0ab 100644 --- a/crates/brk_computer/src/stateful/block/cohort/tx_counts.rs +++ b/crates/brk_computer/src/distribution/block/cohort/tx_counts.rs @@ -1,4 +1,4 @@ -use crate::stateful::address::AddressTypeToTypeIndexMap; +use crate::distribution::address::AddressTypeToTypeIndexMap; use super::with_source::{EmptyAddressDataWithSource, LoadedAddressDataWithSource, TxIndexVec}; diff --git a/crates/brk_computer/src/stateful/block/cohort/with_source.rs b/crates/brk_computer/src/distribution/block/cohort/with_source.rs similarity index 100% rename from crates/brk_computer/src/stateful/block/cohort/with_source.rs rename to crates/brk_computer/src/distribution/block/cohort/with_source.rs diff --git a/crates/brk_computer/src/stateful/block/mod.rs b/crates/brk_computer/src/distribution/block/mod.rs similarity index 100% rename from crates/brk_computer/src/stateful/block/mod.rs rename to crates/brk_computer/src/distribution/block/mod.rs diff --git a/crates/brk_computer/src/stateful/block/utxo/inputs.rs b/crates/brk_computer/src/distribution/block/utxo/inputs.rs similarity index 98% rename from crates/brk_computer/src/stateful/block/utxo/inputs.rs rename to crates/brk_computer/src/distribution/block/utxo/inputs.rs index 2aefbaafe..9185e689e 100644 --- a/crates/brk_computer/src/stateful/block/utxo/inputs.rs +++ 
b/crates/brk_computer/src/distribution/block/utxo/inputs.rs @@ -3,13 +3,13 @@ use brk_types::{Height, OutputType, Sats, TxIndex, TypeIndex}; use rayon::prelude::*; use rustc_hash::FxHashMap; -use crate::stateful::{ +use crate::distribution::{ address::{AddressTypeToTypeIndexMap, AddressesDataVecs, AnyAddressIndexesVecs}, compute::VecsReaders, state::Transacted, }; -use crate::stateful::address::HeightToAddressTypeToVec; +use crate::distribution::address::HeightToAddressTypeToVec; use super::super::{ cache::{AddressCache, load_uncached_address_data}, diff --git a/crates/brk_computer/src/stateful/block/utxo/mod.rs b/crates/brk_computer/src/distribution/block/utxo/mod.rs similarity index 100% rename from crates/brk_computer/src/stateful/block/utxo/mod.rs rename to crates/brk_computer/src/distribution/block/utxo/mod.rs diff --git a/crates/brk_computer/src/stateful/block/utxo/outputs.rs b/crates/brk_computer/src/distribution/block/utxo/outputs.rs similarity index 99% rename from crates/brk_computer/src/stateful/block/utxo/outputs.rs rename to crates/brk_computer/src/distribution/block/utxo/outputs.rs index fe361e183..e06289376 100644 --- a/crates/brk_computer/src/stateful/block/utxo/outputs.rs +++ b/crates/brk_computer/src/distribution/block/utxo/outputs.rs @@ -1,7 +1,7 @@ use brk_cohort::ByAddressType; use brk_types::{Sats, TxIndex, TypeIndex}; -use crate::stateful::{ +use crate::distribution::{ address::{ AddressTypeToTypeIndexMap, AddressTypeToVec, AddressesDataVecs, AnyAddressIndexesVecs, }, diff --git a/crates/brk_computer/src/stateful/cohorts/address/groups.rs b/crates/brk_computer/src/distribution/cohorts/address/groups.rs similarity index 92% rename from crates/brk_computer/src/stateful/cohorts/address/groups.rs rename to crates/brk_computer/src/distribution/cohorts/address/groups.rs index e051fb3a4..82093727d 100644 --- a/crates/brk_computer/src/stateful/cohorts/address/groups.rs +++ b/crates/brk_computer/src/distribution/cohorts/address/groups.rs @@ -10,9 
+10,9 @@ use derive_deref::{Deref, DerefMut}; use rayon::prelude::*; use vecdb::{AnyStoredVec, Database, Exit, IterableVec}; -use crate::{ComputeIndexes, indexes, price, stateful::DynCohortVecs}; +use crate::{ComputeIndexes, indexes, price, distribution::DynCohortVecs}; -use crate::stateful::metrics::SupplyMetrics; +use crate::distribution::metrics::SupplyMetrics; use super::{super::traits::CohortVecs, vecs::AddressCohortVecs}; @@ -164,13 +164,21 @@ impl AddressCohorts { } /// Get minimum height from all separate cohorts' height-indexed vectors. - pub fn min_separate_height_vecs_len(&self) -> Height { + pub fn min_separate_stateful_height_len(&self) -> Height { self.iter_separate() - .map(|v| Height::from(v.min_height_vecs_len())) + .map(|v| Height::from(v.min_stateful_height_len())) .min() .unwrap_or_default() } + /// Get minimum dateindex from all separate cohorts' dateindex-indexed vectors. + pub fn min_separate_stateful_dateindex_len(&self) -> usize { + self.iter_separate() + .map(|v| v.min_stateful_dateindex_len()) + .min() + .unwrap_or(usize::MAX) + } + /// Import state for all separate cohorts at or before given height. /// Returns true if all imports succeeded and returned the expected height. 
pub fn import_separate_states(&mut self, height: Height) -> bool { diff --git a/crates/brk_computer/src/stateful/cohorts/address/mod.rs b/crates/brk_computer/src/distribution/cohorts/address/mod.rs similarity index 100% rename from crates/brk_computer/src/stateful/cohorts/address/mod.rs rename to crates/brk_computer/src/distribution/cohorts/address/mod.rs diff --git a/crates/brk_computer/src/stateful/cohorts/address/vecs.rs b/crates/brk_computer/src/distribution/cohorts/address/vecs.rs similarity index 94% rename from crates/brk_computer/src/stateful/cohorts/address/vecs.rs rename to crates/brk_computer/src/distribution/cohorts/address/vecs.rs index 037331bfd..8d35f9a58 100644 --- a/crates/brk_computer/src/stateful/cohorts/address/vecs.rs +++ b/crates/brk_computer/src/distribution/cohorts/address/vecs.rs @@ -12,12 +12,12 @@ use vecdb::{ use crate::{ ComputeIndexes, - grouped::{ComputedVecsFromHeight, Source, VecBuilderOptions}, + internal::{ComputedVecsFromHeight, Source, VecBuilderOptions}, indexes, price, - stateful::state::AddressCohortState, + distribution::state::AddressCohortState, }; -use crate::stateful::metrics::{CohortMetrics, ImportConfig, SupplyMetrics}; +use crate::distribution::metrics::{CohortMetrics, ImportConfig, SupplyMetrics}; use super::super::traits::{CohortVecs, DynCohortVecs}; @@ -114,14 +114,6 @@ impl AddressCohortVecs { self.starting_height = Some(Height::ZERO); } - /// Get minimum length across height-indexed vectors. - pub fn min_len(&self) -> usize { - self.height_to_addr_count - .len() - .min(self.metrics.supply.min_len()) - .min(self.metrics.activity.min_len()) - } - /// Returns a parallel iterator over all vecs for parallel writing. 
pub fn par_iter_vecs_mut(&mut self) -> impl ParallelIterator { rayon::iter::once(&mut self.height_to_addr_count as &mut dyn AnyStoredVec) @@ -144,8 +136,14 @@ impl Filtered for AddressCohortVecs { } impl DynCohortVecs for AddressCohortVecs { - fn min_height_vecs_len(&self) -> usize { - self.min_len() + fn min_stateful_height_len(&self) -> usize { + self.height_to_addr_count + .len() + .min(self.metrics.min_stateful_height_len()) + } + + fn min_stateful_dateindex_len(&self) -> usize { + self.metrics.min_stateful_dateindex_len() } fn reset_state_starting_height(&mut self) { @@ -205,9 +203,7 @@ impl DynCohortVecs for AddressCohortVecs { fn validate_computed_versions(&mut self, base_version: Version) -> Result<()> { use vecdb::GenericStoredVec; self.height_to_addr_count - .validate_computed_version_or_reset( - base_version + self.height_to_addr_count.inner_version(), - )?; + .validate_computed_version_or_reset(base_version)?; self.metrics.validate_computed_versions(base_version)?; Ok(()) } diff --git a/crates/brk_computer/src/stateful/cohorts/mod.rs b/crates/brk_computer/src/distribution/cohorts/mod.rs similarity index 100% rename from crates/brk_computer/src/stateful/cohorts/mod.rs rename to crates/brk_computer/src/distribution/cohorts/mod.rs diff --git a/crates/brk_computer/src/stateful/cohorts/traits.rs b/crates/brk_computer/src/distribution/cohorts/traits.rs similarity index 89% rename from crates/brk_computer/src/stateful/cohorts/traits.rs rename to crates/brk_computer/src/distribution/cohorts/traits.rs index 282c82b30..a9c210329 100644 --- a/crates/brk_computer/src/stateful/cohorts/traits.rs +++ b/crates/brk_computer/src/distribution/cohorts/traits.rs @@ -8,8 +8,11 @@ use crate::{ComputeIndexes, indexes, price}; /// /// This trait enables heterogeneous cohort processing via trait objects. pub trait DynCohortVecs: Send + Sync { - /// Get minimum length across height-indexed vectors. 
- fn min_height_vecs_len(&self) -> usize; + /// Get minimum length across height-indexed vectors written in block loop. + fn min_stateful_height_len(&self) -> usize; + + /// Get minimum length across dateindex-indexed vectors written in block loop. + fn min_stateful_dateindex_len(&self) -> usize; /// Reset the starting height for state tracking. fn reset_state_starting_height(&mut self); diff --git a/crates/brk_computer/src/stateful/cohorts/utxo/groups.rs b/crates/brk_computer/src/distribution/cohorts/utxo/groups.rs similarity index 90% rename from crates/brk_computer/src/stateful/cohorts/utxo/groups.rs rename to crates/brk_computer/src/distribution/cohorts/utxo/groups.rs index 1ec1bc0f8..b83374a9e 100644 --- a/crates/brk_computer/src/stateful/cohorts/utxo/groups.rs +++ b/crates/brk_computer/src/distribution/cohorts/utxo/groups.rs @@ -13,9 +13,9 @@ use vecdb::{AnyStoredVec, Database, Exit, IterableVec}; use crate::{ ComputeIndexes, - grouped::{PERCENTILES, PERCENTILES_LEN}, + internal::{PERCENTILES, PERCENTILES_LEN}, indexes, price, - stateful::DynCohortVecs, + distribution::DynCohortVecs, }; use super::{super::traits::CohortVecs, vecs::UTXOCohortVecs}; @@ -247,13 +247,33 @@ impl UTXOCohorts { } /// Get minimum height from all separate cohorts' height-indexed vectors. - pub fn min_separate_height_vecs_len(&self) -> Height { + pub fn min_separate_stateful_height_len(&self) -> Height { self.iter_separate() - .map(|v| Height::from(v.min_height_vecs_len())) + .map(|v| Height::from(v.min_stateful_height_len())) .min() .unwrap_or_default() } + /// Get minimum dateindex from all separate cohorts' dateindex-indexed vectors. + pub fn min_separate_stateful_dateindex_len(&self) -> usize { + self.iter_separate() + .map(|v| v.min_stateful_dateindex_len()) + .min() + .unwrap_or(usize::MAX) + } + + /// Get minimum dateindex from all aggregate cohorts' dateindex-indexed vectors. + /// This checks cost_basis percentiles which are only on aggregate cohorts. 
+ pub fn min_aggregate_stateful_dateindex_len(&self) -> usize { + self.0 + .iter_aggregate() + .filter_map(|v| v.metrics.cost_basis.as_ref()) + .filter_map(|cb| cb.percentiles.as_ref()) + .map(|cbp| cbp.min_stateful_dateindex_len()) + .min() + .unwrap_or(usize::MAX) + } + /// Import state for all separate cohorts at or before given height. /// Returns true if all imports succeeded and returned the expected height. pub fn import_separate_states(&mut self, height: Height) -> bool { @@ -305,12 +325,12 @@ impl UTXOCohorts { for aggregate in self.0.iter_aggregate_mut() { let filter = aggregate.filter().clone(); - // Get price_percentiles storage, skip if not configured - let Some(pp) = aggregate + // Get cost_basis percentiles storage, skip if not configured + let Some(percentiles) = aggregate .metrics - .price_paid + .cost_basis .as_mut() - .and_then(|p| p.price_percentiles.as_mut()) + .and_then(|cb| cb.percentiles.as_mut()) else { continue; }; @@ -325,7 +345,7 @@ impl UTXOCohorts { let total_supply: u64 = relevant.iter().map(|(_, s, _)| u64::from(*s)).sum(); if total_supply == 0 { - pp.truncate_push(dateindex, &[Dollars::NAN; PERCENTILES_LEN])?; + percentiles.truncate_push(dateindex, &[Dollars::NAN; PERCENTILES_LEN])?; continue; } @@ -388,7 +408,7 @@ impl UTXOCohorts { } } - pp.truncate_push(dateindex, &result)?; + percentiles.truncate_push(dateindex, &result)?; } Ok(()) @@ -400,7 +420,7 @@ impl UTXOCohorts { self.par_iter_separate_mut() .try_for_each(|v| v.validate_computed_versions(base_version))?; - // Validate aggregate cohorts' price_percentiles + // Validate aggregate cohorts' cost_basis percentiles for v in self.0.iter_aggregate_mut() { v.validate_computed_versions(base_version)?; } diff --git a/crates/brk_computer/src/stateful/cohorts/utxo/mod.rs b/crates/brk_computer/src/distribution/cohorts/utxo/mod.rs similarity index 100% rename from crates/brk_computer/src/stateful/cohorts/utxo/mod.rs rename to crates/brk_computer/src/distribution/cohorts/utxo/mod.rs diff 
--git a/crates/brk_computer/src/stateful/cohorts/utxo/receive.rs b/crates/brk_computer/src/distribution/cohorts/utxo/receive.rs similarity index 97% rename from crates/brk_computer/src/stateful/cohorts/utxo/receive.rs rename to crates/brk_computer/src/distribution/cohorts/utxo/receive.rs index 048714e58..c9540f30d 100644 --- a/crates/brk_computer/src/stateful/cohorts/utxo/receive.rs +++ b/crates/brk_computer/src/distribution/cohorts/utxo/receive.rs @@ -1,6 +1,6 @@ use brk_types::{Dollars, Height, Timestamp}; -use crate::stateful::state::Transacted; +use crate::distribution::state::Transacted; use super::groups::UTXOCohorts; diff --git a/crates/brk_computer/src/stateful/cohorts/utxo/send.rs b/crates/brk_computer/src/distribution/cohorts/utxo/send.rs similarity index 98% rename from crates/brk_computer/src/stateful/cohorts/utxo/send.rs rename to crates/brk_computer/src/distribution/cohorts/utxo/send.rs index 70c59110f..b18de5b4e 100644 --- a/crates/brk_computer/src/stateful/cohorts/utxo/send.rs +++ b/crates/brk_computer/src/distribution/cohorts/utxo/send.rs @@ -3,7 +3,7 @@ use rustc_hash::FxHashMap; use vecdb::VecIndex; use crate::{ - stateful::state::{BlockState, Transacted}, + distribution::state::{BlockState, Transacted}, utils::OptionExt, }; diff --git a/crates/brk_computer/src/stateful/cohorts/utxo/tick_tock.rs b/crates/brk_computer/src/distribution/cohorts/utxo/tick_tock.rs similarity index 98% rename from crates/brk_computer/src/stateful/cohorts/utxo/tick_tock.rs rename to crates/brk_computer/src/distribution/cohorts/utxo/tick_tock.rs index 8880ee257..61ab8c6ad 100644 --- a/crates/brk_computer/src/stateful/cohorts/utxo/tick_tock.rs +++ b/crates/brk_computer/src/distribution/cohorts/utxo/tick_tock.rs @@ -1,7 +1,7 @@ use brk_cohort::AGE_BOUNDARIES; use brk_types::{ONE_DAY_IN_SEC, Timestamp}; -use crate::stateful::state::BlockState; +use crate::distribution::state::BlockState; use super::groups::UTXOCohorts; diff --git 
a/crates/brk_computer/src/stateful/cohorts/utxo/vecs.rs b/crates/brk_computer/src/distribution/cohorts/utxo/vecs.rs similarity index 95% rename from crates/brk_computer/src/stateful/cohorts/utxo/vecs.rs rename to crates/brk_computer/src/distribution/cohorts/utxo/vecs.rs index 81168a838..5ee9b2f9f 100644 --- a/crates/brk_computer/src/stateful/cohorts/utxo/vecs.rs +++ b/crates/brk_computer/src/distribution/cohorts/utxo/vecs.rs @@ -7,9 +7,9 @@ use brk_types::{Bitcoin, DateIndex, Dollars, Height, Version}; use rayon::prelude::*; use vecdb::{AnyStoredVec, Database, Exit, IterableVec}; -use crate::{ComputeIndexes, indexes, price, stateful::state::UTXOCohortState}; +use crate::{ComputeIndexes, indexes, price, distribution::state::UTXOCohortState}; -use crate::stateful::metrics::{CohortMetrics, ImportConfig, SupplyMetrics}; +use crate::distribution::metrics::{CohortMetrics, ImportConfig, SupplyMetrics}; use super::super::traits::{CohortVecs, DynCohortVecs}; @@ -114,8 +114,12 @@ impl Filtered for UTXOCohortVecs { } impl DynCohortVecs for UTXOCohortVecs { - fn min_height_vecs_len(&self) -> usize { - self.metrics.min_len() + fn min_stateful_height_len(&self) -> usize { + self.metrics.min_stateful_height_len() + } + + fn min_stateful_dateindex_len(&self) -> usize { + self.metrics.min_stateful_dateindex_len() } fn reset_state_starting_height(&mut self) { diff --git a/crates/brk_computer/src/stateful/compute/aggregates.rs b/crates/brk_computer/src/distribution/compute/aggregates.rs similarity index 100% rename from crates/brk_computer/src/stateful/compute/aggregates.rs rename to crates/brk_computer/src/distribution/compute/aggregates.rs diff --git a/crates/brk_computer/src/stateful/compute/block_loop.rs b/crates/brk_computer/src/distribution/compute/block_loop.rs similarity index 85% rename from crates/brk_computer/src/stateful/compute/block_loop.rs rename to crates/brk_computer/src/distribution/compute/block_loop.rs index a847afab6..ec736b43c 100644 --- 
a/crates/brk_computer/src/stateful/compute/block_loop.rs +++ b/crates/brk_computer/src/distribution/compute/block_loop.rs @@ -6,11 +6,11 @@ use brk_indexer::Indexer; use brk_types::{DateIndex, Height, OutputType, Sats, TxIndex, TypeIndex}; use log::info; use rayon::prelude::*; -use vecdb::{Exit, GenericStoredVec, IterableVec, TypedVecIterator, VecIndex}; +use vecdb::{Exit, IterableVec, TypedVecIterator, VecIndex}; use crate::{ - chain, indexes, price, - stateful::{ + blocks, transactions, indexes, price, + distribution::{ address::AddressTypeToAddressCount, block::{ AddressCache, InputsResult, process_inputs, process_outputs, process_received, @@ -19,7 +19,7 @@ use crate::{ compute::write::{process_address_updates, write}, state::{BlockState, Transacted}, }, - txins, + inputs, outputs, utils::OptionExt, }; @@ -40,8 +40,10 @@ pub fn process_blocks( vecs: &mut Vecs, indexer: &Indexer, indexes: &indexes::Vecs, - txins: &txins::Vecs, - chain: &chain::Vecs, + inputs: &inputs::Vecs, + outputs: &outputs::Vecs, + transactions: &transactions::Vecs, + blocks: &blocks::Vecs, price: Option<&price::Vecs>, starting_height: Height, last_height: Height, @@ -49,7 +51,7 @@ pub fn process_blocks( exit: &Exit, ) -> Result<()> { // Create computation context with pre-computed vectors for thread-safe access - let ctx = ComputeContext::new(starting_height, last_height, indexes, price); + let ctx = ComputeContext::new(starting_height, last_height, blocks, price); if ctx.starting_height > ctx.last_height { return Ok(()); @@ -61,33 +63,21 @@ pub fn process_blocks( let height_to_first_txoutindex = &indexer.vecs.txout.height_to_first_txoutindex; let height_to_first_txinindex = &indexer.vecs.txin.height_to_first_txinindex; - // From chain (via .height.u() or .height.unwrap_sum() patterns): - let height_to_tx_count = chain.transaction.indexes_to_tx_count.height.u(); - let height_to_output_count = chain - .transaction - .indexes_to_output_count - .height - .unwrap_sum(); - let 
height_to_input_count = chain.transaction.indexes_to_input_count.height.unwrap_sum(); - let height_to_unclaimed_rewards = chain - .coinbase - .indexes_to_unclaimed_rewards - .sats - .height - .as_ref() - .unwrap(); - - // From indexes: - let height_to_timestamp = &indexes.block.height_to_timestamp_fixed; - let height_to_date = &indexes.block.height_to_date_fixed; + // From transactions and inputs/outputs (via .height.u() or .height.unwrap_sum() patterns): + let height_to_tx_count = transactions.count.indexes_to_tx_count.height.u(); + let height_to_output_count = outputs.count.indexes_to_count.height.unwrap_sum(); + let height_to_input_count = inputs.count.indexes_to_count.height.unwrap_sum(); + // From blocks: + let height_to_timestamp = &blocks.time.height_to_timestamp_fixed; + let height_to_date = &blocks.time.height_to_date_fixed; let dateindex_to_first_height = &indexes.time.dateindex_to_first_height; let dateindex_to_height_count = &indexes.time.dateindex_to_height_count; let txindex_to_output_count = &indexes.transaction.txindex_to_output_count; let txindex_to_input_count = &indexes.transaction.txindex_to_input_count; // From price (optional): - let height_to_price = price.map(|p| &p.chainindexes_to_price_close.height); - let dateindex_to_price = price.map(|p| p.timeindexes_to_price_close.dateindex.u()); + let height_to_price = price.map(|p| &p.usd.chainindexes_to_price_close.height); + let dateindex_to_price = price.map(|p| p.usd.timeindexes_to_price_close.dateindex.u()); // Access pre-computed vectors from context for thread-safe access let height_to_price_vec = &ctx.height_to_price; @@ -100,7 +90,6 @@ pub fn process_blocks( let mut height_to_tx_count_iter = height_to_tx_count.into_iter(); let mut height_to_output_count_iter = height_to_output_count.into_iter(); let mut height_to_input_count_iter = height_to_input_count.into_iter(); - let mut height_to_unclaimed_rewards_iter = height_to_unclaimed_rewards.into_iter(); let mut height_to_timestamp_iter = 
height_to_timestamp.into_iter(); let mut height_to_date_iter = height_to_date.into_iter(); let mut dateindex_to_first_height_iter = dateindex_to_first_height.into_iter(); @@ -123,7 +112,7 @@ pub fn process_blocks( // Create reusable iterators for sequential txout/txin reads (16KB buffered) let mut txout_iters = TxOutIterators::new(indexer); - let mut txin_iters = TxInIterators::new(indexer, txins, &mut txindex_to_height); + let mut txin_iters = TxInIterators::new(indexer, inputs, &mut txindex_to_height); // Create iterators for first address indexes per type let mut first_p2a_iter = indexer @@ -168,38 +157,23 @@ pub fn process_blocks( .into_iter(); // Track running totals - recover from previous height if resuming - let ( - mut unspendable_supply, - mut opreturn_supply, - mut addresstype_to_addr_count, - mut addresstype_to_empty_addr_count, - ) = if starting_height > Height::ZERO { - let prev_height = starting_height.decremented().unwrap(); - let unspendable = vecs - .height_to_unspendable_supply - .into_iter() - .get_unwrap(prev_height); - let opreturn = vecs - .height_to_opreturn_supply - .into_iter() - .get_unwrap(prev_height); - let addr_count = AddressTypeToAddressCount::from(( - &vecs.addresstype_to_height_to_addr_count, - starting_height, - )); - let empty_addr_count = AddressTypeToAddressCount::from(( - &vecs.addresstype_to_height_to_empty_addr_count, - starting_height, - )); - (unspendable, opreturn, addr_count, empty_addr_count) - } else { - ( - Sats::ZERO, - Sats::ZERO, - AddressTypeToAddressCount::default(), - AddressTypeToAddressCount::default(), - ) - }; + let (mut addresstype_to_addr_count, mut addresstype_to_empty_addr_count) = + if starting_height > Height::ZERO { + let addr_count = AddressTypeToAddressCount::from(( + &vecs.addresstype_to_height_to_addr_count, + starting_height, + )); + let empty_addr_count = AddressTypeToAddressCount::from(( + &vecs.addresstype_to_height_to_empty_addr_count, + starting_height, + )); + (addr_count, 
empty_addr_count) + } else { + ( + AddressTypeToAddressCount::default(), + AddressTypeToAddressCount::default(), + ) + }; let mut cache = AddressCache::new(); @@ -320,16 +294,10 @@ pub fn process_blocks( let mut transacted = outputs_result.transacted; let mut height_to_sent = inputs_result.height_to_sent; - // Update supply tracking - unspendable_supply += transacted.by_type.unspendable.opreturn.value - + height_to_unclaimed_rewards_iter.get_unwrap(height); - opreturn_supply += transacted.by_type.unspendable.opreturn.value; - // Handle special cases if height == Height::ZERO { - // Genesis block - reset transacted, add 50 BTC to unspendable + // Genesis block - reset transacted (50 BTC is unspendable, handled in supply module) transacted = Transacted::default(); - unspendable_supply += Sats::FIFTY_BTC; } else if height == Height::new(BIP30_DUPLICATE_HEIGHT_1) || height == Height::new(BIP30_DUPLICATE_HEIGHT_2) { @@ -394,10 +362,6 @@ pub fn process_blocks( }); // Push to height-indexed vectors - vecs.height_to_unspendable_supply - .truncate_push(height, unspendable_supply)?; - vecs.height_to_opreturn_supply - .truncate_push(height, opreturn_supply)?; vecs.addresstype_to_height_to_addr_count .truncate_push(height, &addresstype_to_addr_count)?; vecs.addresstype_to_height_to_empty_addr_count diff --git a/crates/brk_computer/src/stateful/compute/context.rs b/crates/brk_computer/src/distribution/compute/context.rs similarity index 87% rename from crates/brk_computer/src/stateful/compute/context.rs rename to crates/brk_computer/src/distribution/compute/context.rs index b079e506a..d2be94f34 100644 --- a/crates/brk_computer/src/stateful/compute/context.rs +++ b/crates/brk_computer/src/distribution/compute/context.rs @@ -1,7 +1,7 @@ use brk_types::{Dollars, Height, Timestamp}; use vecdb::VecIndex; -use crate::{indexes, price}; +use crate::{blocks, price}; /// Context shared across block processing. 
pub struct ComputeContext { @@ -23,14 +23,14 @@ impl ComputeContext { pub fn new( starting_height: Height, last_height: Height, - indexes: &indexes::Vecs, + blocks: &blocks::Vecs, price: Option<&price::Vecs>, ) -> Self { let height_to_timestamp: Vec = - indexes.block.height_to_timestamp_fixed.into_iter().collect(); + blocks.time.height_to_timestamp_fixed.into_iter().collect(); let height_to_price: Option> = price - .map(|p| &p.chainindexes_to_price_close.height) + .map(|p| &p.usd.chainindexes_to_price_close.height) .map(|v| v.into_iter().map(|d| *d).collect()); Self { diff --git a/crates/brk_computer/src/stateful/compute/mod.rs b/crates/brk_computer/src/distribution/compute/mod.rs similarity index 100% rename from crates/brk_computer/src/stateful/compute/mod.rs rename to crates/brk_computer/src/distribution/compute/mod.rs diff --git a/crates/brk_computer/src/stateful/compute/readers.rs b/crates/brk_computer/src/distribution/compute/readers.rs similarity index 98% rename from crates/brk_computer/src/stateful/compute/readers.rs rename to crates/brk_computer/src/distribution/compute/readers.rs index 0c2535c6b..397d445cb 100644 --- a/crates/brk_computer/src/stateful/compute/readers.rs +++ b/crates/brk_computer/src/distribution/compute/readers.rs @@ -9,11 +9,11 @@ use vecdb::{ }; use crate::{ - stateful::{ + distribution::{ RangeMap, address::{AddressesDataVecs, AnyAddressIndexesVecs}, }, - txins, + inputs, }; /// Output data collected from separate vecs. 
@@ -71,11 +71,11 @@ pub struct TxInIterators<'a> { impl<'a> TxInIterators<'a> { pub fn new( indexer: &'a Indexer, - txins: &'a txins::Vecs, + txins: &'a inputs::Vecs, txindex_to_height: &'a mut RangeMap, ) -> Self { Self { - value_iter: txins.txinindex_to_value.into_iter(), + value_iter: txins.spent.txinindex_to_value.into_iter(), outpoint_iter: indexer.vecs.txin.txinindex_to_outpoint.into_iter(), outputtype_iter: indexer.vecs.txin.txinindex_to_outputtype.into_iter(), typeindex_iter: indexer.vecs.txin.txinindex_to_typeindex.into_iter(), diff --git a/crates/brk_computer/src/stateful/compute/recover.rs b/crates/brk_computer/src/distribution/compute/recover.rs similarity index 100% rename from crates/brk_computer/src/stateful/compute/recover.rs rename to crates/brk_computer/src/distribution/compute/recover.rs diff --git a/crates/brk_computer/src/stateful/compute/write.rs b/crates/brk_computer/src/distribution/compute/write.rs similarity index 92% rename from crates/brk_computer/src/stateful/compute/write.rs rename to crates/brk_computer/src/distribution/compute/write.rs index 8c69a967b..580bc42f1 100644 --- a/crates/brk_computer/src/stateful/compute/write.rs +++ b/crates/brk_computer/src/distribution/compute/write.rs @@ -6,7 +6,7 @@ use log::info; use rayon::prelude::*; use vecdb::{AnyStoredVec, GenericStoredVec, Stamp}; -use crate::stateful::{ +use crate::distribution::{ Vecs, block::{ EmptyAddressDataWithSource, LoadedAddressDataWithSource, process_empty_addresses, @@ -31,6 +31,8 @@ pub fn process_address_updates( empty_updates: AddressTypeToTypeIndexMap, loaded_updates: AddressTypeToTypeIndexMap, ) -> Result<()> { + info!("Processing address updates..."); + let empty_result = process_empty_addresses(addresses_data, empty_updates)?; let loaded_result = process_loaded_addresses(addresses_data, loaded_updates)?; let all_updates = empty_result.merge(loaded_result); @@ -83,12 +85,6 @@ pub fn write( .chain(rayon::iter::once( &mut vecs.chain_state as &mut dyn 
AnyStoredVec, )) - .chain(rayon::iter::once( - &mut vecs.height_to_unspendable_supply as &mut dyn AnyStoredVec, - )) - .chain(rayon::iter::once( - &mut vecs.height_to_opreturn_supply as &mut dyn AnyStoredVec, - )) .chain(vecs.utxo_cohorts.par_iter_vecs_mut()) .chain(vecs.address_cohorts.par_iter_vecs_mut()) .try_for_each(|v| v.any_stamped_write_maybe_with_changes(stamp, with_changes))?; diff --git a/crates/brk_computer/src/stateful/metrics/activity.rs b/crates/brk_computer/src/distribution/metrics/activity.rs similarity index 98% rename from crates/brk_computer/src/stateful/metrics/activity.rs rename to crates/brk_computer/src/distribution/metrics/activity.rs index 202abf7af..737ba040b 100644 --- a/crates/brk_computer/src/stateful/metrics/activity.rs +++ b/crates/brk_computer/src/distribution/metrics/activity.rs @@ -9,7 +9,7 @@ use vecdb::{ use crate::{ ComputeIndexes, - grouped::{ComputedValueVecsFromHeight, ComputedVecsFromHeight, Source, VecBuilderOptions}, + internal::{ComputedValueVecsFromHeight, ComputedVecsFromHeight, Source, VecBuilderOptions}, indexes, price, }; diff --git a/crates/brk_computer/src/stateful/metrics/config.rs b/crates/brk_computer/src/distribution/metrics/config.rs similarity index 94% rename from crates/brk_computer/src/stateful/metrics/config.rs rename to crates/brk_computer/src/distribution/metrics/config.rs index 603f8fbd5..c74933339 100644 --- a/crates/brk_computer/src/stateful/metrics/config.rs +++ b/crates/brk_computer/src/distribution/metrics/config.rs @@ -40,6 +40,8 @@ impl<'a> ImportConfig<'a> { pub fn name(&self, suffix: &str) -> String { if self.full_name.is_empty() { suffix.to_string() + } else if suffix.is_empty() { + self.full_name.to_string() } else { format!("{}_{suffix}", self.full_name) } diff --git a/crates/brk_computer/src/stateful/metrics/price/paid.rs b/crates/brk_computer/src/distribution/metrics/cost_basis.rs similarity index 50% rename from crates/brk_computer/src/stateful/metrics/price/paid.rs rename to 
crates/brk_computer/src/distribution/metrics/cost_basis.rs index 31a3000ba..bfe293eeb 100644 --- a/crates/brk_computer/src/stateful/metrics/price/paid.rs +++ b/crates/brk_computer/src/distribution/metrics/cost_basis.rs @@ -3,67 +3,68 @@ use brk_traversable::Traversable; use brk_types::{DateIndex, Dollars, Height, Version}; use rayon::prelude::*; use vecdb::{ - AnyStoredVec, EagerVec, Exit, GenericStoredVec, ImportableVec, IterableCloneableVec, PcoVec, + AnyStoredVec, AnyVec, EagerVec, Exit, GenericStoredVec, ImportableVec, IterableCloneableVec, + PcoVec, }; use crate::{ ComputeIndexes, - grouped::{ComputedVecsFromHeight, PricePercentiles, Source, VecBuilderOptions}, - stateful::state::CohortState, + distribution::state::CohortState, + internal::{ComputedVecsFromHeight, CostBasisPercentiles, Source, VecBuilderOptions}, }; -use super::super::ImportConfig; +use super::ImportConfig; -/// Price paid metrics. +/// Cost basis metrics. #[derive(Clone, Traversable)] -pub struct PricePaidMetrics { - /// Minimum price paid for any UTXO at this height - pub height_to_min_price_paid: EagerVec>, - pub indexes_to_min_price_paid: ComputedVecsFromHeight, +pub struct CostBasisMetrics { + /// Minimum cost basis for any UTXO at this height + pub height_to_min_cost_basis: EagerVec>, + pub indexes_to_min_cost_basis: ComputedVecsFromHeight, - /// Maximum price paid for any UTXO at this height - pub height_to_max_price_paid: EagerVec>, - pub indexes_to_max_price_paid: ComputedVecsFromHeight, + /// Maximum cost basis for any UTXO at this height + pub height_to_max_cost_basis: EagerVec>, + pub indexes_to_max_cost_basis: ComputedVecsFromHeight, - /// Price distribution percentiles (median, quartiles, etc.) - pub price_percentiles: Option, + /// Cost basis distribution percentiles (median, quartiles, etc.) + pub percentiles: Option, } -impl PricePaidMetrics { - /// Import price paid metrics from database. +impl CostBasisMetrics { + /// Import cost basis metrics from database. 
pub fn forced_import(cfg: &ImportConfig) -> Result { let v0 = Version::ZERO; let extended = cfg.extended(); let last = VecBuilderOptions::default().add_last(); - let height_to_min_price_paid = - EagerVec::forced_import(cfg.db, &cfg.name("min_price_paid"), cfg.version + v0)?; + let height_to_min_cost_basis = + EagerVec::forced_import(cfg.db, &cfg.name("min_cost_basis"), cfg.version + v0)?; - let height_to_max_price_paid = - EagerVec::forced_import(cfg.db, &cfg.name("max_price_paid"), cfg.version + v0)?; + let height_to_max_cost_basis = + EagerVec::forced_import(cfg.db, &cfg.name("max_cost_basis"), cfg.version + v0)?; Ok(Self { - indexes_to_min_price_paid: ComputedVecsFromHeight::forced_import( + indexes_to_min_cost_basis: ComputedVecsFromHeight::forced_import( cfg.db, - &cfg.name("min_price_paid"), - Source::Vec(height_to_min_price_paid.boxed_clone()), + &cfg.name("min_cost_basis"), + Source::Vec(height_to_min_cost_basis.boxed_clone()), cfg.version + v0, cfg.indexes, last, )?, - indexes_to_max_price_paid: ComputedVecsFromHeight::forced_import( + indexes_to_max_cost_basis: ComputedVecsFromHeight::forced_import( cfg.db, - &cfg.name("max_price_paid"), - Source::Vec(height_to_max_price_paid.boxed_clone()), + &cfg.name("max_cost_basis"), + Source::Vec(height_to_max_cost_basis.boxed_clone()), cfg.version + v0, cfg.indexes, last, )?, - height_to_min_price_paid, - height_to_max_price_paid, - price_percentiles: extended + height_to_min_cost_basis, + height_to_max_cost_basis, + percentiles: extended .then(|| { - PricePercentiles::forced_import( + CostBasisPercentiles::forced_import( cfg.db, &cfg.name(""), cfg.version + v0, @@ -75,16 +76,31 @@ impl PricePaidMetrics { }) } - /// Push min/max price paid from state. + /// Get minimum length across height-indexed vectors written in block loop. 
+ pub fn min_stateful_height_len(&self) -> usize { + self.height_to_min_cost_basis + .len() + .min(self.height_to_max_cost_basis.len()) + } + + /// Get minimum length across dateindex-indexed vectors written in block loop. + pub fn min_stateful_dateindex_len(&self) -> usize { + self.percentiles + .as_ref() + .map(|p| p.min_stateful_dateindex_len()) + .unwrap_or(usize::MAX) + } + + /// Push min/max cost basis from state. pub fn truncate_push_minmax(&mut self, height: Height, state: &CohortState) -> Result<()> { - self.height_to_min_price_paid.truncate_push( + self.height_to_min_cost_basis.truncate_push( height, state .price_to_amount_first_key_value() .map(|(dollars, _)| dollars) .unwrap_or(Dollars::NAN), )?; - self.height_to_max_price_paid.truncate_push( + self.height_to_max_cost_basis.truncate_push( height, state .price_to_amount_last_key_value() @@ -94,26 +110,26 @@ impl PricePaidMetrics { Ok(()) } - /// Push price percentiles from state at date boundary. + /// Push cost basis percentiles from state at date boundary. /// Only called when at the last height of a day. pub fn truncate_push_percentiles( &mut self, dateindex: DateIndex, state: &CohortState, ) -> Result<()> { - if let Some(price_percentiles) = self.price_percentiles.as_mut() { + if let Some(percentiles) = self.percentiles.as_mut() { let percentile_prices = state.compute_percentile_prices(); - price_percentiles.truncate_push(dateindex, &percentile_prices)?; + percentiles.truncate_push(dateindex, &percentile_prices)?; } Ok(()) } /// Write height-indexed vectors to disk. 
pub fn write(&mut self) -> Result<()> { - self.height_to_min_price_paid.write()?; - self.height_to_max_price_paid.write()?; - if let Some(price_percentiles) = self.price_percentiles.as_mut() { - price_percentiles.write()?; + self.height_to_min_cost_basis.write()?; + self.height_to_max_cost_basis.write()?; + if let Some(percentiles) = self.percentiles.as_mut() { + percentiles.write()?; } Ok(()) } @@ -121,12 +137,13 @@ impl PricePaidMetrics { /// Returns a parallel iterator over all vecs for parallel writing. pub fn par_iter_mut(&mut self) -> impl ParallelIterator { let mut vecs: Vec<&mut dyn AnyStoredVec> = vec![ - &mut self.height_to_min_price_paid, - &mut self.height_to_max_price_paid, + &mut self.height_to_min_cost_basis, + &mut self.height_to_max_cost_basis, ]; - if let Some(pp) = self.price_percentiles.as_mut() { + if let Some(percentiles) = self.percentiles.as_mut() { vecs.extend( - pp.vecs + percentiles + .vecs .iter_mut() .flatten() .filter_map(|v| v.dateindex.as_mut()) @@ -138,8 +155,8 @@ impl PricePaidMetrics { /// Validate computed versions or reset if mismatched. 
pub fn validate_computed_versions(&mut self, base_version: Version) -> Result<()> { - if let Some(price_percentiles) = self.price_percentiles.as_mut() { - price_percentiles.validate_computed_version_or_reset(base_version)?; + if let Some(percentiles) = self.percentiles.as_mut() { + percentiles.validate_computed_version_or_reset(base_version)?; } Ok(()) } @@ -151,19 +168,19 @@ impl PricePaidMetrics { others: &[&Self], exit: &Exit, ) -> Result<()> { - self.height_to_min_price_paid.compute_min_of_others( + self.height_to_min_cost_basis.compute_min_of_others( starting_indexes.height, &others .iter() - .map(|v| &v.height_to_min_price_paid) + .map(|v| &v.height_to_min_cost_basis) .collect::>(), exit, )?; - self.height_to_max_price_paid.compute_max_of_others( + self.height_to_max_cost_basis.compute_max_of_others( starting_indexes.height, &others .iter() - .map(|v| &v.height_to_max_price_paid) + .map(|v| &v.height_to_max_cost_basis) .collect::>(), exit, )?; @@ -177,18 +194,18 @@ impl PricePaidMetrics { starting_indexes: &ComputeIndexes, exit: &Exit, ) -> Result<()> { - self.indexes_to_min_price_paid.compute_rest( + self.indexes_to_min_cost_basis.compute_rest( indexes, starting_indexes, exit, - Some(&self.height_to_min_price_paid), + Some(&self.height_to_min_cost_basis), )?; - self.indexes_to_max_price_paid.compute_rest( + self.indexes_to_max_cost_basis.compute_rest( indexes, starting_indexes, exit, - Some(&self.height_to_max_price_paid), + Some(&self.height_to_max_cost_basis), )?; Ok(()) diff --git a/crates/brk_computer/src/stateful/metrics/mod.rs b/crates/brk_computer/src/distribution/metrics/mod.rs similarity index 78% rename from crates/brk_computer/src/stateful/metrics/mod.rs rename to crates/brk_computer/src/distribution/metrics/mod.rs index 8503fcd4a..99f375f8c 100644 --- a/crates/brk_computer/src/stateful/metrics/mod.rs +++ b/crates/brk_computer/src/distribution/metrics/mod.rs @@ -1,16 +1,18 @@ mod activity; mod config; -mod price; +mod cost_basis; mod realized; 
+mod relative; mod supply; mod unrealized; -pub use activity::ActivityMetrics; -pub use config::ImportConfig; -pub use price::{PricePaidMetrics, RelativeMetrics}; -pub use realized::RealizedMetrics; -pub use supply::SupplyMetrics; -pub use unrealized::UnrealizedMetrics; +pub use activity::*; +pub use config::*; +pub use cost_basis::*; +pub use realized::*; +pub use relative::*; +pub use supply::*; +pub use unrealized::*; use brk_cohort::Filter; use brk_error::Result; @@ -19,7 +21,7 @@ use brk_types::{Bitcoin, DateIndex, Dollars, Height, Version}; use rayon::prelude::*; use vecdb::{AnyStoredVec, Exit, IterableVec}; -use crate::{ComputeIndexes, indexes, price as price_vecs, stateful::state::CohortState}; +use crate::{ComputeIndexes, distribution::state::CohortState, indexes, price as price_vecs}; /// All metrics for a cohort, organized by category. #[derive(Clone, Traversable)] @@ -39,8 +41,8 @@ pub struct CohortMetrics { /// Unrealized profit/loss (requires price data) pub unrealized: Option, - /// Price paid metrics (requires price data) - pub price_paid: Option, + /// Cost basis metrics (requires price data) + pub cost_basis: Option, /// Relative metrics (requires price data) pub relative: Option, @@ -73,17 +75,43 @@ impl CohortMetrics { realized: compute_dollars .then(|| RealizedMetrics::forced_import(cfg)) .transpose()?, - price_paid: compute_dollars - .then(|| PricePaidMetrics::forced_import(cfg)) + cost_basis: compute_dollars + .then(|| CostBasisMetrics::forced_import(cfg)) .transpose()?, relative, unrealized, }) } - /// Get minimum length across height-indexed vectors. - pub fn min_len(&self) -> usize { - self.supply.min_len().min(self.activity.min_len()) + /// Get minimum length across height-indexed vectors written in block loop. 
+ pub fn min_stateful_height_len(&self) -> usize { + let mut min = self.supply.min_len().min(self.activity.min_len()); + + if let Some(realized) = &self.realized { + min = min.min(realized.min_stateful_height_len()); + } + if let Some(unrealized) = &self.unrealized { + min = min.min(unrealized.min_stateful_height_len()); + } + if let Some(cost_basis) = &self.cost_basis { + min = min.min(cost_basis.min_stateful_height_len()); + } + + min + } + + /// Get minimum length across dateindex-indexed vectors written in block loop. + pub fn min_stateful_dateindex_len(&self) -> usize { + let mut min = usize::MAX; + + if let Some(unrealized) = &self.unrealized { + min = min.min(unrealized.min_stateful_dateindex_len()); + } + if let Some(cost_basis) = &self.cost_basis { + min = min.min(cost_basis.min_stateful_dateindex_len()); + } + + min } /// Push state values to height-indexed vectors. @@ -118,8 +146,8 @@ impl CohortMetrics { unrealized.write()?; } - if let Some(price_paid) = self.price_paid.as_mut() { - price_paid.write()?; + if let Some(cost_basis) = self.cost_basis.as_mut() { + cost_basis.write()?; } Ok(()) @@ -140,8 +168,8 @@ impl CohortMetrics { vecs.extend(unrealized.par_iter_mut().collect::>()); } - if let Some(price_paid) = self.price_paid.as_mut() { - vecs.extend(price_paid.par_iter_mut().collect::>()); + if let Some(cost_basis) = self.cost_basis.as_mut() { + vecs.extend(cost_basis.par_iter_mut().collect::>()); } vecs.into_par_iter() @@ -156,8 +184,8 @@ impl CohortMetrics { realized.validate_computed_versions(base_version)?; } - if let Some(price_paid) = self.price_paid.as_mut() { - price_paid.validate_computed_versions(base_version)?; + if let Some(cost_basis) = self.cost_basis.as_mut() { + cost_basis.validate_computed_versions(base_version)?; } Ok(()) @@ -176,12 +204,12 @@ impl CohortMetrics { // Apply pending updates before reading state.apply_pending(); - if let (Some(unrealized), Some(price_paid), Some(height_price)) = ( + if let (Some(unrealized), 
Some(cost_basis), Some(height_price)) = ( self.unrealized.as_mut(), - self.price_paid.as_mut(), + self.cost_basis.as_mut(), height_price, ) { - price_paid.truncate_push_minmax(height, state)?; + cost_basis.truncate_push_minmax(height, state)?; let (height_unrealized_state, date_unrealized_state) = state.compute_unrealized_states(height_price, date_price.unwrap()); @@ -195,7 +223,7 @@ impl CohortMetrics { // Only compute expensive percentiles at date boundaries (~144x reduction) if let Some(dateindex) = dateindex { - price_paid.truncate_push_percentiles(dateindex, state)?; + cost_basis.truncate_push_percentiles(dateindex, state)?; } } @@ -242,12 +270,12 @@ impl CohortMetrics { )?; } - if let Some(price_paid) = self.price_paid.as_mut() { - price_paid.compute_from_stateful( + if let Some(cost_basis) = self.cost_basis.as_mut() { + cost_basis.compute_from_stateful( starting_indexes, &others .iter() - .filter_map(|v| v.price_paid.as_ref()) + .filter_map(|v| v.cost_basis.as_ref()) .collect::>(), exit, )?; @@ -277,8 +305,8 @@ impl CohortMetrics { unrealized.compute_rest_part1(price, starting_indexes, exit)?; } - if let Some(price_paid) = self.price_paid.as_mut() { - price_paid.compute_rest_part1(indexes, starting_indexes, exit)?; + if let Some(cost_basis) = self.cost_basis.as_mut() { + cost_basis.compute_rest_part1(indexes, starting_indexes, exit)?; } Ok(()) diff --git a/crates/brk_computer/src/stateful/metrics/realized.rs b/crates/brk_computer/src/distribution/metrics/realized.rs similarity index 93% rename from crates/brk_computer/src/stateful/metrics/realized.rs rename to crates/brk_computer/src/distribution/metrics/realized.rs index 747f0d32a..79d879d08 100644 --- a/crates/brk_computer/src/stateful/metrics/realized.rs +++ b/crates/brk_computer/src/distribution/metrics/realized.rs @@ -3,19 +3,19 @@ use brk_traversable::Traversable; use brk_types::{Bitcoin, DateIndex, Dollars, Height, StoredF32, StoredF64, Version}; use rayon::prelude::*; use vecdb::{ - AnyStoredVec, 
EagerVec, Exit, GenericStoredVec, Ident, ImportableVec, IterableCloneableVec, - IterableVec, Negate, PcoVec, + AnyStoredVec, AnyVec, EagerVec, Exit, GenericStoredVec, Ident, ImportableVec, + IterableCloneableVec, IterableVec, Negate, PcoVec, }; use crate::{ ComputeIndexes, - grouped::{ + internal::{ ComputedRatioVecsFromDateIndex, ComputedVecsFromDateIndex, ComputedVecsFromHeight, - LazyVecsFrom2FromHeight, LazyVecsFromHeight, PercentageDollarsF32, Source, - VecBuilderOptions, + LazyVecsFrom2FromHeight, LazyVecsFromDateIndex, LazyVecsFromHeight, PercentageDollarsF32, + Source, StoredF32Identity, VecBuilderOptions, }, indexes, price, - stateful::state::RealizedState, + distribution::state::RealizedState, utils::OptionExt, }; @@ -32,6 +32,10 @@ pub struct RealizedMetrics { pub indexes_to_realized_cap_rel_to_own_market_cap: Option>, pub indexes_to_realized_cap_30d_delta: ComputedVecsFromDateIndex, + // === MVRV (Market Value to Realized Value) === + // Proxy for indexes_to_realized_price_extra.ratio (close / realized_price = market_cap / realized_cap) + pub indexes_to_mvrv: LazyVecsFromDateIndex, + // === Realized Profit/Loss === pub height_to_realized_profit: EagerVec>, pub indexes_to_realized_profit: ComputedVecsFromHeight, @@ -241,21 +245,38 @@ impl RealizedMetrics { }) .transpose()?; + // Create realized_price_extra first so we can reference its ratio for MVRV proxy + let indexes_to_realized_price_extra = ComputedRatioVecsFromDateIndex::forced_import( + cfg.db, + &cfg.name("realized_price"), + Some(&indexes_to_realized_price), + cfg.version + v0, + cfg.indexes, + extended, + cfg.price, + )?; + + // MVRV is a lazy proxy for realized_price_extra.ratio + // ratio = close / realized_price = market_cap / realized_cap = MVRV + let indexes_to_mvrv = LazyVecsFromDateIndex::from_computed::( + &cfg.name("mvrv"), + cfg.version + v0, + indexes_to_realized_price_extra + .ratio + .dateindex + .as_ref() + .map(|v| v.boxed_clone()), + &indexes_to_realized_price_extra.ratio, + ); 
+ Ok(Self { // === Realized Cap === height_to_realized_cap, indexes_to_realized_cap, - indexes_to_realized_price_extra: ComputedRatioVecsFromDateIndex::forced_import( - cfg.db, - &cfg.name("realized_price"), - Some(&indexes_to_realized_price), - cfg.version + v0, - cfg.indexes, - extended, - cfg.price, - )?, + indexes_to_realized_price_extra, indexes_to_realized_price, + indexes_to_mvrv, indexes_to_realized_cap_rel_to_own_market_cap: extended .then(|| { ComputedVecsFromHeight::forced_import( @@ -449,6 +470,26 @@ impl RealizedMetrics { }) } + /// Get minimum length across height-indexed vectors written in block loop. + pub fn min_stateful_height_len(&self) -> usize { + let mut min = self + .height_to_realized_cap + .len() + .min(self.height_to_realized_profit.len()) + .min(self.height_to_realized_loss.len()) + .min(self.height_to_value_created.len()) + .min(self.height_to_value_destroyed.len()); + + if let Some(v) = &self.height_to_adjusted_value_created { + min = min.min(v.len()); + } + if let Some(v) = &self.height_to_adjusted_value_destroyed { + min = min.min(v.len()); + } + + min + } + /// Push realized state values to height-indexed vectors. 
pub fn truncate_push(&mut self, height: Height, state: &RealizedState) -> Result<()> { self.height_to_realized_cap diff --git a/crates/brk_computer/src/stateful/metrics/price/relative.rs b/crates/brk_computer/src/distribution/metrics/relative.rs similarity index 97% rename from crates/brk_computer/src/stateful/metrics/price/relative.rs rename to crates/brk_computer/src/distribution/metrics/relative.rs index 8a07bfe0c..62f4d4c19 100644 --- a/crates/brk_computer/src/stateful/metrics/price/relative.rs +++ b/crates/brk_computer/src/distribution/metrics/relative.rs @@ -3,12 +3,12 @@ use brk_traversable::Traversable; use brk_types::{Bitcoin, Dollars, Height, Sats, StoredF32, StoredF64, Version}; use vecdb::{IterableCloneableVec, LazyVecFrom2}; -use crate::grouped::{ +use crate::internal::{ LazyVecsFrom2FromDateIndex, NegPercentageDollarsF32, NegRatio32, PercentageBtcF64, PercentageDollarsF32, PercentageSatsF64, Ratio32, }; -use super::super::{ImportConfig, SupplyMetrics, UnrealizedMetrics}; +use super::{ImportConfig, SupplyMetrics, UnrealizedMetrics}; /// Relative metrics comparing cohort values to global values. /// All `rel_to_` vecs are lazy - computed on-demand from their sources. 
@@ -56,6 +56,10 @@ pub struct RelativeMetrics { pub indexes_to_net_unrealized_pnl_rel_to_market_cap: Option>, + // === NUPL (Net Unrealized Profit/Loss) === + // Proxy for indexes_to_net_unrealized_pnl_rel_to_market_cap + pub indexes_to_nupl: Option>, + // === Unrealized vs Own Market Cap (lazy) === pub height_to_unrealized_profit_rel_to_own_market_cap: Option>, @@ -285,6 +289,16 @@ impl RelativeMetrics { ) }), + // NUPL is a proxy for net_unrealized_pnl_rel_to_market_cap + indexes_to_nupl: global_market_cap.map(|mc| { + LazyVecsFrom2FromDateIndex::from_computed::( + &cfg.name("nupl"), + cfg.version + v2, + &unrealized.indexes_to_net_unrealized_pnl, + mc, + ) + }), + // === Unrealized vs Own Market Cap (lazy, optional) === height_to_unrealized_profit_rel_to_own_market_cap: (extended && compute_rel_to_all) .then(|| { diff --git a/crates/brk_computer/src/stateful/metrics/supply.rs b/crates/brk_computer/src/distribution/metrics/supply.rs similarity index 98% rename from crates/brk_computer/src/stateful/metrics/supply.rs rename to crates/brk_computer/src/distribution/metrics/supply.rs index 8c6b6ba11..87d640067 100644 --- a/crates/brk_computer/src/stateful/metrics/supply.rs +++ b/crates/brk_computer/src/distribution/metrics/supply.rs @@ -9,7 +9,7 @@ use vecdb::{ use crate::{ ComputeIndexes, - grouped::{ + internal::{ ComputedHeightValueVecs, ComputedValueVecsFromDateIndex, ComputedVecsFromHeight, HalfClosePriceTimesSats, HalveDollars, HalveSats, HalveSatsToBitcoin, LazyHeightValueVecs, LazyValueVecsFromDateIndex, Source, VecBuilderOptions, @@ -57,7 +57,7 @@ impl SupplyMetrics { let price_source = cfg .price - .map(|p| p.chainindexes_to_price_close.height.boxed_clone()); + .map(|p| p.usd.chainindexes_to_price_close.height.boxed_clone()); let height_to_supply_value = ComputedHeightValueVecs::forced_import( cfg.db, diff --git a/crates/brk_computer/src/stateful/metrics/unrealized.rs b/crates/brk_computer/src/distribution/metrics/unrealized.rs similarity index 94% rename 
from crates/brk_computer/src/stateful/metrics/unrealized.rs rename to crates/brk_computer/src/distribution/metrics/unrealized.rs index 013b9a031..5b26e88df 100644 --- a/crates/brk_computer/src/stateful/metrics/unrealized.rs +++ b/crates/brk_computer/src/distribution/metrics/unrealized.rs @@ -3,17 +3,17 @@ use brk_traversable::Traversable; use brk_types::{DateIndex, Dollars, Height, Sats, Version}; use rayon::prelude::*; use vecdb::{ - AnyStoredVec, EagerVec, Exit, GenericStoredVec, ImportableVec, IterableCloneableVec, + AnyStoredVec, AnyVec, EagerVec, Exit, GenericStoredVec, ImportableVec, IterableCloneableVec, LazyVecFrom1, LazyVecFrom2, Negate, PcoVec, }; use crate::{ ComputeIndexes, - grouped::{ + internal::{ ComputedHeightValueVecs, ComputedValueVecsFromDateIndex, ComputedVecsFromDateIndex, DollarsMinus, DollarsPlus, LazyVecsFromDateIndex, Source, VecBuilderOptions, }, - stateful::state::UnrealizedState, + distribution::state::UnrealizedState, }; use super::ImportConfig; @@ -144,7 +144,7 @@ impl UnrealizedMetrics { let price_source = cfg .price - .map(|p| p.chainindexes_to_price_close.height.boxed_clone()); + .map(|p| p.usd.chainindexes_to_price_close.height.boxed_clone()); let height_to_supply_in_profit_value = ComputedHeightValueVecs::forced_import( cfg.db, @@ -205,6 +205,24 @@ impl UnrealizedMetrics { }) } + /// Get minimum length across height-indexed vectors written in block loop. + pub fn min_stateful_height_len(&self) -> usize { + self.height_to_supply_in_profit + .len() + .min(self.height_to_supply_in_loss.len()) + .min(self.height_to_unrealized_profit.len()) + .min(self.height_to_unrealized_loss.len()) + } + + /// Get minimum length across dateindex-indexed vectors written in block loop. 
+ pub fn min_stateful_dateindex_len(&self) -> usize { + self.dateindex_to_supply_in_profit + .len() + .min(self.dateindex_to_supply_in_loss.len()) + .min(self.dateindex_to_unrealized_profit.len()) + .min(self.dateindex_to_unrealized_loss.len()) + } + /// Push unrealized state values to height-indexed vectors. pub fn truncate_push( &mut self, diff --git a/crates/brk_computer/src/stateful/mod.rs b/crates/brk_computer/src/distribution/mod.rs similarity index 88% rename from crates/brk_computer/src/stateful/mod.rs rename to crates/brk_computer/src/distribution/mod.rs index 985f7e764..a8624d84f 100644 --- a/crates/brk_computer/src/stateful/mod.rs +++ b/crates/brk_computer/src/distribution/mod.rs @@ -10,7 +10,7 @@ mod vecs; pub use range_map::RangeMap; pub use vecs::Vecs; -pub const DB_NAME: &str = "stateful"; +pub const DB_NAME: &str = "distribution"; pub use address::{AddressTypeToTypeIndexMap, AddressesDataVecs, AnyAddressIndexesVecs}; pub use cohorts::{AddressCohorts, DynCohortVecs, UTXOCohorts}; diff --git a/crates/brk_computer/src/stateful/range_map.rs b/crates/brk_computer/src/distribution/range_map.rs similarity index 100% rename from crates/brk_computer/src/stateful/range_map.rs rename to crates/brk_computer/src/distribution/range_map.rs diff --git a/crates/brk_computer/src/stateful/state/block.rs b/crates/brk_computer/src/distribution/state/block.rs similarity index 100% rename from crates/brk_computer/src/stateful/state/block.rs rename to crates/brk_computer/src/distribution/state/block.rs diff --git a/crates/brk_computer/src/stateful/state/cohort/address.rs b/crates/brk_computer/src/distribution/state/cohort/address.rs similarity index 100% rename from crates/brk_computer/src/stateful/state/cohort/address.rs rename to crates/brk_computer/src/distribution/state/cohort/address.rs diff --git a/crates/brk_computer/src/stateful/state/cohort/base.rs b/crates/brk_computer/src/distribution/state/cohort/base.rs similarity index 99% rename from 
crates/brk_computer/src/stateful/state/cohort/base.rs rename to crates/brk_computer/src/distribution/state/cohort/base.rs index 6a5af7e9d..ee613cf63 100644 --- a/crates/brk_computer/src/stateful/state/cohort/base.rs +++ b/crates/brk_computer/src/distribution/state/cohort/base.rs @@ -3,7 +3,7 @@ use std::path::Path; use brk_error::Result; use brk_types::{Dollars, Height, Sats, SupplyState}; -use crate::grouped::PERCENTILES_LEN; +use crate::internal::PERCENTILES_LEN; use super::super::cost_basis::{ CachedUnrealizedState, PriceToAmount, RealizedState, UnrealizedState, diff --git a/crates/brk_computer/src/stateful/state/cohort/mod.rs b/crates/brk_computer/src/distribution/state/cohort/mod.rs similarity index 100% rename from crates/brk_computer/src/stateful/state/cohort/mod.rs rename to crates/brk_computer/src/distribution/state/cohort/mod.rs diff --git a/crates/brk_computer/src/stateful/state/cohort/utxo.rs b/crates/brk_computer/src/distribution/state/cohort/utxo.rs similarity index 100% rename from crates/brk_computer/src/stateful/state/cohort/utxo.rs rename to crates/brk_computer/src/distribution/state/cohort/utxo.rs diff --git a/crates/brk_computer/src/stateful/state/cost_basis/mod.rs b/crates/brk_computer/src/distribution/state/cost_basis/mod.rs similarity index 100% rename from crates/brk_computer/src/stateful/state/cost_basis/mod.rs rename to crates/brk_computer/src/distribution/state/cost_basis/mod.rs diff --git a/crates/brk_computer/src/stateful/state/cost_basis/price_to_amount.rs b/crates/brk_computer/src/distribution/state/cost_basis/price_to_amount.rs similarity index 99% rename from crates/brk_computer/src/stateful/state/cost_basis/price_to_amount.rs rename to crates/brk_computer/src/distribution/state/cost_basis/price_to_amount.rs index 9f0840964..fc07996fa 100644 --- a/crates/brk_computer/src/stateful/state/cost_basis/price_to_amount.rs +++ b/crates/brk_computer/src/distribution/state/cost_basis/price_to_amount.rs @@ -14,7 +14,7 @@ use 
serde::{Deserialize, Serialize}; use vecdb::Bytes; use crate::{ - grouped::{PERCENTILES, PERCENTILES_LEN}, + internal::{PERCENTILES, PERCENTILES_LEN}, utils::OptionExt, }; diff --git a/crates/brk_computer/src/stateful/state/cost_basis/realized.rs b/crates/brk_computer/src/distribution/state/cost_basis/realized.rs similarity index 100% rename from crates/brk_computer/src/stateful/state/cost_basis/realized.rs rename to crates/brk_computer/src/distribution/state/cost_basis/realized.rs diff --git a/crates/brk_computer/src/stateful/state/cost_basis/unrealized.rs b/crates/brk_computer/src/distribution/state/cost_basis/unrealized.rs similarity index 100% rename from crates/brk_computer/src/stateful/state/cost_basis/unrealized.rs rename to crates/brk_computer/src/distribution/state/cost_basis/unrealized.rs diff --git a/crates/brk_computer/src/stateful/state/mod.rs b/crates/brk_computer/src/distribution/state/mod.rs similarity index 100% rename from crates/brk_computer/src/stateful/state/mod.rs rename to crates/brk_computer/src/distribution/state/mod.rs diff --git a/crates/brk_computer/src/stateful/state/transacted.rs b/crates/brk_computer/src/distribution/state/transacted.rs similarity index 100% rename from crates/brk_computer/src/stateful/state/transacted.rs rename to crates/brk_computer/src/distribution/state/transacted.rs diff --git a/crates/brk_computer/src/stateful/vecs.rs b/crates/brk_computer/src/distribution/vecs.rs similarity index 68% rename from crates/brk_computer/src/stateful/vecs.rs rename to crates/brk_computer/src/distribution/vecs.rs index bb87df3a0..fbf3ffb60 100644 --- a/crates/brk_computer/src/stateful/vecs.rs +++ b/crates/brk_computer/src/distribution/vecs.rs @@ -4,28 +4,25 @@ use brk_error::Result; use brk_indexer::Indexer; use brk_traversable::Traversable; use brk_types::{ - Dollars, EmptyAddressData, EmptyAddressIndex, Height, LoadedAddressData, LoadedAddressIndex, - Sats, StoredU64, SupplyState, Version, + EmptyAddressData, EmptyAddressIndex, 
Height, LoadedAddressData, LoadedAddressIndex, StoredU64, + SupplyState, Version, }; use log::info; use vecdb::{ - AnyVec, BytesVec, Database, EagerVec, Exit, GenericStoredVec, ImportableVec, - IterableCloneableVec, LazyVecFrom1, PAGE_SIZE, PcoVec, Stamp, TypedVecIterator, VecIndex, + AnyVec, BytesVec, Database, Exit, GenericStoredVec, ImportableVec, IterableCloneableVec, + LazyVecFrom1, PAGE_SIZE, Stamp, TypedVecIterator, VecIndex, }; use crate::{ - ComputeIndexes, chain, - grouped::{ - ComputedValueVecsFromHeight, ComputedVecsFromDateIndex, ComputedVecsFromHeight, Source, - VecBuilderOptions, - }, - indexes, price, - stateful::{ + ComputeIndexes, blocks, + distribution::{ compute::{StartMode, determine_start_mode, process_blocks, recover_state, reset_state}, state::BlockState, }, - txins, - utils::OptionExt, + indexes, + inputs, + internal::{ComputedVecsFromHeight, Source, VecBuilderOptions}, + outputs, price, transactions, }; use super::{ @@ -48,19 +45,13 @@ pub struct Vecs { pub utxo_cohorts: UTXOCohorts, pub address_cohorts: AddressCohorts, - pub height_to_unspendable_supply: EagerVec>, - pub height_to_opreturn_supply: EagerVec>, pub addresstype_to_height_to_addr_count: AddressTypeToHeightToAddressCount, pub addresstype_to_height_to_empty_addr_count: AddressTypeToHeightToAddressCount, pub addresstype_to_indexes_to_addr_count: AddressTypeToIndexesToAddressCount, pub addresstype_to_indexes_to_empty_addr_count: AddressTypeToIndexesToAddressCount, - pub indexes_to_unspendable_supply: ComputedValueVecsFromHeight, - pub indexes_to_opreturn_supply: ComputedValueVecsFromHeight, pub indexes_to_addr_count: ComputedVecsFromHeight, pub indexes_to_empty_addr_count: ComputedVecsFromHeight, - pub height_to_market_cap: Option>, - pub indexes_to_market_cap: Option>, pub loadedaddressindex_to_loadedaddressindex: LazyVecFrom1, pub emptyaddressindex_to_emptyaddressindex: @@ -83,9 +74,7 @@ impl Vecs { db.set_min_len(PAGE_SIZE * 20_000_000)?; db.set_min_regions(50_000)?; - let 
compute_dollars = price.is_some(); let v0 = version + VERSION + Version::ZERO; - let v2 = version + VERSION + Version::TWO; let utxo_cohorts = UTXOCohorts::forced_import(&db, version, indexes, price, &states_path)?; @@ -123,30 +112,6 @@ impl Vecs { |index, _| Some(index), ); - let height_to_unspendable_supply: EagerVec> = - EagerVec::forced_import(&db, "unspendable_supply", v0)?; - let indexes_to_unspendable_supply = ComputedValueVecsFromHeight::forced_import( - &db, - "unspendable_supply", - Source::Vec(height_to_unspendable_supply.boxed_clone()), - v0, - VecBuilderOptions::default().add_last(), - compute_dollars, - indexes, - )?; - - let height_to_opreturn_supply: EagerVec> = - EagerVec::forced_import(&db, "opreturn_supply", v0)?; - let indexes_to_opreturn_supply = ComputedValueVecsFromHeight::forced_import( - &db, - "opreturn_supply", - Source::Vec(height_to_opreturn_supply.boxed_clone()), - v0, - VecBuilderOptions::default().add_last(), - compute_dollars, - indexes, - )?; - // Extract address type height vecs before struct literal to use as sources let addresstype_to_height_to_addr_count = AddressTypeToHeightToAddressCount::forced_import(&db, "addr_count", v0)?; @@ -159,11 +124,6 @@ impl Vecs { .with_saved_stamped_changes(SAVED_STAMPED_CHANGES), )?, - height_to_unspendable_supply, - indexes_to_unspendable_supply, - height_to_opreturn_supply, - indexes_to_opreturn_supply, - indexes_to_addr_count: ComputedVecsFromHeight::forced_import( &db, "addr_count", @@ -181,35 +141,6 @@ impl Vecs { VecBuilderOptions::default().add_last(), )?, - height_to_market_cap: compute_dollars.then(|| { - LazyVecFrom1::init( - "market_cap", - v0, - utxo_cohorts - .all - .metrics - .supply - .height_to_supply_value - .dollars - .as_ref() - .unwrap() - .boxed_clone(), - |height: Height, iter| iter.get(height), - ) - }), - - indexes_to_market_cap: compute_dollars.then(|| { - ComputedVecsFromDateIndex::forced_import( - &db, - "market_cap", - Source::Compute, - v2, - indexes, - 
VecBuilderOptions::default().add_last(), - ) - .unwrap() - }), - addresstype_to_indexes_to_addr_count: AddressTypeToIndexesToAddressCount::forced_import( &db, "addr_count", @@ -265,32 +196,19 @@ impl Vecs { &mut self, indexer: &Indexer, indexes: &indexes::Vecs, - txins: &txins::Vecs, - chain: &chain::Vecs, + inputs: &inputs::Vecs, + outputs: &outputs::Vecs, + transactions: &transactions::Vecs, + blocks: &blocks::Vecs, price: Option<&price::Vecs>, starting_indexes: &mut ComputeIndexes, exit: &Exit, ) -> Result<()> { // 1. Find minimum computed height for recovery let chain_state_height = Height::from(self.chain_state.len()); - - // Get minimum heights without holding mutable references - let utxo_min = self.utxo_cohorts.min_separate_height_vecs_len(); - let address_min = self.address_cohorts.min_separate_height_vecs_len(); - - let stateful_min = utxo_min - .min(address_min) - .min(Height::from(self.chain_state.len())) - .min(self.any_address_indexes.min_stamped_height()) - .min(self.addresses_data.min_stamped_height()) - .min(Height::from(self.height_to_unspendable_supply.len())) - .min(Height::from(self.height_to_opreturn_supply.len())) - .min(Height::from( - self.addresstype_to_height_to_addr_count.min_len(), - )) - .min(Height::from( - self.addresstype_to_height_to_empty_addr_count.min_len(), - )); + let height_based_min = self.min_stateful_height_len(); + let dateindex_min = self.min_stateful_dateindex_len(); + let stateful_min = adjust_for_dateindex_gap(height_based_min, dateindex_min, indexes)?; // 2. 
Determine start mode and recover/reset state let start_mode = determine_start_mode(stateful_min, chain_state_height); @@ -324,8 +242,6 @@ impl Vecs { // Fresh start: reset all state let (starting_height, mut chain_state) = if recovered_height.is_zero() { self.chain_state.reset()?; - self.height_to_unspendable_supply.reset()?; - self.height_to_opreturn_supply.reset()?; self.addresstype_to_height_to_addr_count.reset()?; self.addresstype_to_height_to_empty_addr_count.reset()?; reset_state( @@ -339,8 +255,8 @@ impl Vecs { (Height::ZERO, vec![]) } else { // Recover chain_state from stored values - let height_to_timestamp = &indexes.block.height_to_timestamp_fixed; - let height_to_price = price.map(|p| &p.chainindexes_to_price_close.height); + let height_to_timestamp = &blocks.time.height_to_timestamp_fixed; + let height_to_price = price.map(|p| &p.usd.chainindexes_to_price_close.height); let mut height_to_timestamp_iter = height_to_timestamp.into_iter(); let mut height_to_price_iter = height_to_price.map(|v| v.into_iter()); @@ -382,8 +298,10 @@ impl Vecs { self, indexer, indexes, - txins, - chain, + inputs, + outputs, + transactions, + blocks, price, starting_height, last_height, @@ -410,61 +328,22 @@ impl Vecs { exit, )?; - // 7. Compute indexes_to_market_cap from dateindex supply - if let Some(indexes_to_market_cap) = self.indexes_to_market_cap.as_mut() { - indexes_to_market_cap.compute_all(starting_indexes, exit, |v| { - v.compute_transform( - starting_indexes.dateindex, - self.utxo_cohorts - .all - .metrics - .supply - .indexes_to_supply - .dollars - .as_ref() - .unwrap() - .dateindex - .as_ref() - .unwrap(), - |(i, v, ..)| (i, v), - exit, - )?; - Ok(()) - })?; - } + // 7. Compute rest part2 (relative metrics) + let supply_metrics = &self.utxo_cohorts.all.metrics.supply; - // 7b. 
Compute indexes for unspendable and opreturn supply - self.indexes_to_unspendable_supply.compute_rest( - indexes, - price, - starting_indexes, - exit, - Some(&self.height_to_unspendable_supply), - )?; - self.indexes_to_opreturn_supply.compute_rest( - indexes, - price, - starting_indexes, - exit, - Some(&self.height_to_opreturn_supply), - )?; + let height_to_supply = &supply_metrics.height_to_supply_value.bitcoin.clone(); - // 8. Compute rest part2 (relative metrics) - let height_to_supply = &self - .utxo_cohorts - .all - .metrics - .supply + let height_to_market_cap = supply_metrics .height_to_supply_value - .bitcoin - .clone(); - - let height_to_market_cap = self.height_to_market_cap.clone(); - - let dateindex_to_market_cap = self - .indexes_to_market_cap + .dollars .as_ref() - .map(|v| v.dateindex.u().clone()); + .cloned(); + + let dateindex_to_market_cap = supply_metrics + .indexes_to_supply + .dollars + .as_ref() + .and_then(|v| v.dateindex.as_ref().cloned()); let height_to_market_cap_ref = height_to_market_cap.as_ref(); let dateindex_to_market_cap_ref = dateindex_to_market_cap.as_ref(); @@ -490,4 +369,67 @@ impl Vecs { self.db.flush()?; Ok(()) } + + /// Get minimum length across all height-indexed stateful vectors. + fn min_stateful_height_len(&self) -> Height { + self.utxo_cohorts + .min_separate_stateful_height_len() + .min(self.address_cohorts.min_separate_stateful_height_len()) + .min(Height::from(self.chain_state.len())) + .min(self.any_address_indexes.min_stamped_height()) + .min(self.addresses_data.min_stamped_height()) + .min(Height::from( + self.addresstype_to_height_to_addr_count.min_len(), + )) + .min(Height::from( + self.addresstype_to_height_to_empty_addr_count.min_len(), + )) + } + + /// Get minimum length across all dateindex-indexed stateful vectors. 
+ fn min_stateful_dateindex_len(&self) -> usize { + self.utxo_cohorts + .min_separate_stateful_dateindex_len() + .min(self.utxo_cohorts.min_aggregate_stateful_dateindex_len()) + .min(self.address_cohorts.min_separate_stateful_dateindex_len()) + } +} + +/// Adjust start height if dateindex vecs are behind where they should be. +/// +/// To resume at height H (in day D), we need days 0..D-1 complete in dateindex vecs. +/// If dateindex vecs only have length N < D, restart from the first height of day N. +fn adjust_for_dateindex_gap( + height_based_min: Height, + dateindex_min: usize, + indexes: &indexes::Vecs, +) -> Result { + // Skip check if no dateindex vecs exist or starting from zero + if dateindex_min == usize::MAX || height_based_min.is_zero() { + return Ok(height_based_min); + } + + // Skip if height_to_dateindex doesn't cover height_based_min yet + if height_based_min.to_usize() >= indexes.block.height_to_dateindex.len() { + return Ok(height_based_min); + } + + // Get the dateindex at the height we want to resume at + let required_dateindex: usize = indexes + .block + .height_to_dateindex + .read_once(height_based_min)? + .into(); + + // If dateindex vecs are behind, restart from first height of the missing day + if dateindex_min < required_dateindex + && dateindex_min < indexes.time.dateindex_to_first_height.len() + { + Ok(indexes + .time + .dateindex_to_first_height + .read_once(dateindex_min.into())?) 
+ } else { + Ok(height_based_min) + } } diff --git a/crates/brk_computer/src/fetched.rs b/crates/brk_computer/src/fetched.rs deleted file mode 100644 index d30219277..000000000 --- a/crates/brk_computer/src/fetched.rs +++ /dev/null @@ -1,140 +0,0 @@ -use std::path::Path; - -use brk_error::Result; -use brk_fetcher::Fetcher; -use brk_indexer::Indexer; -use brk_traversable::Traversable; -use brk_types::{DateIndex, Height, OHLCCents, Version}; -use vecdb::{ - AnyStoredVec, AnyVec, BytesVec, Database, Exit, GenericStoredVec, ImportableVec, IterableVec, - PAGE_SIZE, TypedVecIterator, VecIndex, -}; - -use super::{ComputeIndexes, indexes, utils::OptionExt}; - -pub const DB_NAME: &str = "fetched"; - -#[derive(Clone, Traversable)] -pub struct Vecs { - db: Database, - fetcher: Fetcher, - - pub dateindex_to_price_ohlc_in_cents: BytesVec, - pub height_to_price_ohlc_in_cents: BytesVec, -} - -impl Vecs { - pub fn forced_import(parent: &Path, fetcher: Fetcher, version: Version) -> Result { - let db = Database::open(&parent.join(DB_NAME))?; - db.set_min_len(PAGE_SIZE * 1_000_000)?; - - let this = Self { - fetcher, - - dateindex_to_price_ohlc_in_cents: BytesVec::forced_import( - &db, - "price_ohlc_in_cents", - version + Version::ZERO, - )?, - height_to_price_ohlc_in_cents: BytesVec::forced_import( - &db, - "price_ohlc_in_cents", - version + Version::ZERO, - )?, - - db, - }; - - this.db.retain_regions( - this.iter_any_exportable() - .flat_map(|v| v.region_names()) - .collect(), - )?; - this.db.compact()?; - - Ok(this) - } - - pub fn compute( - &mut self, - indexer: &Indexer, - indexes: &indexes::Vecs, - starting_indexes: &ComputeIndexes, - exit: &Exit, - ) -> Result<()> { - self.compute_(indexer, indexes, starting_indexes, exit)?; - let _lock = exit.lock(); - self.db.compact()?; - Ok(()) - } - - fn compute_( - &mut self, - indexer: &Indexer, - indexes: &indexes::Vecs, - starting_indexes: &ComputeIndexes, - exit: &Exit, - ) -> Result<()> { - let height_to_timestamp = 
&indexer.vecs.block.height_to_timestamp; - let index = starting_indexes - .height - .min(Height::from(self.height_to_price_ohlc_in_cents.len())); - let mut prev_timestamp = index - .decremented() - .map(|prev_i| height_to_timestamp.iter().unwrap().get_unwrap(prev_i)); - height_to_timestamp - .iter()? - .enumerate() - .skip(index.to_usize()) - .try_for_each(|(i, v)| -> Result<()> { - self.height_to_price_ohlc_in_cents.truncate_push_at( - i, - self.fetcher - .get_height(i.into(), v, prev_timestamp) - .unwrap(), - )?; - prev_timestamp = Some(v); - Ok(()) - })?; - self.height_to_price_ohlc_in_cents.safe_write(exit)?; - - let index = starting_indexes - .dateindex - .min(DateIndex::from(self.dateindex_to_price_ohlc_in_cents.len())); - let mut prev = Some(index.decremented().map_or(OHLCCents::default(), |prev_i| { - self.dateindex_to_price_ohlc_in_cents - .iter() - .unwrap() - .get_unwrap(prev_i) - })); - indexes - .time - .dateindex_to_date - .iter() - .enumerate() - .skip(index.to_usize()) - .try_for_each(|(i, d)| -> Result<()> { - let ohlc = if i.to_usize() + 100 >= self.dateindex_to_price_ohlc_in_cents.len() - && let Ok(mut ohlc) = self.fetcher.get_date(d) - { - let prev_open = *prev.u().close; - *ohlc.open = prev_open; - *ohlc.high = (*ohlc.high).max(prev_open); - *ohlc.low = (*ohlc.low).min(prev_open); - ohlc - } else { - prev.clone().unwrap() - }; - - prev.replace(ohlc.clone()); - - self.dateindex_to_price_ohlc_in_cents - .truncate_push_at(i, ohlc)?; - - Ok(()) - })?; - self.dateindex_to_price_ohlc_in_cents.safe_write(exit)?; - - Ok(()) - } -} diff --git a/crates/brk_computer/src/grouped/lazy_from_height_strict.rs b/crates/brk_computer/src/grouped/lazy_from_height_strict.rs deleted file mode 100644 index 4d49f5ce5..000000000 --- a/crates/brk_computer/src/grouped/lazy_from_height_strict.rs +++ /dev/null @@ -1,70 +0,0 @@ -use brk_traversable::Traversable; -use brk_types::{DifficultyEpoch, Height, Version}; -use schemars::JsonSchema; -use vecdb::{AnyExportableVec, 
IterableBoxedVec, LazyVecFrom1, UnaryTransform}; - -use super::{ComputedVecValue, ComputedVecsFromHeightStrict, LazyTransformBuilder}; - -const VERSION: Version = Version::ZERO; - -/// Fully lazy version of `ComputedVecsFromHeightStrict` where all vecs are lazy transforms. -#[derive(Clone)] -pub struct LazyVecsFromHeightStrict -where - T: ComputedVecValue + PartialOrd + JsonSchema, - S1T: ComputedVecValue, -{ - pub height: LazyVecFrom1, - pub difficultyepoch: LazyTransformBuilder, -} - -impl LazyVecsFromHeightStrict -where - T: ComputedVecValue + JsonSchema + 'static, - S1T: ComputedVecValue + JsonSchema, -{ - /// Create a lazy transform from a stored `ComputedVecsFromHeightStrict`. - /// F is the transform type (e.g., `Negate`, `Halve`). - pub fn from_computed>( - name: &str, - version: Version, - height_source: IterableBoxedVec, - source: &ComputedVecsFromHeightStrict, - ) -> Self { - let v = version + VERSION; - Self { - height: LazyVecFrom1::transformed::(name, v, height_source), - difficultyepoch: LazyTransformBuilder::from_lazy::(name, v, &source.difficultyepoch), - } - } -} - -impl Traversable for LazyVecsFromHeightStrict -where - T: ComputedVecValue + JsonSchema, - S1T: ComputedVecValue, -{ - fn to_tree_node(&self) -> brk_traversable::TreeNode { - brk_traversable::TreeNode::Branch( - [ - Some(("height".to_string(), self.height.to_tree_node())), - Some(( - "difficultyepoch".to_string(), - self.difficultyepoch.to_tree_node(), - )), - ] - .into_iter() - .flatten() - .collect(), - ) - .merge_branches() - .unwrap() - } - - fn iter_any_exportable(&self) -> impl Iterator { - let mut regular_iter: Box> = - Box::new(self.height.iter_any_exportable()); - regular_iter = Box::new(regular_iter.chain(self.difficultyepoch.iter_any_exportable())); - regular_iter - } -} diff --git a/crates/brk_computer/src/grouped/lazy_from_txindex.rs b/crates/brk_computer/src/grouped/lazy_from_txindex.rs deleted file mode 100644 index ac64a7d57..000000000 --- 
a/crates/brk_computer/src/grouped/lazy_from_txindex.rs +++ /dev/null @@ -1,113 +0,0 @@ -use brk_traversable::Traversable; -use brk_types::{ - DateIndex, DecadeIndex, DifficultyEpoch, Height, MonthIndex, QuarterIndex, SemesterIndex, - TxIndex, Version, WeekIndex, YearIndex, -}; -use schemars::JsonSchema; -use vecdb::{AnyExportableVec, IterableBoxedVec, LazyVecFrom1, UnaryTransform}; - -use super::{ComputedVecValue, ComputedVecsFromTxindex, LazyTransformBuilder}; - -const VERSION: Version = Version::ZERO; - -/// Fully lazy version of `ComputedVecsFromTxindex` where all vecs are lazy transforms. -#[derive(Clone)] -pub struct LazyVecsFromTxindex -where - T: ComputedVecValue + PartialOrd + JsonSchema, - S1T: ComputedVecValue, -{ - pub txindex: Option>, - pub height: LazyTransformBuilder, - pub dateindex: LazyTransformBuilder, - pub weekindex: LazyTransformBuilder, - pub difficultyepoch: LazyTransformBuilder, - pub monthindex: LazyTransformBuilder, - pub quarterindex: LazyTransformBuilder, - pub semesterindex: LazyTransformBuilder, - pub yearindex: LazyTransformBuilder, - pub decadeindex: LazyTransformBuilder, -} - -impl LazyVecsFromTxindex -where - T: ComputedVecValue + JsonSchema + 'static, - S1T: ComputedVecValue + JsonSchema, -{ - /// Create a lazy transform from a stored `ComputedVecsFromTxindex`. - /// F is the transform type (e.g., `Negate`, `Halve`). 
- pub fn from_computed>( - name: &str, - version: Version, - txindex_source: Option>, - source: &ComputedVecsFromTxindex, - ) -> Self { - let v = version + VERSION; - Self { - txindex: txindex_source.map(|s| LazyVecFrom1::transformed::(name, v, s)), - height: LazyTransformBuilder::from_eager::(name, v, &source.height), - dateindex: LazyTransformBuilder::from_eager::(name, v, &source.dateindex), - weekindex: LazyTransformBuilder::from_lazy::(name, v, &source.weekindex), - difficultyepoch: LazyTransformBuilder::from_eager::(name, v, &source.difficultyepoch), - monthindex: LazyTransformBuilder::from_lazy::(name, v, &source.monthindex), - quarterindex: LazyTransformBuilder::from_lazy::(name, v, &source.quarterindex), - semesterindex: LazyTransformBuilder::from_lazy::(name, v, &source.semesterindex), - yearindex: LazyTransformBuilder::from_lazy::(name, v, &source.yearindex), - decadeindex: LazyTransformBuilder::from_lazy::(name, v, &source.decadeindex), - } - } -} - -impl Traversable for LazyVecsFromTxindex -where - T: ComputedVecValue + JsonSchema, - S1T: ComputedVecValue, -{ - fn to_tree_node(&self) -> brk_traversable::TreeNode { - brk_traversable::TreeNode::Branch( - [ - self.txindex - .as_ref() - .map(|v| ("txindex".to_string(), v.to_tree_node())), - Some(("height".to_string(), self.height.to_tree_node())), - Some(("dateindex".to_string(), self.dateindex.to_tree_node())), - Some(( - "difficultyepoch".to_string(), - self.difficultyepoch.to_tree_node(), - )), - Some(("weekindex".to_string(), self.weekindex.to_tree_node())), - Some(("monthindex".to_string(), self.monthindex.to_tree_node())), - Some(("quarterindex".to_string(), self.quarterindex.to_tree_node())), - Some(( - "semesterindex".to_string(), - self.semesterindex.to_tree_node(), - )), - Some(("yearindex".to_string(), self.yearindex.to_tree_node())), - Some(("decadeindex".to_string(), self.decadeindex.to_tree_node())), - ] - .into_iter() - .flatten() - .collect(), - ) - .merge_branches() - .unwrap() - } - - fn 
iter_any_exportable(&self) -> impl Iterator { - let mut regular_iter: Box> = - Box::new(std::iter::empty()); - if let Some(ref txindex) = self.txindex { - regular_iter = Box::new(regular_iter.chain(txindex.iter_any_exportable())); - } - regular_iter = Box::new(regular_iter.chain(self.height.iter_any_exportable())); - regular_iter = Box::new(regular_iter.chain(self.dateindex.iter_any_exportable())); - regular_iter = Box::new(regular_iter.chain(self.difficultyepoch.iter_any_exportable())); - regular_iter = Box::new(regular_iter.chain(self.weekindex.iter_any_exportable())); - regular_iter = Box::new(regular_iter.chain(self.monthindex.iter_any_exportable())); - regular_iter = Box::new(regular_iter.chain(self.quarterindex.iter_any_exportable())); - regular_iter = Box::new(regular_iter.chain(self.semesterindex.iter_any_exportable())); - regular_iter = Box::new(regular_iter.chain(self.yearindex.iter_any_exportable())); - regular_iter = Box::new(regular_iter.chain(self.decadeindex.iter_any_exportable())); - regular_iter - } -} diff --git a/crates/brk_computer/src/indexes/block/compute.rs b/crates/brk_computer/src/indexes/block/compute.rs index 32039aa16..f20b01f51 100644 --- a/crates/brk_computer/src/indexes/block/compute.rs +++ b/crates/brk_computer/src/indexes/block/compute.rs @@ -1,14 +1,16 @@ use brk_error::Result; use brk_indexer::Indexer; -use brk_types::{Date, DateIndex, DifficultyEpoch, HalvingEpoch}; +use brk_types::{DateIndex, DifficultyEpoch, HalvingEpoch}; use vecdb::{Exit, TypedVecIterator}; use super::Vecs; +use crate::blocks; impl Vecs { pub fn compute( &mut self, indexer: &Indexer, + blocks_time: &blocks::time::Vecs, starting_indexes: &brk_indexer::Indexes, exit: &Exit, ) -> Result<(DateIndex, DifficultyEpoch, HalvingEpoch)> { @@ -25,45 +27,9 @@ impl Vecs { exit, )?; - self.height_to_date.compute_transform( - starting_indexes.height, - &indexer.vecs.block.height_to_timestamp, - |(h, t, ..)| (h, Date::from(t)), - exit, - )?; - - let mut prev_timestamp_fixed 
= None; - self.height_to_timestamp_fixed.compute_transform( - starting_indexes.height, - &indexer.vecs.block.height_to_timestamp, - |(h, timestamp, height_to_timestamp_fixed_iter)| { - if prev_timestamp_fixed.is_none() - && let Some(prev_h) = h.decremented() - { - prev_timestamp_fixed.replace( - height_to_timestamp_fixed_iter - .into_iter() - .get_unwrap(prev_h), - ); - } - let timestamp_fixed = - prev_timestamp_fixed.map_or(timestamp, |prev_d| prev_d.max(timestamp)); - prev_timestamp_fixed.replace(timestamp_fixed); - (h, timestamp_fixed) - }, - exit, - )?; - - self.height_to_date_fixed.compute_transform( - starting_indexes.height, - &self.height_to_timestamp_fixed, - |(h, t, ..)| (h, Date::from(t)), - exit, - )?; - let decremented_starting_height = starting_indexes.height.decremented().unwrap_or_default(); - // DateIndex (computed before time module needs it) + // DateIndex (uses blocks_time.height_to_date_fixed computed in blocks::time::compute_early) let starting_dateindex = self .height_to_dateindex .into_iter() @@ -72,7 +38,7 @@ impl Vecs { self.height_to_dateindex.compute_transform( starting_indexes.height, - &self.height_to_date_fixed, + &blocks_time.height_to_date_fixed, |(h, d, ..)| (h, DateIndex::try_from(d).unwrap()), exit, )?; @@ -115,7 +81,7 @@ impl Vecs { self.difficultyepoch_to_height_count.compute_count_from_indexes( starting_difficultyepoch, &self.difficultyepoch_to_first_height, - &self.height_to_date, + &blocks_time.height_to_date, exit, )?; diff --git a/crates/brk_computer/src/indexes/block/import.rs b/crates/brk_computer/src/indexes/block/import.rs index 6e2b38038..28017b2c2 100644 --- a/crates/brk_computer/src/indexes/block/import.rs +++ b/crates/brk_computer/src/indexes/block/import.rs @@ -7,13 +7,10 @@ use super::Vecs; impl Vecs { pub fn forced_import(db: &Database, version: Version) -> Result { Ok(Self { - height_to_date: EagerVec::forced_import(db, "date", version)?, - height_to_date_fixed: EagerVec::forced_import(db, "date_fixed", 
version)?, height_to_dateindex: EagerVec::forced_import(db, "dateindex", version)?, height_to_difficultyepoch: EagerVec::forced_import(db, "difficultyepoch", version)?, height_to_halvingepoch: EagerVec::forced_import(db, "halvingepoch", version)?, height_to_height: EagerVec::forced_import(db, "height", version)?, - height_to_timestamp_fixed: EagerVec::forced_import(db, "timestamp_fixed", version)?, height_to_txindex_count: EagerVec::forced_import(db, "txindex_count", version)?, difficultyepoch_to_difficultyepoch: EagerVec::forced_import(db, "difficultyepoch", version)?, difficultyepoch_to_first_height: EagerVec::forced_import(db, "first_height", version)?, diff --git a/crates/brk_computer/src/indexes/block/vecs.rs b/crates/brk_computer/src/indexes/block/vecs.rs index 7185b6376..1f51ae05c 100644 --- a/crates/brk_computer/src/indexes/block/vecs.rs +++ b/crates/brk_computer/src/indexes/block/vecs.rs @@ -1,16 +1,13 @@ use brk_traversable::Traversable; -use brk_types::{Date, DateIndex, DifficultyEpoch, HalvingEpoch, Height, StoredU64, Timestamp}; +use brk_types::{DateIndex, DifficultyEpoch, HalvingEpoch, Height, StoredU64}; use vecdb::{EagerVec, PcoVec}; #[derive(Clone, Traversable)] pub struct Vecs { - pub height_to_date: EagerVec>, - pub height_to_date_fixed: EagerVec>, pub height_to_dateindex: EagerVec>, pub height_to_difficultyepoch: EagerVec>, pub height_to_halvingepoch: EagerVec>, pub height_to_height: EagerVec>, - pub height_to_timestamp_fixed: EagerVec>, pub height_to_txindex_count: EagerVec>, pub difficultyepoch_to_difficultyepoch: EagerVec>, pub difficultyepoch_to_first_height: EagerVec>, diff --git a/crates/brk_computer/src/indexes/mod.rs b/crates/brk_computer/src/indexes/mod.rs index 827d1df8b..2df7ac884 100644 --- a/crates/brk_computer/src/indexes/mod.rs +++ b/crates/brk_computer/src/indexes/mod.rs @@ -13,6 +13,8 @@ use brk_types::{Indexes, Version}; pub use brk_types::ComputeIndexes; use vecdb::{Database, Exit, PAGE_SIZE}; +use crate::blocks; + pub use 
address::Vecs as AddressVecs; pub use block::Vecs as BlockVecs; pub use time::Vecs as TimeVecs; @@ -62,10 +64,11 @@ impl Vecs { pub fn compute( &mut self, indexer: &Indexer, + blocks_time: &blocks::time::Vecs, starting_indexes: Indexes, exit: &Exit, ) -> Result { - let indexes = self.compute_(indexer, starting_indexes, exit)?; + let indexes = self.compute_(indexer, blocks_time, starting_indexes, exit)?; let _lock = exit.lock(); self.db.compact()?; Ok(indexes) @@ -74,6 +77,7 @@ impl Vecs { fn compute_( &mut self, indexer: &Indexer, + blocks_time: &blocks::time::Vecs, starting_indexes: Indexes, exit: &Exit, ) -> Result { @@ -81,8 +85,9 @@ impl Vecs { self.transaction.compute(indexer, &starting_indexes, exit)?; // Block indexes (height, dateindex, difficultyepoch, halvingepoch) + // Uses blocks_time.height_to_date_fixed computed in blocks::time::compute_early let (starting_dateindex, starting_difficultyepoch, starting_halvingepoch) = - self.block.compute(indexer, &starting_indexes, exit)?; + self.block.compute(indexer, blocks_time, &starting_indexes, exit)?; // Time indexes (depends on block.height_to_dateindex) let time_indexes = self diff --git a/crates/brk_computer/src/inputs/compute.rs b/crates/brk_computer/src/inputs/compute.rs new file mode 100644 index 000000000..73e6bc17d --- /dev/null +++ b/crates/brk_computer/src/inputs/compute.rs @@ -0,0 +1,25 @@ +use brk_error::Result; +use brk_indexer::Indexer; +use vecdb::Exit; + +use super::Vecs; +use crate::{indexes, ComputeIndexes}; + +impl Vecs { + pub fn compute( + &mut self, + indexer: &Indexer, + indexes: &indexes::Vecs, + starting_indexes: &ComputeIndexes, + exit: &Exit, + ) -> Result<()> { + self.spent + .compute(&self.db, indexer, starting_indexes, exit)?; + self.count + .compute(indexer, indexes, starting_indexes, exit)?; + + let _lock = exit.lock(); + self.db.compact()?; + Ok(()) + } +} diff --git a/crates/brk_computer/src/inputs/count/compute.rs b/crates/brk_computer/src/inputs/count/compute.rs new file mode 
100644 index 000000000..1a875aa78 --- /dev/null +++ b/crates/brk_computer/src/inputs/count/compute.rs @@ -0,0 +1,26 @@ +use brk_error::Result; +use brk_indexer::Indexer; +use vecdb::Exit; + +use super::Vecs; +use crate::{indexes, ComputeIndexes}; + +impl Vecs { + pub fn compute( + &mut self, + indexer: &Indexer, + indexes: &indexes::Vecs, + starting_indexes: &ComputeIndexes, + exit: &Exit, + ) -> Result<()> { + self.indexes_to_count.compute_rest( + indexer, + indexes, + starting_indexes, + exit, + Some(&indexes.transaction.txindex_to_input_count), + )?; + + Ok(()) + } +} diff --git a/crates/brk_computer/src/inputs/count/import.rs b/crates/brk_computer/src/inputs/count/import.rs new file mode 100644 index 000000000..991f9fa53 --- /dev/null +++ b/crates/brk_computer/src/inputs/count/import.rs @@ -0,0 +1,33 @@ +use brk_error::Result; +use brk_types::Version; +use vecdb::{Database, IterableCloneableVec}; + +use super::Vecs; +use crate::{ + indexes, + internal::{ComputedVecsFromTxindex, Source, VecBuilderOptions}, +}; + +impl Vecs { + pub fn forced_import(db: &Database, version: Version, indexes: &indexes::Vecs) -> Result { + let full_stats = || { + VecBuilderOptions::default() + .add_average() + .add_minmax() + .add_percentiles() + .add_sum() + .add_cumulative() + }; + + Ok(Self { + indexes_to_count: ComputedVecsFromTxindex::forced_import( + db, + "input_count", + Source::Vec(indexes.transaction.txindex_to_input_count.boxed_clone()), + version + Version::ZERO, + indexes, + full_stats(), + )?, + }) + } +} diff --git a/crates/brk_computer/src/inputs/count/mod.rs b/crates/brk_computer/src/inputs/count/mod.rs new file mode 100644 index 000000000..1136f9ebd --- /dev/null +++ b/crates/brk_computer/src/inputs/count/mod.rs @@ -0,0 +1,5 @@ +mod compute; +mod import; +mod vecs; + +pub use vecs::Vecs; diff --git a/crates/brk_computer/src/inputs/count/vecs.rs b/crates/brk_computer/src/inputs/count/vecs.rs new file mode 100644 index 000000000..f17fe94c6 --- /dev/null +++ 
b/crates/brk_computer/src/inputs/count/vecs.rs @@ -0,0 +1,9 @@ +use brk_traversable::Traversable; +use brk_types::StoredU64; + +use crate::internal::ComputedVecsFromTxindex; + +#[derive(Clone, Traversable)] +pub struct Vecs { + pub indexes_to_count: ComputedVecsFromTxindex, +} diff --git a/crates/brk_computer/src/inputs/import.rs b/crates/brk_computer/src/inputs/import.rs new file mode 100644 index 000000000..66c27fa48 --- /dev/null +++ b/crates/brk_computer/src/inputs/import.rs @@ -0,0 +1,36 @@ +use std::path::Path; + +use brk_error::Result; +use brk_traversable::Traversable; +use brk_types::Version; +use vecdb::{Database, PAGE_SIZE}; + +use super::{CountVecs, SpentVecs, Vecs}; +use crate::indexes; + +impl Vecs { + pub fn forced_import( + parent_path: &Path, + parent_version: Version, + indexes: &indexes::Vecs, + ) -> Result { + let db = Database::open(&parent_path.join(super::DB_NAME))?; + db.set_min_len(PAGE_SIZE * 50_000_000)?; + + let version = parent_version + Version::ZERO; + + let spent = SpentVecs::forced_import(&db, version)?; + let count = CountVecs::forced_import(&db, version, indexes)?; + + let this = Self { db, spent, count }; + + this.db.retain_regions( + this.iter_any_exportable() + .flat_map(|v| v.region_names()) + .collect(), + )?; + this.db.compact()?; + + Ok(this) + } +} diff --git a/crates/brk_computer/src/inputs/mod.rs b/crates/brk_computer/src/inputs/mod.rs new file mode 100644 index 000000000..515cac15e --- /dev/null +++ b/crates/brk_computer/src/inputs/mod.rs @@ -0,0 +1,22 @@ +pub mod count; +pub mod spent; + +mod compute; +mod import; + +use brk_traversable::Traversable; +use vecdb::Database; + +pub use count::Vecs as CountVecs; +pub use spent::Vecs as SpentVecs; + +pub const DB_NAME: &str = "inputs"; + +#[derive(Clone, Traversable)] +pub struct Vecs { + #[traversable(skip)] + pub(crate) db: Database, + + pub spent: SpentVecs, + pub count: CountVecs, +} diff --git a/crates/brk_computer/src/txins.rs 
b/crates/brk_computer/src/inputs/spent/compute.rs similarity index 67% rename from crates/brk_computer/src/txins.rs rename to crates/brk_computer/src/inputs/spent/compute.rs index 01593828c..dc4e4633b 100644 --- a/crates/brk_computer/src/txins.rs +++ b/crates/brk_computer/src/inputs/spent/compute.rs @@ -1,68 +1,31 @@ -use std::path::Path; - use brk_error::Result; use brk_indexer::Indexer; -use brk_traversable::Traversable; -use brk_types::{Sats, TxInIndex, TxIndex, TxOutIndex, Version, Vout}; +use brk_types::{Sats, TxInIndex, TxIndex, TxOutIndex, Vout}; use log::info; -use vecdb::{ - AnyStoredVec, AnyVec, Database, Exit, GenericStoredVec, ImportableVec, PAGE_SIZE, PcoVec, - TypedVecIterator, VecIndex, -}; +use vecdb::{AnyStoredVec, AnyVec, Database, Exit, GenericStoredVec, TypedVecIterator, VecIndex}; -use super::ComputeIndexes; +use super::Vecs; +use crate::ComputeIndexes; const BATCH_SIZE: usize = 2 * 1024 * 1024 * 1024 / size_of::(); -pub const DB_NAME: &str = "txins"; - -#[derive(Clone, Traversable)] -pub struct Vecs { - db: Database, - pub txinindex_to_txoutindex: PcoVec, - pub txinindex_to_value: PcoVec, -} impl Vecs { - pub fn forced_import(parent_path: &Path, parent_version: Version) -> Result { - let db = Database::open(&parent_path.join(DB_NAME))?; - db.set_min_len(PAGE_SIZE * 10_000_000)?; - - let version = parent_version + Version::ZERO; - - let this = Self { - txinindex_to_txoutindex: PcoVec::forced_import(&db, "txoutindex", version)?, - txinindex_to_value: PcoVec::forced_import(&db, "value", version)?, - db, - }; - - this.db.retain_regions( - this.iter_any_exportable() - .flat_map(|v| v.region_names()) - .collect(), - )?; - this.db.compact()?; - - Ok(this) - } - pub fn compute( &mut self, + db: &Database, indexer: &Indexer, starting_indexes: &ComputeIndexes, exit: &Exit, ) -> Result<()> { - self.compute_(indexer, starting_indexes, exit)?; - let _lock = exit.lock(); - self.db.compact()?; - Ok(()) - } + // Validate computed versions against dependencies 
+ let dep_version = indexer.vecs.txin.txinindex_to_outpoint.version() + + indexer.vecs.tx.txindex_to_first_txoutindex.version() + + indexer.vecs.txout.txoutindex_to_value.version(); + self.txinindex_to_txoutindex + .validate_computed_version_or_reset(dep_version)?; + self.txinindex_to_value + .validate_computed_version_or_reset(dep_version)?; - fn compute_( - &mut self, - indexer: &Indexer, - starting_indexes: &ComputeIndexes, - exit: &Exit, - ) -> Result<()> { let target = indexer.vecs.txin.txinindex_to_outpoint.len(); if target == 0 { return Ok(()); @@ -131,7 +94,7 @@ impl Vecs { let _lock = exit.lock(); self.txinindex_to_txoutindex.write()?; self.txinindex_to_value.write()?; - self.db.flush()?; + db.flush()?; batch_start = batch_end; } diff --git a/crates/brk_computer/src/inputs/spent/import.rs b/crates/brk_computer/src/inputs/spent/import.rs new file mode 100644 index 000000000..388ab0159 --- /dev/null +++ b/crates/brk_computer/src/inputs/spent/import.rs @@ -0,0 +1,14 @@ +use brk_error::Result; +use brk_types::Version; +use vecdb::{Database, ImportableVec, PcoVec}; + +use super::Vecs; + +impl Vecs { + pub fn forced_import(db: &Database, version: Version) -> Result { + Ok(Self { + txinindex_to_txoutindex: PcoVec::forced_import(db, "txoutindex", version)?, + txinindex_to_value: PcoVec::forced_import(db, "value", version)?, + }) + } +} diff --git a/crates/brk_computer/src/inputs/spent/mod.rs b/crates/brk_computer/src/inputs/spent/mod.rs new file mode 100644 index 000000000..1136f9ebd --- /dev/null +++ b/crates/brk_computer/src/inputs/spent/mod.rs @@ -0,0 +1,5 @@ +mod compute; +mod import; +mod vecs; + +pub use vecs::Vecs; diff --git a/crates/brk_computer/src/inputs/spent/vecs.rs b/crates/brk_computer/src/inputs/spent/vecs.rs new file mode 100644 index 000000000..3abdee1e1 --- /dev/null +++ b/crates/brk_computer/src/inputs/spent/vecs.rs @@ -0,0 +1,9 @@ +use brk_traversable::Traversable; +use brk_types::{Sats, TxInIndex, TxOutIndex}; +use vecdb::PcoVec; + 
+#[derive(Clone, Traversable)] +pub struct Vecs { + pub txinindex_to_txoutindex: PcoVec, + pub txinindex_to_value: PcoVec, +} diff --git a/crates/brk_computer/src/grouped/builder/eager.rs b/crates/brk_computer/src/internal/builder/eager.rs similarity index 93% rename from crates/brk_computer/src/grouped/builder/eager.rs rename to crates/brk_computer/src/internal/builder/eager.rs index 5338d4a1f..8b33ea171 100644 --- a/crates/brk_computer/src/grouped/builder/eager.rs +++ b/crates/brk_computer/src/internal/builder/eager.rs @@ -199,6 +199,7 @@ where let sum = values.into_iter().fold(T::from(0), |a, b| a + b); if let Some(average) = self.average.as_mut() { + // len == 0 handled by T's Div returning NaN average.truncate_push_at(index, sum / len)?; } @@ -244,7 +245,8 @@ where Ok(()) })?; - self.safe_write(exit)?; + let _lock = exit.lock(); + self.write()?; Ok(()) } @@ -341,7 +343,8 @@ where Ok(()) })?; - self.safe_write(exit)?; + let _lock = exit.lock(); + self.write()?; Ok(()) } @@ -439,6 +442,7 @@ where .fold(T::from(0), |a, b| a + b); // TODO: Multiply by count then divide by cumulative // Right now it's not 100% accurate as there could be more or less elements in the lower timeframe (28 days vs 31 days in a month for example) + // len == 0 handled by T's Div returning NaN let avg = sum / len; average.truncate_push_at(index, avg)?; } @@ -466,7 +470,8 @@ where Ok(()) })?; - self.safe_write(exit)?; + let _lock = exit.lock(); + self.write()?; Ok(()) } @@ -526,83 +531,83 @@ where self.cumulative.u() } - pub fn safe_write(&mut self, exit: &Exit) -> Result<()> { + pub fn write(&mut self) -> Result<()> { if let Some(first) = self.first.as_mut() { - first.safe_write(exit)?; + first.write()?; } if let Some(last) = self.last.as_mut() { - last.safe_write(exit)?; + last.write()?; } if let Some(min) = self.min.as_mut() { - min.safe_write(exit)?; + min.write()?; } if let Some(max) = self.max.as_mut() { - max.safe_write(exit)?; + max.write()?; } if let Some(median) = 
self.median.as_mut() { - median.safe_write(exit)?; + median.write()?; } if let Some(average) = self.average.as_mut() { - average.safe_write(exit)?; + average.write()?; } if let Some(sum) = self.sum.as_mut() { - sum.safe_write(exit)?; + sum.write()?; } if let Some(cumulative) = self.cumulative.as_mut() { - cumulative.safe_write(exit)?; + cumulative.write()?; } if let Some(pct90) = self.pct90.as_mut() { - pct90.safe_write(exit)?; + pct90.write()?; } if let Some(pct75) = self.pct75.as_mut() { - pct75.safe_write(exit)?; + pct75.write()?; } if let Some(pct25) = self.pct25.as_mut() { - pct25.safe_write(exit)?; + pct25.write()?; } if let Some(pct10) = self.pct10.as_mut() { - pct10.safe_write(exit)?; + pct10.write()?; } Ok(()) } - pub fn validate_computed_version_or_reset(&mut self, version: Version) -> Result<()> { + pub fn validate_computed_version_or_reset(&mut self, dep_version: Version) -> Result<()> { if let Some(first) = self.first.as_mut() { - first.validate_computed_version_or_reset(Version::ZERO + version)?; + first.validate_computed_version_or_reset(dep_version)?; } if let Some(last) = self.last.as_mut() { - last.validate_computed_version_or_reset(Version::ZERO + version)?; + last.validate_computed_version_or_reset(dep_version)?; } if let Some(min) = self.min.as_mut() { - min.validate_computed_version_or_reset(Version::ZERO + version)?; + min.validate_computed_version_or_reset(dep_version)?; } if let Some(max) = self.max.as_mut() { - max.validate_computed_version_or_reset(Version::ZERO + version)?; + max.validate_computed_version_or_reset(dep_version)?; } if let Some(median) = self.median.as_mut() { - median.validate_computed_version_or_reset(Version::ZERO + version)?; + median.validate_computed_version_or_reset(dep_version)?; } if let Some(average) = self.average.as_mut() { - average.validate_computed_version_or_reset(Version::ZERO + version)?; + average.validate_computed_version_or_reset(dep_version)?; } if let Some(sum) = self.sum.as_mut() { - 
sum.validate_computed_version_or_reset(Version::ZERO + version)?; + sum.validate_computed_version_or_reset(dep_version)?; } if let Some(cumulative) = self.cumulative.as_mut() { - cumulative.validate_computed_version_or_reset(Version::ZERO + version)?; + cumulative.validate_computed_version_or_reset(dep_version)?; } if let Some(pct90) = self.pct90.as_mut() { - pct90.validate_computed_version_or_reset(Version::ZERO + version)?; + pct90.validate_computed_version_or_reset(dep_version)?; } if let Some(pct75) = self.pct75.as_mut() { - pct75.validate_computed_version_or_reset(Version::ZERO + version)?; + pct75.validate_computed_version_or_reset(dep_version)?; } if let Some(pct25) = self.pct25.as_mut() { - pct25.validate_computed_version_or_reset(Version::ZERO + version)?; + pct25.validate_computed_version_or_reset(dep_version)?; } if let Some(pct10) = self.pct10.as_mut() { - pct10.validate_computed_version_or_reset(Version::ZERO + version)?; + pct10.validate_computed_version_or_reset(dep_version)?; } Ok(()) diff --git a/crates/brk_computer/src/grouped/builder/lazy.rs b/crates/brk_computer/src/internal/builder/lazy.rs similarity index 99% rename from crates/brk_computer/src/grouped/builder/lazy.rs rename to crates/brk_computer/src/internal/builder/lazy.rs index 84abfb3e4..7abb864b2 100644 --- a/crates/brk_computer/src/grouped/builder/lazy.rs +++ b/crates/brk_computer/src/internal/builder/lazy.rs @@ -3,7 +3,7 @@ use brk_types::Version; use schemars::JsonSchema; use vecdb::{FromCoarserIndex, IterableBoxedVec, IterableCloneableVec, LazyVecFrom2, VecIndex}; -use crate::grouped::{EagerVecsBuilder, VecBuilderOptions}; +use crate::internal::{EagerVecsBuilder, VecBuilderOptions}; use crate::utils::OptionExt; use super::super::ComputedVecValue; diff --git a/crates/brk_computer/src/grouped/builder/mod.rs b/crates/brk_computer/src/internal/builder/mod.rs similarity index 100% rename from crates/brk_computer/src/grouped/builder/mod.rs rename to 
crates/brk_computer/src/internal/builder/mod.rs diff --git a/crates/brk_computer/src/grouped/builder/transform.rs b/crates/brk_computer/src/internal/builder/transform.rs similarity index 100% rename from crates/brk_computer/src/grouped/builder/transform.rs rename to crates/brk_computer/src/internal/builder/transform.rs diff --git a/crates/brk_computer/src/grouped/builder/transform2.rs b/crates/brk_computer/src/internal/builder/transform2.rs similarity index 100% rename from crates/brk_computer/src/grouped/builder/transform2.rs rename to crates/brk_computer/src/internal/builder/transform2.rs diff --git a/crates/brk_computer/src/grouped/computed/from_dateindex.rs b/crates/brk_computer/src/internal/computed/from_dateindex.rs similarity index 97% rename from crates/brk_computer/src/grouped/computed/from_dateindex.rs rename to crates/brk_computer/src/internal/computed/from_dateindex.rs index d2011c97b..2a2f20b9f 100644 --- a/crates/brk_computer/src/grouped/computed/from_dateindex.rs +++ b/crates/brk_computer/src/internal/computed/from_dateindex.rs @@ -9,9 +9,9 @@ use vecdb::{ PcoVec, }; -use crate::{ComputeIndexes, grouped::LazyVecsBuilder, indexes, utils::OptionExt}; +use crate::{ComputeIndexes, internal::LazyVecsBuilder, indexes, utils::OptionExt}; -use crate::grouped::{ComputedVecValue, EagerVecsBuilder, Source, VecBuilderOptions}; +use crate::internal::{ComputedVecValue, EagerVecsBuilder, Source, VecBuilderOptions}; #[derive(Clone)] pub struct ComputedVecsFromDateIndex diff --git a/crates/brk_computer/src/grouped/computed/from_height/mod.rs b/crates/brk_computer/src/internal/computed/from_height/mod.rs similarity index 100% rename from crates/brk_computer/src/grouped/computed/from_height/mod.rs rename to crates/brk_computer/src/internal/computed/from_height/mod.rs diff --git a/crates/brk_computer/src/grouped/computed/from_height/standard.rs b/crates/brk_computer/src/internal/computed/from_height/standard.rs similarity index 98% rename from 
crates/brk_computer/src/grouped/computed/from_height/standard.rs rename to crates/brk_computer/src/internal/computed/from_height/standard.rs index e64458ece..7397a5593 100644 --- a/crates/brk_computer/src/grouped/computed/from_height/standard.rs +++ b/crates/brk_computer/src/internal/computed/from_height/standard.rs @@ -13,12 +13,12 @@ use vecdb::{ use crate::{ ComputeIndexes, - grouped::{LazyVecsBuilder, Source}, + internal::{LazyVecsBuilder, Source}, indexes, utils::OptionExt, }; -use crate::grouped::{ComputedVecValue, EagerVecsBuilder, VecBuilderOptions}; +use crate::internal::{ComputedVecValue, EagerVecsBuilder, VecBuilderOptions}; #[derive(Clone)] pub struct ComputedVecsFromHeight diff --git a/crates/brk_computer/src/grouped/computed/from_height/strict.rs b/crates/brk_computer/src/internal/computed/from_height/strict.rs similarity index 97% rename from crates/brk_computer/src/grouped/computed/from_height/strict.rs rename to crates/brk_computer/src/internal/computed/from_height/strict.rs index 301edb696..e6c247298 100644 --- a/crates/brk_computer/src/grouped/computed/from_height/strict.rs +++ b/crates/brk_computer/src/internal/computed/from_height/strict.rs @@ -9,7 +9,7 @@ use vecdb::{ use crate::{ComputeIndexes, indexes}; -use crate::grouped::{ComputedVecValue, EagerVecsBuilder, LazyVecsBuilder, VecBuilderOptions}; +use crate::internal::{ComputedVecValue, EagerVecsBuilder, LazyVecsBuilder, VecBuilderOptions}; #[derive(Clone)] pub struct ComputedVecsFromHeightStrict diff --git a/crates/brk_computer/src/grouped/computed/from_txindex.rs b/crates/brk_computer/src/internal/computed/from_txindex.rs similarity index 98% rename from crates/brk_computer/src/grouped/computed/from_txindex.rs rename to crates/brk_computer/src/internal/computed/from_txindex.rs index 4a4973a73..890ad090f 100644 --- a/crates/brk_computer/src/grouped/computed/from_txindex.rs +++ b/crates/brk_computer/src/internal/computed/from_txindex.rs @@ -13,12 +13,12 @@ use vecdb::{ use crate::{ 
ComputeIndexes, - grouped::{LazyVecsBuilder, Source}, + internal::{LazyVecsBuilder, Source}, indexes, price, utils::OptionExt, }; -use crate::grouped::{ComputedVecValue, EagerVecsBuilder, VecBuilderOptions}; +use crate::internal::{ComputedVecValue, EagerVecsBuilder, VecBuilderOptions}; #[derive(Clone)] pub struct ComputedVecsFromTxindex @@ -302,7 +302,7 @@ impl ComputedVecsFromTxindex { Ok(()) })?; - self.height.safe_write(exit)?; + self.height.write()?; self.compute_after_height(indexes, starting_indexes, exit) } @@ -331,7 +331,7 @@ impl ComputedVecsFromTxindex { let starting_index = self.height.starting_index(starting_indexes.height); - let mut close_iter = price.chainindexes_to_price_close.height.into_iter(); + let mut close_iter = price.usd.chainindexes_to_price_close.height.into_iter(); // Create iterators once before the loop to avoid repeated iterator creation let mut first_iter = bitcoin.height.first.as_ref().map(|v| v.into_iter()); @@ -392,7 +392,7 @@ impl ComputedVecsFromTxindex { Ok(()) })?; - self.height.safe_write(exit)?; + self.height.write()?; self.compute_after_height(indexes, starting_indexes, exit) } diff --git a/crates/brk_computer/src/grouped/computed/mod.rs b/crates/brk_computer/src/internal/computed/mod.rs similarity index 100% rename from crates/brk_computer/src/grouped/computed/mod.rs rename to crates/brk_computer/src/internal/computed/mod.rs diff --git a/crates/brk_computer/src/grouped/computed/traits.rs b/crates/brk_computer/src/internal/computed/traits.rs similarity index 100% rename from crates/brk_computer/src/grouped/computed/traits.rs rename to crates/brk_computer/src/internal/computed/traits.rs diff --git a/crates/brk_computer/src/grouped/lazy/binary/from_dateindex.rs b/crates/brk_computer/src/internal/lazy/binary/from_dateindex.rs similarity index 75% rename from crates/brk_computer/src/grouped/lazy/binary/from_dateindex.rs rename to crates/brk_computer/src/internal/lazy/binary/from_dateindex.rs index 6080cb7d3..0c0f85c11 100644 --- 
a/crates/brk_computer/src/grouped/lazy/binary/from_dateindex.rs +++ b/crates/brk_computer/src/internal/lazy/binary/from_dateindex.rs @@ -5,7 +5,7 @@ use brk_types::{ use schemars::JsonSchema; use vecdb::{AnyExportableVec, BinaryTransform, IterableCloneableVec, LazyVecFrom2}; -use crate::grouped::{ComputedVecValue, ComputedVecsFromDateIndex, ComputedVecsFromHeight, LazyTransform2Builder}; +use crate::internal::{ComputedVecValue, ComputedVecsFromDateIndex, ComputedVecsFromHeight, LazyTransform2Builder}; const VERSION: Version = Version::ZERO; @@ -145,6 +145,68 @@ where ), } } + + /// Create from a `ComputedVecsFromDateIndex` (first source) and `ComputedVecsFromHeight` (second source). + /// Used for ratios like NVT where numerator is from dateindex (market cap) and denominator from height (volume sum). + pub fn from_dateindex_and_height>( + name: &str, + version: Version, + source1: &ComputedVecsFromDateIndex, + source2: &ComputedVecsFromHeight, + ) -> Self + where + S2T: Ord + From + 'static, + f64: From, + { + let v = version + VERSION; + + Self { + dateindex: source1.dateindex.as_ref().map(|s1| { + LazyVecFrom2::transformed::( + name, + v, + s1.boxed_clone(), + source2.dateindex.unwrap_sum().boxed_clone(), + ) + }), + weekindex: LazyTransform2Builder::from_lazy::( + name, + v, + &source1.weekindex, + &source2.weekindex, + ), + monthindex: LazyTransform2Builder::from_lazy::( + name, + v, + &source1.monthindex, + &source2.monthindex, + ), + quarterindex: LazyTransform2Builder::from_lazy::( + name, + v, + &source1.quarterindex, + &source2.quarterindex, + ), + semesterindex: LazyTransform2Builder::from_lazy::( + name, + v, + &source1.semesterindex, + &source2.semesterindex, + ), + yearindex: LazyTransform2Builder::from_lazy::( + name, + v, + &source1.yearindex, + &source2.yearindex, + ), + decadeindex: LazyTransform2Builder::from_lazy::( + name, + v, + &source1.decadeindex, + &source2.decadeindex, + ), + } + } } impl Traversable for LazyVecsFrom2FromDateIndex diff 
--git a/crates/brk_computer/src/grouped/lazy/binary/from_height.rs b/crates/brk_computer/src/internal/lazy/binary/from_height.rs similarity index 69% rename from crates/brk_computer/src/grouped/lazy/binary/from_height.rs rename to crates/brk_computer/src/internal/lazy/binary/from_height.rs index 893582d44..1884221f4 100644 --- a/crates/brk_computer/src/grouped/lazy/binary/from_height.rs +++ b/crates/brk_computer/src/internal/lazy/binary/from_height.rs @@ -6,7 +6,9 @@ use brk_types::{ use schemars::JsonSchema; use vecdb::{AnyExportableVec, BinaryTransform, IterableBoxedVec, LazyVecFrom2}; -use crate::grouped::{ComputedVecValue, ComputedVecsFromHeight, LazyTransform2Builder}; +use crate::internal::{ + ComputedVecValue, ComputedVecsFromHeight, ComputedVecsFromTxindex, LazyTransform2Builder, +}; const VERSION: Version = Version::ZERO; @@ -105,6 +107,83 @@ where ), } } + + /// Create from a `ComputedVecsFromHeight` and a `ComputedVecsFromTxindex`. + /// Used for ratios like type_count / total_output_count where the denominator + /// comes from txindex-aggregated data. 
+ pub fn from_height_and_txindex>( + name: &str, + version: Version, + height_source1: IterableBoxedVec, + height_source2: IterableBoxedVec, + source1: &ComputedVecsFromHeight, + source2: &ComputedVecsFromTxindex, + ) -> Self + where + S2T: Ord + From + 'static, + f64: From, + { + let v = version + VERSION; + + Self { + height: LazyVecFrom2::transformed::(name, v, height_source1, height_source2), + // For height_extra, source2 uses .height (EagerVecsBuilder) instead of .height_extra + height_extra: LazyTransform2Builder::from_eager::( + name, + v, + &source1.height_extra, + &source2.height, + ), + dateindex: LazyTransform2Builder::from_eager::( + name, + v, + &source1.dateindex, + &source2.dateindex, + ), + weekindex: LazyTransform2Builder::from_lazy::( + name, + v, + &source1.weekindex, + &source2.weekindex, + ), + difficultyepoch: LazyTransform2Builder::from_lazy::( + name, + v, + &source1.difficultyepoch, + &source2.difficultyepoch, + ), + monthindex: LazyTransform2Builder::from_lazy::( + name, + v, + &source1.monthindex, + &source2.monthindex, + ), + quarterindex: LazyTransform2Builder::from_lazy::( + name, + v, + &source1.quarterindex, + &source2.quarterindex, + ), + semesterindex: LazyTransform2Builder::from_lazy::( + name, + v, + &source1.semesterindex, + &source2.semesterindex, + ), + yearindex: LazyTransform2Builder::from_lazy::( + name, + v, + &source1.yearindex, + &source2.yearindex, + ), + decadeindex: LazyTransform2Builder::from_lazy::( + name, + v, + &source1.decadeindex, + &source2.decadeindex, + ), + } + } } impl Traversable for LazyVecsFrom2FromHeight diff --git a/crates/brk_computer/src/grouped/lazy/binary/mod.rs b/crates/brk_computer/src/internal/lazy/binary/mod.rs similarity index 100% rename from crates/brk_computer/src/grouped/lazy/binary/mod.rs rename to crates/brk_computer/src/internal/lazy/binary/mod.rs diff --git a/crates/brk_computer/src/grouped/lazy/from_dateindex.rs b/crates/brk_computer/src/internal/lazy/from_dateindex.rs similarity 
index 98% rename from crates/brk_computer/src/grouped/lazy/from_dateindex.rs rename to crates/brk_computer/src/internal/lazy/from_dateindex.rs index 8e58151a1..4cf6a0da4 100644 --- a/crates/brk_computer/src/grouped/lazy/from_dateindex.rs +++ b/crates/brk_computer/src/internal/lazy/from_dateindex.rs @@ -5,7 +5,7 @@ use brk_types::{ use schemars::JsonSchema; use vecdb::{AnyExportableVec, IterableBoxedVec, LazyVecFrom1, UnaryTransform}; -use crate::grouped::{ComputedVecValue, ComputedVecsFromDateIndex, LazyTransformBuilder}; +use crate::internal::{ComputedVecValue, ComputedVecsFromDateIndex, LazyTransformBuilder}; const VERSION: Version = Version::ZERO; diff --git a/crates/brk_computer/src/grouped/lazy/from_height.rs b/crates/brk_computer/src/internal/lazy/from_height.rs similarity index 98% rename from crates/brk_computer/src/grouped/lazy/from_height.rs rename to crates/brk_computer/src/internal/lazy/from_height.rs index 15557f017..dae5ba7c9 100644 --- a/crates/brk_computer/src/grouped/lazy/from_height.rs +++ b/crates/brk_computer/src/internal/lazy/from_height.rs @@ -6,7 +6,7 @@ use brk_types::{ use schemars::JsonSchema; use vecdb::{AnyExportableVec, IterableBoxedVec, LazyVecFrom1, UnaryTransform}; -use crate::grouped::{ComputedVecValue, ComputedVecsFromHeight, LazyTransformBuilder}; +use crate::internal::{ComputedVecValue, ComputedVecsFromHeight, LazyTransformBuilder}; const VERSION: Version = Version::ZERO; diff --git a/crates/brk_computer/src/grouped/lazy/mod.rs b/crates/brk_computer/src/internal/lazy/mod.rs similarity index 100% rename from crates/brk_computer/src/grouped/lazy/mod.rs rename to crates/brk_computer/src/internal/lazy/mod.rs diff --git a/crates/brk_computer/src/grouped/mod.rs b/crates/brk_computer/src/internal/mod.rs similarity index 100% rename from crates/brk_computer/src/grouped/mod.rs rename to crates/brk_computer/src/internal/mod.rs diff --git a/crates/brk_computer/src/grouped/source.rs b/crates/brk_computer/src/internal/source.rs similarity 
index 100% rename from crates/brk_computer/src/grouped/source.rs rename to crates/brk_computer/src/internal/source.rs diff --git a/crates/brk_computer/src/grouped/specialized/constant.rs b/crates/brk_computer/src/internal/specialized/constant.rs similarity index 100% rename from crates/brk_computer/src/grouped/specialized/constant.rs rename to crates/brk_computer/src/internal/specialized/constant.rs diff --git a/crates/brk_computer/src/grouped/specialized/mod.rs b/crates/brk_computer/src/internal/specialized/mod.rs similarity index 100% rename from crates/brk_computer/src/grouped/specialized/mod.rs rename to crates/brk_computer/src/internal/specialized/mod.rs diff --git a/crates/brk_computer/src/grouped/specialized/percentiles.rs b/crates/brk_computer/src/internal/specialized/percentiles.rs similarity index 82% rename from crates/brk_computer/src/grouped/specialized/percentiles.rs rename to crates/brk_computer/src/internal/specialized/percentiles.rs index 527c845b6..3df78163e 100644 --- a/crates/brk_computer/src/grouped/specialized/percentiles.rs +++ b/crates/brk_computer/src/internal/specialized/percentiles.rs @@ -3,7 +3,7 @@ use brk_traversable::{Traversable, TreeNode}; use brk_types::{DateIndex, Dollars, Version}; use rayon::prelude::*; use vecdb::{ - AnyExportableVec, AnyStoredVec, Database, EagerVec, Exit, GenericStoredVec, PcoVec, + AnyExportableVec, AnyStoredVec, AnyVec, Database, EagerVec, Exit, GenericStoredVec, PcoVec, }; use crate::{ComputeIndexes, indexes}; @@ -16,13 +16,13 @@ pub const PERCENTILES: [u8; 19] = [ pub const PERCENTILES_LEN: usize = PERCENTILES.len(); #[derive(Clone)] -pub struct PricePercentiles { +pub struct CostBasisPercentiles { pub vecs: [Option>; PERCENTILES_LEN], } const VERSION: Version = Version::ZERO; -impl PricePercentiles { +impl CostBasisPercentiles { pub fn forced_import( db: &Database, name: &str, @@ -34,7 +34,7 @@ impl PricePercentiles { compute.then(|| { ComputedVecsFromDateIndex::forced_import( db, - 
&format!("{name}_price_pct{p:02}"), + &format!("{name}_cost_basis_pct{p:02}"), Source::Compute, version + VERSION, indexes, @@ -47,6 +47,17 @@ impl PricePercentiles { Ok(Self { vecs }) } + /// Get minimum length across dateindex-indexed vectors written in block loop. + pub fn min_stateful_dateindex_len(&self) -> usize { + self.vecs + .iter() + .filter_map(|v| v.as_ref()) + .filter_map(|v| v.dateindex.as_ref()) + .map(|v| v.len()) + .min() + .unwrap_or(usize::MAX) + } + /// Push percentile prices at date boundary. /// Only called when dateindex is Some (last height of the day). pub fn truncate_push( @@ -84,7 +95,7 @@ impl PricePercentiles { } } -impl PricePercentiles { +impl CostBasisPercentiles { pub fn write(&mut self) -> Result<()> { for vec in self.vecs.iter_mut().flatten() { if let Some(dateindex_vec) = vec.dateindex.as_mut() { @@ -116,13 +127,16 @@ impl PricePercentiles { } } -impl Traversable for PricePercentiles { +impl Traversable for CostBasisPercentiles { fn to_tree_node(&self) -> TreeNode { TreeNode::Branch( PERCENTILES .iter() .zip(self.vecs.iter()) - .filter_map(|(p, v)| v.as_ref().map(|v| (format!("pct{p:02}"), v.to_tree_node()))) + .filter_map(|(p, v)| { + v.as_ref() + .map(|v| (format!("cost_basis_pct{p:02}"), v.to_tree_node())) + }) .collect(), ) } diff --git a/crates/brk_computer/src/grouped/specialized/ratio.rs b/crates/brk_computer/src/internal/specialized/ratio.rs similarity index 97% rename from crates/brk_computer/src/grouped/specialized/ratio.rs rename to crates/brk_computer/src/internal/specialized/ratio.rs index 4ed4159c7..479869ea9 100644 --- a/crates/brk_computer/src/grouped/specialized/ratio.rs +++ b/crates/brk_computer/src/internal/specialized/ratio.rs @@ -8,7 +8,7 @@ use vecdb::{ use crate::{ ComputeIndexes, - grouped::{ + internal::{ ComputedStandardDeviationVecsFromDateIndex, LazyVecsFrom2FromDateIndex, PriceTimesRatio, StandardDeviationVecsOptions, source::Source, }, @@ -180,7 +180,7 @@ impl ComputedRatioVecsFromDateIndex { exit: 
&Exit, price_opt: Option<&impl IterableVec>, ) -> Result<()> { - let closes = price.timeindexes_to_price_close.dateindex.u(); + let closes = price.usd.timeindexes_to_price_close.dateindex.u(); let price = price_opt.unwrap_or_else(|| unsafe { std::mem::transmute(&self.price.u().dateindex) }); @@ -240,9 +240,7 @@ impl ComputedRatioVecsFromDateIndex { self.mut_ratio_vecs() .iter_mut() .try_for_each(|v| -> Result<()> { - v.validate_computed_version_or_reset( - Version::ZERO + v.inner_version() + ratio_version, - )?; + v.validate_computed_version_or_reset(ratio_version)?; Ok(()) })?; @@ -301,9 +299,10 @@ impl ComputedRatioVecsFromDateIndex { Ok(()) })?; - self.mut_ratio_vecs() - .into_iter() - .try_for_each(|v| v.safe_flush(exit))?; + { + let _lock = exit.lock(); + self.mut_ratio_vecs().into_iter().try_for_each(|v| v.flush())?; + } self.ratio_pct1.um().compute_rest( starting_indexes, diff --git a/crates/brk_computer/src/grouped/specialized/stddev.rs b/crates/brk_computer/src/internal/specialized/stddev.rs similarity index 97% rename from crates/brk_computer/src/grouped/specialized/stddev.rs rename to crates/brk_computer/src/internal/specialized/stddev.rs index 0ecf0d3a7..fe86f8ca3 100644 --- a/crates/brk_computer/src/grouped/specialized/stddev.rs +++ b/crates/brk_computer/src/internal/specialized/stddev.rs @@ -8,7 +8,7 @@ use vecdb::{ PcoVec, VecIndex, }; -use crate::{ComputeIndexes, grouped::source::Source, indexes, price, utils::OptionExt}; +use crate::{ComputeIndexes, internal::source::Source, indexes, price, utils::OptionExt}; use super::super::{ ClosePriceTimesRatio, ComputedVecsFromDateIndex, LazyVecsFrom2FromDateIndex, VecBuilderOptions, @@ -144,7 +144,7 @@ impl ComputedStandardDeviationVecsFromDateIndex { macro_rules! 
lazy_usd { ($band:expr, $suffix:expr) => { price_vecs - .map(|p| &p.timeindexes_to_price_close) + .map(|p| &p.usd.timeindexes_to_price_close) .zip($band.as_ref()) .filter(|_| options.price_bands()) .map(|(p, b)| { @@ -234,9 +234,7 @@ impl ComputedStandardDeviationVecsFromDateIndex { self.mut_stateful_date_vecs() .try_for_each(|v| -> Result<()> { - v.validate_computed_version_or_reset( - Version::ZERO + v.inner_version() + source_version, - )?; + v.validate_computed_version_or_reset(source_version)?; Ok(()) })?; @@ -382,8 +380,10 @@ impl ComputedStandardDeviationVecsFromDateIndex { drop(sma_iter); - self.mut_stateful_date_vecs() - .try_for_each(|v| v.safe_flush(exit))?; + { + let _lock = exit.lock(); + self.mut_stateful_date_vecs().try_for_each(|v| v.flush())?; + } self.mut_stateful_computed().try_for_each(|v| { v.compute_rest( diff --git a/crates/brk_computer/src/grouped/transforms.rs b/crates/brk_computer/src/internal/transforms.rs similarity index 78% rename from crates/brk_computer/src/grouped/transforms.rs rename to crates/brk_computer/src/internal/transforms.rs index ce142bb30..e4fea9135 100644 --- a/crates/brk_computer/src/grouped/transforms.rs +++ b/crates/brk_computer/src/internal/transforms.rs @@ -1,4 +1,4 @@ -use brk_types::{Bitcoin, Close, Dollars, Sats, StoredF32, StoredF64, StoredU32}; +use brk_types::{Bitcoin, Close, Dollars, Sats, StoredF32, StoredF64, StoredU32, StoredU64, Weight}; use vecdb::{BinaryTransform, UnaryTransform}; /// (Dollars, Dollars) -> Dollars addition @@ -12,8 +12,6 @@ impl BinaryTransform for DollarsPlus { } } -/// (Dollars, Dollars) -> Dollars subtraction -/// Used for computing net = profit - loss pub struct DollarsMinus; impl BinaryTransform for DollarsMinus { @@ -125,6 +123,36 @@ impl UnaryTransform for HalveDollars { } } +/// Sats -> Sats (identity transform for lazy references) +pub struct SatsIdentity; + +impl UnaryTransform for SatsIdentity { + #[inline(always)] + fn apply(sats: Sats) -> Sats { + sats + } +} + +/// Dollars 
-> Dollars (identity transform for lazy references) +pub struct DollarsIdentity; + +impl UnaryTransform for DollarsIdentity { + #[inline(always)] + fn apply(dollars: Dollars) -> Dollars { + dollars + } +} + +/// StoredF32 -> StoredF32 (identity transform for lazy references/proxies) +pub struct StoredF32Identity; + +impl UnaryTransform for StoredF32Identity { + #[inline(always)] + fn apply(v: StoredF32) -> StoredF32 { + v + } +} + /// Dollars * StoredF32 -> Dollars (price × ratio) pub struct PriceTimesRatio; @@ -272,6 +300,21 @@ impl BinaryTransform for PercentageU32F32 { } } +/// (StoredU64, StoredU64) -> StoredF32 percentage (a/b × 100) +/// Used for adoption ratio calculations (type_count / total_count × 100) +pub struct PercentageU64F32; + +impl BinaryTransform for PercentageU64F32 { + #[inline(always)] + fn apply(numerator: StoredU64, denominator: StoredU64) -> StoredF32 { + if *denominator == 0 { + StoredF32::default() + } else { + StoredF32::from((*numerator as f64 / *denominator as f64) * 100.0) + } + } +} + // === Volatility Transforms (SD × sqrt(N)) === /// StoredF32 × sqrt(7) -> StoredF32 (1-week volatility from daily SD) @@ -331,3 +374,59 @@ impl BinaryTransform, Dollars, StoredF32> for PercentageDiffClose } } } + +// === Block Fullness Transform === + +/// Weight -> StoredF32 percentage (weight / MAX_BLOCK × 100) +/// Used for computing block fullness as a percentage of max capacity +pub struct WeightToFullness; + +impl UnaryTransform for WeightToFullness { + #[inline(always)] + fn apply(weight: Weight) -> StoredF32 { + StoredF32::from(weight.fullness()) + } +} + +// === RSI Transform === + +/// (StoredF32, StoredF32) -> StoredF32 RSI formula: 100 * a / (a + b) +pub struct RsiFormula; + +impl BinaryTransform for RsiFormula { + #[inline(always)] + fn apply(avg_gain: StoredF32, avg_loss: StoredF32) -> StoredF32 { + let sum = *avg_gain + *avg_loss; + if sum == 0.0 { + StoredF32::from(50.0) + } else { + StoredF32::from(100.0 * *avg_gain / sum) + } + } +} + 
+// === MACD Transform === + +/// (StoredF32, StoredF32) -> StoredF32 difference (a - b) +pub struct DifferenceF32; + +impl BinaryTransform for DifferenceF32 { + #[inline(always)] + fn apply(a: StoredF32, b: StoredF32) -> StoredF32 { + StoredF32::from(*a - *b) + } +} + +/// (StoredF32, StoredF32) -> StoredF32 ratio (a / b) +pub struct RatioF32; + +impl BinaryTransform for RatioF32 { + #[inline(always)] + fn apply(a: StoredF32, b: StoredF32) -> StoredF32 { + if *b == 0.0 { + StoredF32::from(0.0) + } else { + StoredF32::from(*a / *b) + } + } +} diff --git a/crates/brk_computer/src/grouped/value/computed/from_dateindex.rs b/crates/brk_computer/src/internal/value/computed/from_dateindex.rs similarity index 95% rename from crates/brk_computer/src/grouped/value/computed/from_dateindex.rs rename to crates/brk_computer/src/internal/value/computed/from_dateindex.rs index 27f5955ee..0cac537ef 100644 --- a/crates/brk_computer/src/grouped/value/computed/from_dateindex.rs +++ b/crates/brk_computer/src/internal/value/computed/from_dateindex.rs @@ -5,13 +5,13 @@ use vecdb::{CollectableVec, Database, EagerVec, Exit, IterableCloneableVec, PcoV use crate::{ ComputeIndexes, - grouped::{ComputedVecsFromDateIndex, LazyVecsFromDateIndex, SatsToBitcoin}, + internal::{ComputedVecsFromDateIndex, LazyVecsFromDateIndex, SatsToBitcoin}, indexes, price, traits::ComputeFromBitcoin, utils::OptionExt, }; -use crate::grouped::{Source, VecBuilderOptions}; +use crate::internal::{Source, VecBuilderOptions}; #[derive(Clone, Traversable)] pub struct ComputedValueVecsFromDateIndex { @@ -104,6 +104,7 @@ impl ComputedValueVecsFromDateIndex { let dateindex_to_bitcoin = self.bitcoin.dateindex.u(); let dateindex_to_price_close = price .u() + .usd .timeindexes_to_price_close .dateindex .as_ref() diff --git a/crates/brk_computer/src/grouped/value/computed/from_height.rs b/crates/brk_computer/src/internal/value/computed/from_height.rs similarity index 94% rename from 
crates/brk_computer/src/grouped/value/computed/from_height.rs rename to crates/brk_computer/src/internal/value/computed/from_height.rs index a1d112dc6..0dd5aac49 100644 --- a/crates/brk_computer/src/grouped/value/computed/from_height.rs +++ b/crates/brk_computer/src/internal/value/computed/from_height.rs @@ -5,13 +5,13 @@ use vecdb::{CollectableVec, Database, EagerVec, Exit, IterableCloneableVec, PcoV use crate::{ ComputeIndexes, - grouped::{LazyVecsFromHeight, SatsToBitcoin, Source}, + internal::{LazyVecsFromHeight, SatsToBitcoin, Source}, indexes, price, traits::ComputeFromBitcoin, utils::OptionExt, }; -use crate::grouped::{ComputedVecsFromHeight, VecBuilderOptions}; +use crate::internal::{ComputedVecsFromHeight, VecBuilderOptions}; #[derive(Clone, Traversable)] pub struct ComputedValueVecsFromHeight { @@ -107,7 +107,7 @@ impl ComputedValueVecsFromHeight { } let height_to_bitcoin = &self.bitcoin.height; - let height_to_price_close = &price.u().chainindexes_to_price_close.height; + let height_to_price_close = &price.u().usd.chainindexes_to_price_close.height; if let Some(dollars) = self.dollars.as_mut() { dollars.compute_all(indexes, starting_indexes, exit, |v| { diff --git a/crates/brk_computer/src/grouped/value/computed/from_txindex.rs b/crates/brk_computer/src/internal/value/computed/from_txindex.rs similarity index 82% rename from crates/brk_computer/src/grouped/value/computed/from_txindex.rs rename to crates/brk_computer/src/internal/value/computed/from_txindex.rs index dea403a97..47abfa32a 100644 --- a/crates/brk_computer/src/grouped/value/computed/from_txindex.rs +++ b/crates/brk_computer/src/internal/value/computed/from_txindex.rs @@ -7,18 +7,19 @@ use vecdb::{ VecIndex, }; -use crate::{ComputeIndexes, grouped::Source, indexes, price, utils::OptionExt}; +use crate::{ComputeIndexes, internal::Source, indexes, price, utils::OptionExt}; -use crate::grouped::{ComputedVecsFromTxindex, VecBuilderOptions}; +use crate::internal::{ComputedVecsFromTxindex, 
VecBuilderOptions}; #[derive(Clone, Traversable)] pub struct ComputedValueVecsFromTxindex { pub sats: ComputedVecsFromTxindex, pub bitcoin_txindex: LazyVecFrom1, pub bitcoin: ComputedVecsFromTxindex, + // Derives directly from sats source (Eager) to avoid Lazy <- Lazy #[allow(clippy::type_complexity)] pub dollars_txindex: Option< - LazyVecFrom3>, + LazyVecFrom3>, >, pub dollars: Option>, } @@ -49,12 +50,15 @@ impl ComputedValueVecsFromTxindex { options, )?; - let source_vec = source.vec(); + // Compute sats source once - used by both bitcoin_txindex and dollars_txindex + let sats_source = source + .vec() + .unwrap_or_else(|| sats.txindex.u().boxed_clone()); let bitcoin_txindex = LazyVecFrom1::init( &name_btc, version + VERSION, - source_vec.unwrap_or_else(|| sats.txindex.u().boxed_clone()), + sats_source.clone(), |txindex: TxIndex, iter| iter.get_at(txindex.to_usize()).map(Bitcoin::from), ); @@ -67,23 +71,24 @@ impl ComputedValueVecsFromTxindex { options, )?; + // Derive directly from sats source (Eager) to avoid Lazy <- Lazy let dollars_txindex = price.map(|price| { LazyVecFrom3::init( &name_usd, version + VERSION, - bitcoin_txindex.boxed_clone(), + sats_source.clone(), indexer.vecs.tx.txindex_to_height.boxed_clone(), - price.chainindexes_to_price_close.height.boxed_clone(), + price.usd.chainindexes_to_price_close.height.boxed_clone(), |txindex: TxIndex, - txindex_to_btc_iter, + txindex_to_sats_iter, txindex_to_height_iter, height_to_price_close_iter| { let txindex = txindex.to_usize(); - txindex_to_btc_iter.get_at(txindex).and_then(|btc| { + txindex_to_sats_iter.get_at(txindex).and_then(|sats| { txindex_to_height_iter.get_at(txindex).and_then(|height| { height_to_price_close_iter .get_at(height.to_usize()) - .map(|close| *close * btc) + .map(|close| *close * Bitcoin::from(sats)) }) }) }, diff --git a/crates/brk_computer/src/grouped/value/computed/mod.rs b/crates/brk_computer/src/internal/value/computed/mod.rs similarity index 100% rename from 
crates/brk_computer/src/grouped/value/computed/mod.rs rename to crates/brk_computer/src/internal/value/computed/mod.rs diff --git a/crates/brk_computer/src/grouped/value/lazy/binary_from_height.rs b/crates/brk_computer/src/internal/value/lazy/binary_from_height.rs similarity index 96% rename from crates/brk_computer/src/grouped/value/lazy/binary_from_height.rs rename to crates/brk_computer/src/internal/value/lazy/binary_from_height.rs index c6c7ee6fe..fe2bc9115 100644 --- a/crates/brk_computer/src/grouped/value/lazy/binary_from_height.rs +++ b/crates/brk_computer/src/internal/value/lazy/binary_from_height.rs @@ -2,7 +2,7 @@ use brk_traversable::Traversable; use brk_types::{Bitcoin, Dollars, Height, Sats, Version}; use vecdb::{BinaryTransform, IterableBoxedVec, IterableCloneableVec}; -use crate::grouped::{ComputedValueVecsFromHeight, LazyVecsFrom2FromHeight}; +use crate::internal::{ComputedValueVecsFromHeight, LazyVecsFrom2FromHeight}; /// Lazy value vecs computed from two `ComputedValueVecsFromHeight` sources via binary transforms. /// Used for computing coinbase = subsidy + fee. 
diff --git a/crates/brk_computer/src/grouped/value/lazy/from_dateindex.rs b/crates/brk_computer/src/internal/value/lazy/from_dateindex.rs similarity index 96% rename from crates/brk_computer/src/grouped/value/lazy/from_dateindex.rs rename to crates/brk_computer/src/internal/value/lazy/from_dateindex.rs index 1ff397267..3dd543db0 100644 --- a/crates/brk_computer/src/grouped/value/lazy/from_dateindex.rs +++ b/crates/brk_computer/src/internal/value/lazy/from_dateindex.rs @@ -2,7 +2,7 @@ use brk_traversable::Traversable; use brk_types::{Bitcoin, Dollars, Sats, Version}; use vecdb::{IterableCloneableVec, UnaryTransform}; -use crate::grouped::{ComputedValueVecsFromDateIndex, LazyVecsFromDateIndex}; +use crate::internal::{ComputedValueVecsFromDateIndex, LazyVecsFromDateIndex}; const VERSION: Version = Version::ZERO; diff --git a/crates/brk_computer/src/grouped/value/lazy/height.rs b/crates/brk_computer/src/internal/value/lazy/height.rs similarity index 100% rename from crates/brk_computer/src/grouped/value/lazy/height.rs rename to crates/brk_computer/src/internal/value/lazy/height.rs diff --git a/crates/brk_computer/src/grouped/value/lazy/mod.rs b/crates/brk_computer/src/internal/value/lazy/mod.rs similarity index 100% rename from crates/brk_computer/src/grouped/value/lazy/mod.rs rename to crates/brk_computer/src/internal/value/lazy/mod.rs diff --git a/crates/brk_computer/src/grouped/value/lazy/value_height.rs b/crates/brk_computer/src/internal/value/lazy/value_height.rs similarity index 96% rename from crates/brk_computer/src/grouped/value/lazy/value_height.rs rename to crates/brk_computer/src/internal/value/lazy/value_height.rs index ca6096ab7..045ef0c82 100644 --- a/crates/brk_computer/src/grouped/value/lazy/value_height.rs +++ b/crates/brk_computer/src/internal/value/lazy/value_height.rs @@ -6,7 +6,7 @@ use vecdb::{ LazyVecFrom2, PcoVec, }; -use crate::grouped::{ClosePriceTimesSats, SatsToBitcoin, Source}; +use crate::internal::{ClosePriceTimesSats, SatsToBitcoin, 
Source}; #[derive(Clone, Traversable)] pub struct ComputedHeightValueVecs { diff --git a/crates/brk_computer/src/grouped/value/mod.rs b/crates/brk_computer/src/internal/value/mod.rs similarity index 100% rename from crates/brk_computer/src/grouped/value/mod.rs rename to crates/brk_computer/src/internal/value/mod.rs diff --git a/crates/brk_computer/src/lib.rs b/crates/brk_computer/src/lib.rs index a758bcf4b..39e66afdf 100644 --- a/crates/brk_computer/src/lib.rs +++ b/crates/brk_computer/src/lib.rs @@ -11,39 +11,42 @@ use brk_types::Version; use log::info; use vecdb::Exit; -mod blks; -mod chain; +mod blocks; +mod transactions; +mod scripts; +mod positions; mod cointime; mod constants; -mod fetched; -mod grouped; +mod internal; mod indexes; mod market; mod pools; mod price; -mod stateful; +mod distribution; +mod supply; mod traits; -mod txins; -mod txouts; +mod inputs; +mod outputs; mod utils; use indexes::ComputeIndexes; -use utils::OptionExt; #[derive(Clone, Traversable)] pub struct Computer { - pub blks: blks::Vecs, - pub chain: chain::Vecs, + pub blocks: blocks::Vecs, + pub transactions: transactions::Vecs, + pub scripts: scripts::Vecs, + pub positions: positions::Vecs, pub cointime: cointime::Vecs, pub constants: constants::Vecs, - pub fetched: Option, pub indexes: indexes::Vecs, pub market: market::Vecs, pub pools: pools::Vecs, pub price: Option, - pub stateful: stateful::Vecs, - pub txins: txins::Vecs, - pub txouts: txouts::Vecs, + pub distribution: distribution::Vecs, + pub supply: supply::Vecs, + pub inputs: inputs::Vecs, + pub outputs: outputs::Vecs, } const VERSION: Version = Version::new(4); @@ -64,51 +67,47 @@ impl Computer { let big_thread = || thread::Builder::new().stack_size(STACK_SIZE); let i = Instant::now(); - let (indexes, fetched, blks, txins, txouts) = thread::scope(|s| -> Result<_> { - let fetched_handle = fetcher - .map(|fetcher| { - big_thread().spawn_scoped(s, move || { - fetched::Vecs::forced_import(outputs_path, fetcher, VERSION) - }) - }) 
- .transpose()?; - - let blks_handle = big_thread() - .spawn_scoped(s, || blks::Vecs::forced_import(&computed_path, VERSION))?; - - let txins_handle = big_thread() - .spawn_scoped(s, || txins::Vecs::forced_import(&computed_path, VERSION))?; - - let txouts_handle = big_thread() - .spawn_scoped(s, || txouts::Vecs::forced_import(&computed_path, VERSION))?; + let (indexes, positions) = thread::scope(|s| -> Result<_> { + let positions_handle = big_thread() + .spawn_scoped(s, || positions::Vecs::forced_import(&computed_path, VERSION))?; let indexes = indexes::Vecs::forced_import(&computed_path, VERSION, indexer)?; - let fetched = fetched_handle.map(|h| h.join().unwrap()).transpose()?; - let blks = blks_handle.join().unwrap()?; - let txins = txins_handle.join().unwrap()?; - let txouts = txouts_handle.join().unwrap()?; + let positions = positions_handle.join().unwrap()?; - Ok((indexes, fetched, blks, txins, txouts)) + Ok((indexes, positions)) })?; - info!( - "Imported indexes/fetched/blks/txins/txouts in {:?}", - i.elapsed() - ); + info!("Imported indexes/positions in {:?}", i.elapsed()); + + // inputs/outputs need indexes for count imports + let i = Instant::now(); + let (inputs, outputs) = thread::scope(|s| -> Result<_> { + let inputs_handle = big_thread().spawn_scoped(s, || { + inputs::Vecs::forced_import(&computed_path, VERSION, &indexes) + })?; + + let outputs_handle = big_thread().spawn_scoped(s, || { + outputs::Vecs::forced_import(&computed_path, VERSION, &indexes) + })?; + + let inputs = inputs_handle.join().unwrap()?; + let outputs = outputs_handle.join().unwrap()?; + + Ok((inputs, outputs)) + })?; + info!("Imported inputs/outputs in {:?}", i.elapsed()); let i = Instant::now(); let constants = constants::Vecs::new(VERSION, &indexes); // Price must be created before market since market's lazy vecs reference price - let price = fetched - .is_some() - .then(|| price::Vecs::forced_import(&computed_path, VERSION, &indexes).unwrap()); - let market = - 
market::Vecs::forced_import(&computed_path, VERSION, &indexes, price.as_ref())?; - info!("Imported price/constants/market in {:?}", i.elapsed()); + let price = price::Vecs::forced_import(&computed_path, VERSION, &indexes, fetcher)?; + let price = price.has_fetcher().then_some(price); + info!("Imported price/constants in {:?}", i.elapsed()); let i = Instant::now(); - let (chain, pools, cointime) = thread::scope(|s| -> Result<_> { - let chain_handle = big_thread().spawn_scoped(s, || { - chain::Vecs::forced_import( + let (blocks, transactions, scripts, pools, cointime) = thread::scope(|s| -> Result<_> { + // Import blocks module + let blocks_handle = big_thread().spawn_scoped(s, || { + blocks::Vecs::forced_import( &computed_path, VERSION, indexer, @@ -117,45 +116,89 @@ impl Computer { ) })?; + // Import transactions module + let transactions_handle = big_thread().spawn_scoped(s, || { + transactions::Vecs::forced_import( + &computed_path, + VERSION, + indexer, + &indexes, + price.as_ref(), + ) + })?; + + // Import scripts module (depends on outputs for adoption ratio denominators) + let scripts_handle = big_thread().spawn_scoped(s, || { + scripts::Vecs::forced_import(&computed_path, VERSION, &indexes, price.as_ref(), &outputs) + })?; + let cointime = cointime::Vecs::forced_import(&computed_path, VERSION, &indexes, price.as_ref())?; - let chain = chain_handle.join().unwrap()?; + let blocks = blocks_handle.join().unwrap()?; + let transactions = transactions_handle.join().unwrap()?; + let scripts = scripts_handle.join().unwrap()?; - // pools depends on chain for lazy dominance vecs + // pools depends on blocks and transactions for lazy dominance vecs let pools = pools::Vecs::forced_import( &computed_path, VERSION, &indexes, price.as_ref(), - &chain, + &blocks, + &transactions, )?; - Ok((chain, pools, cointime)) + Ok((blocks, transactions, scripts, pools, cointime)) })?; - info!("Imported chain/pools/cointime in {:?}", i.elapsed()); + info!("Imported 
blocks/transactions/scripts/pools/cointime in {:?}", i.elapsed()); // Threads inside let i = Instant::now(); - let stateful = - stateful::Vecs::forced_import(&computed_path, VERSION, &indexes, price.as_ref())?; - info!("Imported stateful in {:?}", i.elapsed()); + let distribution = + distribution::Vecs::forced_import(&computed_path, VERSION, &indexes, price.as_ref())?; + info!("Imported distribution in {:?}", i.elapsed()); + + // Supply must be imported after distribution (references distribution's supply) + let i = Instant::now(); + let supply = supply::Vecs::forced_import( + &computed_path, + VERSION, + &indexes, + price.as_ref(), + &distribution, + )?; + info!("Imported supply in {:?}", i.elapsed()); + + // Market must be imported after distribution and transactions (for NVT indicator) + let i = Instant::now(); + let market = market::Vecs::forced_import( + &computed_path, + VERSION, + &indexes, + price.as_ref(), + &distribution, + &transactions, + )?; + info!("Imported market in {:?}", i.elapsed()); info!("Total import time: {:?}", import_start.elapsed()); let this = Self { + blocks, + transactions, + scripts, constants, market, - stateful, - chain, - blks, + distribution, + supply, + positions, pools, cointime, indexes, - txins, - fetched, + inputs, price, - txouts, + outputs, }; Self::retain_databases(&computed_path)?; @@ -166,16 +209,19 @@ impl Computer { /// Removes database folders that are no longer in use. 
fn retain_databases(computed_path: &Path) -> Result<()> { const EXPECTED_DBS: &[&str] = &[ - blks::DB_NAME, - chain::DB_NAME, + blocks::DB_NAME, + transactions::DB_NAME, + scripts::DB_NAME, + positions::DB_NAME, cointime::DB_NAME, indexes::DB_NAME, market::DB_NAME, pools::DB_NAME, price::DB_NAME, - stateful::DB_NAME, - txins::DB_NAME, - txouts::DB_NAME, + distribution::DB_NAME, + supply::DB_NAME, + inputs::DB_NAME, + outputs::DB_NAME, ]; if !computed_path.exists() { @@ -209,70 +255,105 @@ impl Computer { exit: &Exit, ) -> Result<()> { let compute_start = Instant::now(); + + // Compute blocks.time early (height_to_date, height_to_timestamp_fixed, height_to_date_fixed) + // These are needed by indexes::block to compute height_to_dateindex + info!("Computing blocks.time (early)..."); + let i = Instant::now(); + self.blocks + .time + .compute_early(indexer, starting_indexes.height, exit)?; + info!("Computed blocks.time (early) in {:?}", i.elapsed()); + info!("Computing indexes..."); let i = Instant::now(); - let mut starting_indexes = self.indexes.compute(indexer, starting_indexes, exit)?; + let mut starting_indexes = self.indexes.compute(indexer, &self.blocks.time, starting_indexes, exit)?; info!("Computed indexes in {:?}", i.elapsed()); - if let Some(fetched) = self.fetched.as_mut() { - info!("Computing fetched..."); + if let Some(price) = self.price.as_mut() { + info!("Fetching prices..."); let i = Instant::now(); - fetched.compute(indexer, &self.indexes, &starting_indexes, exit)?; - info!("Computed fetched in {:?}", i.elapsed()); + price.fetch(indexer, &self.indexes, &starting_indexes, exit)?; + info!("Fetched prices in {:?}", i.elapsed()); info!("Computing prices..."); let i = Instant::now(); - self.price.um().compute(&starting_indexes, fetched, exit)?; + price.compute(&starting_indexes, exit)?; info!("Computed prices in {:?}", i.elapsed()); } thread::scope(|scope| -> Result<()> { - let blks = scope.spawn(|| -> Result<()> { - info!("Computing BLKs metadata..."); + 
let positions = scope.spawn(|| -> Result<()> { + info!("Computing positions metadata..."); let i = Instant::now(); - self.blks + self.positions .compute(indexer, &starting_indexes, reader, exit)?; - info!("Computed blk in {:?}", i.elapsed()); + info!("Computed positions in {:?}", i.elapsed()); Ok(()) }); - // Txins must complete before txouts (txouts needs txinindex_to_txoutindex) - // and before chain (chain needs txinindex_to_value) - info!("Computing txins..."); + // Inputs must complete first + info!("Computing inputs..."); let i = Instant::now(); - self.txins.compute(indexer, &starting_indexes, exit)?; - info!("Computed txins in {:?}", i.elapsed()); + self.inputs + .compute(indexer, &self.indexes, &starting_indexes, exit)?; + info!("Computed inputs in {:?}", i.elapsed()); - let txouts = scope.spawn(|| -> Result<()> { - info!("Computing txouts..."); - let i = Instant::now(); - self.txouts - .compute(indexer, &self.txins, &starting_indexes, exit)?; - info!("Computed txouts in {:?}", i.elapsed()); - Ok(()) - }); - - info!("Computing chain..."); + // Scripts (needed for outputs.count.utxo_count) + info!("Computing scripts..."); let i = Instant::now(); - self.chain.compute( + self.scripts + .compute(indexer, &self.indexes, self.price.as_ref(), &starting_indexes, exit)?; + info!("Computed scripts in {:?}", i.elapsed()); + + // Outputs depends on inputs and scripts (for utxo_count) + info!("Computing outputs..."); + let i = Instant::now(); + self.outputs.compute( indexer, &self.indexes, - &self.txins, + &self.inputs, + &self.scripts, + &starting_indexes, + exit, + )?; + info!("Computed outputs in {:?}", i.elapsed()); + + // Transactions: count, versions, size, fees, volume + info!("Computing transactions..."); + let i = Instant::now(); + self.transactions.compute( + indexer, + &self.indexes, + &self.inputs, + &self.outputs, &starting_indexes, self.price.as_ref(), exit, )?; - info!("Computed chain in {:?}", i.elapsed()); + info!("Computed transactions in {:?}", 
i.elapsed()); + + // Blocks depends on transactions.fees for rewards computation + info!("Computing blocks..."); + let i = Instant::now(); + self.blocks.compute( + indexer, + &self.indexes, + &self.transactions, + &starting_indexes, + self.price.as_ref(), + exit, + )?; + info!("Computed blocks in {:?}", i.elapsed()); if let Some(price) = self.price.as_ref() { info!("Computing market..."); let i = Instant::now(); - self.market.compute(price, &starting_indexes, exit)?; + self.market.compute(price, &self.blocks, &self.distribution, &starting_indexes, exit)?; info!("Computed market in {:?}", i.elapsed()); } - blks.join().unwrap()?; - txouts.join().unwrap()?; + positions.join().unwrap()?; Ok(()) })?; @@ -292,31 +373,49 @@ impl Computer { Ok(()) }); - info!("Computing stateful..."); + info!("Computing distribution..."); let i = Instant::now(); - self.stateful.compute( + self.distribution.compute( indexer, &self.indexes, - &self.txins, - &self.chain, + &self.inputs, + &self.outputs, + &self.transactions, + &self.blocks, self.price.as_ref(), &mut starting_indexes, exit, )?; - info!("Computed stateful in {:?}", i.elapsed()); + info!("Computed distribution in {:?}", i.elapsed()); pools.join().unwrap()?; Ok(()) })?; + // Supply must be computed after distribution (uses actual circulating supply) + info!("Computing supply..."); + let i = Instant::now(); + self.supply.compute( + &self.indexes, + &self.scripts, + &self.blocks, + &self.transactions, + &self.distribution, + &starting_indexes, + self.price.as_ref(), + exit, + )?; + info!("Computed supply in {:?}", i.elapsed()); + info!("Computing cointime..."); let i = Instant::now(); self.cointime.compute( &self.indexes, &starting_indexes, self.price.as_ref(), - &self.chain, - &self.stateful, + &self.blocks, + &self.supply, + &self.distribution, exit, )?; info!("Computed cointime in {:?}", i.elapsed()); @@ -332,12 +431,18 @@ impl Computer { use brk_traversable::Traversable; std::iter::empty() - 
.chain(self.blks.iter_any_exportable().map(|v| (blks::DB_NAME, v))) + .chain(self.blocks.iter_any_exportable().map(|v| (blocks::DB_NAME, v))) .chain( - self.chain + self.transactions .iter_any_exportable() - .map(|v| (chain::DB_NAME, v)), + .map(|v| (transactions::DB_NAME, v)), ) + .chain( + self.scripts + .iter_any_exportable() + .map(|v| (scripts::DB_NAME, v)), + ) + .chain(self.positions.iter_any_exportable().map(|v| (positions::DB_NAME, v))) .chain( self.cointime .iter_any_exportable() @@ -348,11 +453,6 @@ impl Computer { .iter_any_exportable() .map(|v| (constants::DB_NAME, v)), ) - .chain( - self.fetched - .iter_any_exportable() - .map(|v| (fetched::DB_NAME, v)), - ) .chain( self.indexes .iter_any_exportable() @@ -374,19 +474,24 @@ impl Computer { .map(|v| (price::DB_NAME, v)), ) .chain( - self.stateful + self.distribution .iter_any_exportable() - .map(|v| (stateful::DB_NAME, v)), + .map(|v| (distribution::DB_NAME, v)), ) .chain( - self.txins + self.supply .iter_any_exportable() - .map(|v| (txins::DB_NAME, v)), + .map(|v| (supply::DB_NAME, v)), ) .chain( - self.txouts + self.inputs .iter_any_exportable() - .map(|v| (txouts::DB_NAME, v)), + .map(|v| (inputs::DB_NAME, v)), + ) + .chain( + self.outputs + .iter_any_exportable() + .map(|v| (outputs::DB_NAME, v)), ) } } diff --git a/crates/brk_computer/src/market/ath/compute.rs b/crates/brk_computer/src/market/ath/compute.rs index 07601a89c..0013cb6e9 100644 --- a/crates/brk_computer/src/market/ath/compute.rs +++ b/crates/brk_computer/src/market/ath/compute.rs @@ -14,13 +14,13 @@ impl Vecs { ) -> Result<()> { self.height_to_price_ath.compute_all_time_high( starting_indexes.height, - &price.chainindexes_to_price_high.height, + &price.usd.chainindexes_to_price_high.height, exit, )?; self.height_to_price_drawdown.compute_drawdown( starting_indexes.height, - &price.chainindexes_to_price_close.height, + &price.usd.chainindexes_to_price_close.height, &self.height_to_price_ath, exit, )?; @@ -29,7 +29,7 @@ impl Vecs { 
.compute_all(starting_indexes, exit, |v| { v.compute_all_time_high( starting_indexes.dateindex, - price.timeindexes_to_price_high.dateindex.u(), + price.usd.timeindexes_to_price_high.dateindex.u(), exit, )?; Ok(()) @@ -37,7 +37,7 @@ impl Vecs { self.indexes_to_days_since_price_ath .compute_all(starting_indexes, exit, |v| { - let mut high_iter = price.timeindexes_to_price_high.dateindex.u().into_iter(); + let mut high_iter = price.usd.timeindexes_to_price_high.dateindex.u().into_iter(); let mut prev = None; v.compute_transform( starting_indexes.dateindex, diff --git a/crates/brk_computer/src/market/ath/import.rs b/crates/brk_computer/src/market/ath/import.rs index 9802e6514..cf48cc264 100644 --- a/crates/brk_computer/src/market/ath/import.rs +++ b/crates/brk_computer/src/market/ath/import.rs @@ -4,7 +4,7 @@ use vecdb::{Database, EagerVec, ImportableVec, IterableCloneableVec}; use super::Vecs; use crate::{ - grouped::{ + internal::{ ComputedVecsFromDateIndex, LazyVecsFrom2FromDateIndex, LazyVecsFromDateIndex, PercentageDiffCloseDollars, Source, StoredU16ToYears, VecBuilderOptions, }, @@ -54,7 +54,7 @@ impl Vecs { LazyVecsFrom2FromDateIndex::from_computed::( "price_drawdown", version + v0, - &price.timeindexes_to_price_close, + &price.usd.timeindexes_to_price_close, &indexes_to_price_ath, ); diff --git a/crates/brk_computer/src/market/ath/vecs.rs b/crates/brk_computer/src/market/ath/vecs.rs index 5b7785b50..8547c4600 100644 --- a/crates/brk_computer/src/market/ath/vecs.rs +++ b/crates/brk_computer/src/market/ath/vecs.rs @@ -2,7 +2,7 @@ use brk_traversable::Traversable; use brk_types::{Close, Dollars, Height, StoredF32, StoredU16}; use vecdb::{EagerVec, PcoVec}; -use crate::grouped::{ +use crate::internal::{ ComputedVecsFromDateIndex, LazyVecsFrom2FromDateIndex, LazyVecsFromDateIndex, }; diff --git a/crates/brk_computer/src/market/compute.rs b/crates/brk_computer/src/market/compute.rs index e98779600..6ee870f61 100644 --- a/crates/brk_computer/src/market/compute.rs +++ 
b/crates/brk_computer/src/market/compute.rs @@ -1,8 +1,7 @@ use brk_error::Result; use vecdb::Exit; -use crate::{price, ComputeIndexes}; -use crate::utils::OptionExt; +use crate::{blocks, distribution, price, ComputeIndexes}; use super::Vecs; @@ -10,21 +9,21 @@ impl Vecs { pub fn compute( &mut self, price: &price::Vecs, + blocks: &blocks::Vecs, + distribution: &distribution::Vecs, starting_indexes: &ComputeIndexes, exit: &Exit, ) -> Result<()> { // ATH metrics (independent) self.ath.compute(price, starting_indexes, exit)?; - // History metrics (independent) - self.history.compute(price, starting_indexes, exit)?; + // Lookback metrics (independent) + self.lookback.compute(price, starting_indexes, exit)?; - // Volatility metrics (depends on history._1d_price_returns) - self.volatility.compute( - starting_indexes, - exit, - self.history._1d_price_returns.dateindex.u(), - )?; + // Returns metrics (depends on lookback) + self.returns.compute(starting_indexes, exit)?; + + // Volatility: all fields are lazy (derived from returns SD) // Range metrics (independent) self.range.compute(price, starting_indexes, exit)?; @@ -35,6 +34,17 @@ impl Vecs { // DCA metrics self.dca.compute(price, starting_indexes, exit)?; + self.indicators.compute( + &blocks.rewards, + &self.returns, + &self.moving_average, + &self.range, + price, + distribution, + starting_indexes, + exit, + )?; + let _lock = exit.lock(); self.db.compact()?; Ok(()) diff --git a/crates/brk_computer/src/market/dca/compute.rs b/crates/brk_computer/src/market/dca/compute.rs index 23e983c7f..cc2562971 100644 --- a/crates/brk_computer/src/market/dca/compute.rs +++ b/crates/brk_computer/src/market/dca/compute.rs @@ -17,7 +17,7 @@ impl Vecs { starting_indexes: &ComputeIndexes, exit: &Exit, ) -> Result<()> { - let close = price.timeindexes_to_price_close.dateindex.u(); + let close = price.usd.timeindexes_to_price_close.dateindex.u(); // DCA by period - stack and avg_price self._1w_dca_stack diff --git 
a/crates/brk_computer/src/market/dca/import.rs b/crates/brk_computer/src/market/dca/import.rs index cfc424b17..11c2610a5 100644 --- a/crates/brk_computer/src/market/dca/import.rs +++ b/crates/brk_computer/src/market/dca/import.rs @@ -4,7 +4,7 @@ use vecdb::Database; use super::Vecs; use crate::{ - grouped::{ + internal::{ ComputedVecsFromDateIndex, LazyVecsFrom2FromDateIndex, PercentageDiffCloseDollars, Source, VecBuilderOptions, }, @@ -221,74 +221,74 @@ impl Vecs { let _1w_dca_returns = LazyVecsFrom2FromDateIndex::from_computed::( "1w_dca_returns", version + v0, - &price.timeindexes_to_price_close, + &price.usd.timeindexes_to_price_close, &_1w_dca_avg_price, ); let _1m_dca_returns = LazyVecsFrom2FromDateIndex::from_computed::( "1m_dca_returns", version + v0, - &price.timeindexes_to_price_close, + &price.usd.timeindexes_to_price_close, &_1m_dca_avg_price, ); let _3m_dca_returns = LazyVecsFrom2FromDateIndex::from_computed::( "3m_dca_returns", version + v0, - &price.timeindexes_to_price_close, + &price.usd.timeindexes_to_price_close, &_3m_dca_avg_price, ); let _6m_dca_returns = LazyVecsFrom2FromDateIndex::from_computed::( "6m_dca_returns", version + v0, - &price.timeindexes_to_price_close, + &price.usd.timeindexes_to_price_close, &_6m_dca_avg_price, ); let _1y_dca_returns = LazyVecsFrom2FromDateIndex::from_computed::( "1y_dca_returns", version + v0, - &price.timeindexes_to_price_close, + &price.usd.timeindexes_to_price_close, &_1y_dca_avg_price, ); let _2y_dca_returns = LazyVecsFrom2FromDateIndex::from_computed::( "2y_dca_returns", version + v0, - &price.timeindexes_to_price_close, + &price.usd.timeindexes_to_price_close, &_2y_dca_avg_price, ); let _3y_dca_returns = LazyVecsFrom2FromDateIndex::from_computed::( "3y_dca_returns", version + v0, - &price.timeindexes_to_price_close, + &price.usd.timeindexes_to_price_close, &_3y_dca_avg_price, ); let _4y_dca_returns = LazyVecsFrom2FromDateIndex::from_computed::( "4y_dca_returns", version + v0, - 
&price.timeindexes_to_price_close, + &price.usd.timeindexes_to_price_close, &_4y_dca_avg_price, ); let _5y_dca_returns = LazyVecsFrom2FromDateIndex::from_computed::( "5y_dca_returns", version + v0, - &price.timeindexes_to_price_close, + &price.usd.timeindexes_to_price_close, &_5y_dca_avg_price, ); let _6y_dca_returns = LazyVecsFrom2FromDateIndex::from_computed::( "6y_dca_returns", version + v0, - &price.timeindexes_to_price_close, + &price.usd.timeindexes_to_price_close, &_6y_dca_avg_price, ); let _8y_dca_returns = LazyVecsFrom2FromDateIndex::from_computed::( "8y_dca_returns", version + v0, - &price.timeindexes_to_price_close, + &price.usd.timeindexes_to_price_close, &_8y_dca_avg_price, ); let _10y_dca_returns = LazyVecsFrom2FromDateIndex::from_computed::( "10y_dca_returns", version + v0, - &price.timeindexes_to_price_close, + &price.usd.timeindexes_to_price_close, &_10y_dca_avg_price, ); @@ -535,77 +535,77 @@ impl Vecs { LazyVecsFrom2FromDateIndex::from_computed::( "dca_class_2025_returns", version + v0, - &price.timeindexes_to_price_close, + &price.usd.timeindexes_to_price_close, &dca_class_2025_avg_price, ); let dca_class_2024_returns = LazyVecsFrom2FromDateIndex::from_computed::( "dca_class_2024_returns", version + v0, - &price.timeindexes_to_price_close, + &price.usd.timeindexes_to_price_close, &dca_class_2024_avg_price, ); let dca_class_2023_returns = LazyVecsFrom2FromDateIndex::from_computed::( "dca_class_2023_returns", version + v0, - &price.timeindexes_to_price_close, + &price.usd.timeindexes_to_price_close, &dca_class_2023_avg_price, ); let dca_class_2022_returns = LazyVecsFrom2FromDateIndex::from_computed::( "dca_class_2022_returns", version + v0, - &price.timeindexes_to_price_close, + &price.usd.timeindexes_to_price_close, &dca_class_2022_avg_price, ); let dca_class_2021_returns = LazyVecsFrom2FromDateIndex::from_computed::( "dca_class_2021_returns", version + v0, - &price.timeindexes_to_price_close, + &price.usd.timeindexes_to_price_close, 
&dca_class_2021_avg_price, ); let dca_class_2020_returns = LazyVecsFrom2FromDateIndex::from_computed::( "dca_class_2020_returns", version + v0, - &price.timeindexes_to_price_close, + &price.usd.timeindexes_to_price_close, &dca_class_2020_avg_price, ); let dca_class_2019_returns = LazyVecsFrom2FromDateIndex::from_computed::( "dca_class_2019_returns", version + v0, - &price.timeindexes_to_price_close, + &price.usd.timeindexes_to_price_close, &dca_class_2019_avg_price, ); let dca_class_2018_returns = LazyVecsFrom2FromDateIndex::from_computed::( "dca_class_2018_returns", version + v0, - &price.timeindexes_to_price_close, + &price.usd.timeindexes_to_price_close, &dca_class_2018_avg_price, ); let dca_class_2017_returns = LazyVecsFrom2FromDateIndex::from_computed::( "dca_class_2017_returns", version + v0, - &price.timeindexes_to_price_close, + &price.usd.timeindexes_to_price_close, &dca_class_2017_avg_price, ); let dca_class_2016_returns = LazyVecsFrom2FromDateIndex::from_computed::( "dca_class_2016_returns", version + v0, - &price.timeindexes_to_price_close, + &price.usd.timeindexes_to_price_close, &dca_class_2016_avg_price, ); let dca_class_2015_returns = LazyVecsFrom2FromDateIndex::from_computed::( "dca_class_2015_returns", version + v0, - &price.timeindexes_to_price_close, + &price.usd.timeindexes_to_price_close, &dca_class_2015_avg_price, ); diff --git a/crates/brk_computer/src/market/dca/vecs.rs b/crates/brk_computer/src/market/dca/vecs.rs index 4f7c27809..6bf14b62b 100644 --- a/crates/brk_computer/src/market/dca/vecs.rs +++ b/crates/brk_computer/src/market/dca/vecs.rs @@ -1,7 +1,7 @@ use brk_traversable::Traversable; use brk_types::{Close, Dollars, Sats, StoredF32}; -use crate::grouped::{ComputedVecsFromDateIndex, LazyVecsFrom2FromDateIndex}; +use crate::internal::{ComputedVecsFromDateIndex, LazyVecsFrom2FromDateIndex}; /// Dollar-cost averaging metrics by time period and year class #[derive(Clone, Traversable)] diff --git 
a/crates/brk_computer/src/market/history/import.rs b/crates/brk_computer/src/market/history/import.rs deleted file mode 100644 index dd2edb50b..000000000 --- a/crates/brk_computer/src/market/history/import.rs +++ /dev/null @@ -1,308 +0,0 @@ -use brk_error::Result; -use brk_types::Version; -use vecdb::Database; - -use super::Vecs; -use crate::{ - grouped::{ - ComputedVecsFromDateIndex, LazyVecsFrom2FromDateIndex, PercentageDiffCloseDollars, Source, - VecBuilderOptions, - }, - indexes, price, -}; - -impl Vecs { - pub fn forced_import( - db: &Database, - version: Version, - indexes: &indexes::Vecs, - price: &price::Vecs, - ) -> Result { - let v0 = Version::ZERO; - let last = VecBuilderOptions::default().add_last(); - - let price_1d_ago = ComputedVecsFromDateIndex::forced_import( - db, - "price_1d_ago", - Source::Compute, - version + v0, - indexes, - last, - )?; - let price_1w_ago = ComputedVecsFromDateIndex::forced_import( - db, - "price_1w_ago", - Source::Compute, - version + v0, - indexes, - last, - )?; - let price_1m_ago = ComputedVecsFromDateIndex::forced_import( - db, - "price_1m_ago", - Source::Compute, - version + v0, - indexes, - last, - )?; - let price_3m_ago = ComputedVecsFromDateIndex::forced_import( - db, - "price_3m_ago", - Source::Compute, - version + v0, - indexes, - last, - )?; - let price_6m_ago = ComputedVecsFromDateIndex::forced_import( - db, - "price_6m_ago", - Source::Compute, - version + v0, - indexes, - last, - )?; - let price_1y_ago = ComputedVecsFromDateIndex::forced_import( - db, - "price_1y_ago", - Source::Compute, - version + v0, - indexes, - last, - )?; - let price_2y_ago = ComputedVecsFromDateIndex::forced_import( - db, - "price_2y_ago", - Source::Compute, - version + v0, - indexes, - last, - )?; - let price_3y_ago = ComputedVecsFromDateIndex::forced_import( - db, - "price_3y_ago", - Source::Compute, - version + v0, - indexes, - last, - )?; - let price_4y_ago = ComputedVecsFromDateIndex::forced_import( - db, - "price_4y_ago", - 
Source::Compute, - version + v0, - indexes, - last, - )?; - let price_5y_ago = ComputedVecsFromDateIndex::forced_import( - db, - "price_5y_ago", - Source::Compute, - version + v0, - indexes, - last, - )?; - let price_6y_ago = ComputedVecsFromDateIndex::forced_import( - db, - "price_6y_ago", - Source::Compute, - version + v0, - indexes, - last, - )?; - let price_8y_ago = ComputedVecsFromDateIndex::forced_import( - db, - "price_8y_ago", - Source::Compute, - version + v0, - indexes, - last, - )?; - let price_10y_ago = ComputedVecsFromDateIndex::forced_import( - db, - "price_10y_ago", - Source::Compute, - version + v0, - indexes, - last, - )?; - - let _1d_price_returns = - LazyVecsFrom2FromDateIndex::from_computed::( - "1d_price_returns", - version + v0, - &price.timeindexes_to_price_close, - &price_1d_ago, - ); - let _1w_price_returns = - LazyVecsFrom2FromDateIndex::from_computed::( - "1w_price_returns", - version + v0, - &price.timeindexes_to_price_close, - &price_1w_ago, - ); - let _1m_price_returns = - LazyVecsFrom2FromDateIndex::from_computed::( - "1m_price_returns", - version + v0, - &price.timeindexes_to_price_close, - &price_1m_ago, - ); - let _3m_price_returns = - LazyVecsFrom2FromDateIndex::from_computed::( - "3m_price_returns", - version + v0, - &price.timeindexes_to_price_close, - &price_3m_ago, - ); - let _6m_price_returns = - LazyVecsFrom2FromDateIndex::from_computed::( - "6m_price_returns", - version + v0, - &price.timeindexes_to_price_close, - &price_6m_ago, - ); - let _1y_price_returns = - LazyVecsFrom2FromDateIndex::from_computed::( - "1y_price_returns", - version + v0, - &price.timeindexes_to_price_close, - &price_1y_ago, - ); - let _2y_price_returns = - LazyVecsFrom2FromDateIndex::from_computed::( - "2y_price_returns", - version + v0, - &price.timeindexes_to_price_close, - &price_2y_ago, - ); - let _3y_price_returns = - LazyVecsFrom2FromDateIndex::from_computed::( - "3y_price_returns", - version + v0, - &price.timeindexes_to_price_close, - 
&price_3y_ago, - ); - let _4y_price_returns = - LazyVecsFrom2FromDateIndex::from_computed::( - "4y_price_returns", - version + v0, - &price.timeindexes_to_price_close, - &price_4y_ago, - ); - let _5y_price_returns = - LazyVecsFrom2FromDateIndex::from_computed::( - "5y_price_returns", - version + v0, - &price.timeindexes_to_price_close, - &price_5y_ago, - ); - let _6y_price_returns = - LazyVecsFrom2FromDateIndex::from_computed::( - "6y_price_returns", - version + v0, - &price.timeindexes_to_price_close, - &price_6y_ago, - ); - let _8y_price_returns = - LazyVecsFrom2FromDateIndex::from_computed::( - "8y_price_returns", - version + v0, - &price.timeindexes_to_price_close, - &price_8y_ago, - ); - let _10y_price_returns = - LazyVecsFrom2FromDateIndex::from_computed::( - "10y_price_returns", - version + v0, - &price.timeindexes_to_price_close, - &price_10y_ago, - ); - - Ok(Self { - price_1d_ago, - price_1w_ago, - price_1m_ago, - price_3m_ago, - price_6m_ago, - price_1y_ago, - price_2y_ago, - price_3y_ago, - price_4y_ago, - price_5y_ago, - price_6y_ago, - price_8y_ago, - price_10y_ago, - - _1d_price_returns, - _1w_price_returns, - _1m_price_returns, - _3m_price_returns, - _6m_price_returns, - _1y_price_returns, - _2y_price_returns, - _3y_price_returns, - _4y_price_returns, - _5y_price_returns, - _6y_price_returns, - _8y_price_returns, - _10y_price_returns, - - _2y_cagr: ComputedVecsFromDateIndex::forced_import( - db, - "2y_cagr", - Source::Compute, - version + v0, - indexes, - last, - )?, - _3y_cagr: ComputedVecsFromDateIndex::forced_import( - db, - "3y_cagr", - Source::Compute, - version + v0, - indexes, - last, - )?, - _4y_cagr: ComputedVecsFromDateIndex::forced_import( - db, - "4y_cagr", - Source::Compute, - version + v0, - indexes, - last, - )?, - _5y_cagr: ComputedVecsFromDateIndex::forced_import( - db, - "5y_cagr", - Source::Compute, - version + v0, - indexes, - last, - )?, - _6y_cagr: ComputedVecsFromDateIndex::forced_import( - db, - "6y_cagr", - Source::Compute, - 
version + v0, - indexes, - last, - )?, - _8y_cagr: ComputedVecsFromDateIndex::forced_import( - db, - "8y_cagr", - Source::Compute, - version + v0, - indexes, - last, - )?, - _10y_cagr: ComputedVecsFromDateIndex::forced_import( - db, - "10y_cagr", - Source::Compute, - version + v0, - indexes, - last, - )?, - }) - } -} diff --git a/crates/brk_computer/src/market/import.rs b/crates/brk_computer/src/market/import.rs index 3537bc26c..24b7d3c87 100644 --- a/crates/brk_computer/src/market/import.rs +++ b/crates/brk_computer/src/market/import.rs @@ -5,10 +5,11 @@ use brk_traversable::Traversable; use brk_types::Version; use vecdb::{Database, PAGE_SIZE}; -use crate::{indexes, price}; +use crate::{distribution, indexes, price, transactions}; use super::{ - AthVecs, DcaVecs, HistoryVecs, MovingAverageVecs, RangeVecs, Vecs, VolatilityVecs, + AthVecs, DcaVecs, IndicatorsVecs, LookbackVecs, MovingAverageVecs, RangeVecs, ReturnsVecs, + Vecs, VolatilityVecs, }; impl Vecs { @@ -17,6 +18,8 @@ impl Vecs { parent_version: Version, indexes: &indexes::Vecs, price: Option<&price::Vecs>, + distribution: &distribution::Vecs, + transactions: &transactions::Vecs, ) -> Result { let db = Database::open(&parent_path.join(super::DB_NAME))?; db.set_min_len(PAGE_SIZE * 1_000_000)?; @@ -26,20 +29,24 @@ impl Vecs { let price = price.expect("price required for market"); let ath = AthVecs::forced_import(&db, version, indexes, price)?; - let volatility = VolatilityVecs::forced_import(&db, version, indexes)?; + let lookback = LookbackVecs::forced_import(&db, version, indexes)?; + let returns = ReturnsVecs::forced_import(&db, version, indexes, price, &lookback)?; + let volatility = VolatilityVecs::forced_import(version, &returns); let range = RangeVecs::forced_import(&db, version, indexes)?; let moving_average = MovingAverageVecs::forced_import(&db, version, indexes, Some(price))?; - let history = HistoryVecs::forced_import(&db, version, indexes, price)?; let dca = DcaVecs::forced_import(&db, version, 
indexes, price)?; + let indicators = IndicatorsVecs::forced_import(&db, version, indexes, true, distribution, transactions, &moving_average)?; let this = Self { db, ath, + lookback, + returns, volatility, range, moving_average, - history, dca, + indicators, }; this.db.retain_regions( diff --git a/crates/brk_computer/src/market/indicators/compute.rs b/crates/brk_computer/src/market/indicators/compute.rs new file mode 100644 index 000000000..17693ba6e --- /dev/null +++ b/crates/brk_computer/src/market/indicators/compute.rs @@ -0,0 +1,250 @@ +use brk_error::Result; +use brk_types::{StoredF32, Version}; +use vecdb::{AnyVec, Exit, TypedVecIterator}; + +use super::{ + super::{moving_average, range, returns::Vecs as ReturnsVecs}, + Vecs, +}; +use crate::{ComputeIndexes, blocks, distribution, price, utils::OptionExt}; + +impl Vecs { + #[allow(clippy::too_many_arguments)] + pub fn compute( + &mut self, + rewards: &blocks::RewardsVecs, + returns: &ReturnsVecs, + moving_average: &moving_average::Vecs, + range: &range::Vecs, + price: &price::Vecs, + distribution: &distribution::Vecs, + starting_indexes: &ComputeIndexes, + exit: &Exit, + ) -> Result<()> { + if let (Some(puell), Some(sma), Some(coinbase_dollars)) = ( + self.indexes_to_puell_multiple.as_mut(), + rewards.indexes_to_subsidy_usd_1y_sma.as_ref(), + rewards.indexes_to_coinbase.dollars.as_ref(), + ) { + let date_to_coinbase_usd_sum = coinbase_dollars.dateindex.unwrap_sum(); + + puell.compute_all(starting_indexes, exit, |v| { + v.compute_divide( + starting_indexes.dateindex, + date_to_coinbase_usd_sum, + sma.dateindex.as_ref().unwrap(), + exit, + )?; + Ok(()) + })?; + } + + let returns_dateindex = returns._1d_price_returns.dateindex.u(); + + self.dateindex_to_rsi_gains.compute_transform( + starting_indexes.dateindex, + returns_dateindex, + |(i, ret, ..)| (i, StoredF32::from((*ret).max(0.0))), + exit, + )?; + + self.dateindex_to_rsi_losses.compute_transform( + starting_indexes.dateindex, + returns_dateindex, + |(i, ret, 
..)| (i, StoredF32::from((-*ret).max(0.0))), + exit, + )?; + + self.dateindex_to_rsi_avg_gain_14d.compute_sma( + starting_indexes.dateindex, + &self.dateindex_to_rsi_gains, + 14, + exit, + )?; + + self.dateindex_to_rsi_avg_loss_14d.compute_sma( + starting_indexes.dateindex, + &self.dateindex_to_rsi_losses, + 14, + exit, + )?; + + let ema12 = moving_average + .indexes_to_price_12d_ema + .price + .u() + .dateindex + .u(); + let ema26 = moving_average + .indexes_to_price_26d_ema + .price + .u() + .dateindex + .u(); + + self.dateindex_to_macd_line.compute_transform2( + starting_indexes.dateindex, + ema12, + ema26, + |(i, a, b, _)| (i, StoredF32::from(*a - *b)), + exit, + )?; + + self.dateindex_to_macd_signal.compute_ema( + starting_indexes.dateindex, + &self.dateindex_to_macd_line, + 9, + exit, + )?; + + // Stochastic RSI: StochRSI = (RSI - min) / (max - min) * 100 + self.dateindex_to_rsi_14d_min.compute_min( + starting_indexes.dateindex, + &self.dateindex_to_rsi_14d, + 14, + exit, + )?; + + self.dateindex_to_rsi_14d_max.compute_max( + starting_indexes.dateindex, + &self.dateindex_to_rsi_14d, + 14, + exit, + )?; + + self.dateindex_to_stoch_rsi.compute_transform3( + starting_indexes.dateindex, + &self.dateindex_to_rsi_14d, + &self.dateindex_to_rsi_14d_min, + &self.dateindex_to_rsi_14d_max, + |(i, rsi, min, max, ..)| { + let range = *max - *min; + let stoch = if range == 0.0 { + StoredF32::from(50.0) + } else { + StoredF32::from((*rsi - *min) / range * 100.0) + }; + (i, stoch) + }, + exit, + )?; + + self.dateindex_to_stoch_rsi_k.compute_sma( + starting_indexes.dateindex, + &self.dateindex_to_stoch_rsi, + 3, + exit, + )?; + + self.dateindex_to_stoch_rsi_d.compute_sma( + starting_indexes.dateindex, + &self.dateindex_to_stoch_rsi_k, + 3, + exit, + )?; + + // Stochastic Oscillator: K = (close - low_14) / (high_14 - low_14) * 100 + if let (Some(close), Some(low_2w), Some(high_2w)) = ( + price.usd.timeindexes_to_price_close.dateindex.as_ref(), + 
range.indexes_to_price_2w_min.dateindex.as_ref(), + range.indexes_to_price_2w_max.dateindex.as_ref(), + ) { + self.dateindex_to_stoch_k.compute_transform3( + starting_indexes.dateindex, + close, + low_2w, + high_2w, + |(i, close, low, high, ..)| { + let range = *high - *low; + let stoch = if range == 0.0 { + StoredF32::from(50.0) + } else { + StoredF32::from((**close - *low) / range * 100.0) + }; + (i, stoch) + }, + exit, + )?; + + self.dateindex_to_stoch_d.compute_sma( + starting_indexes.dateindex, + &self.dateindex_to_stoch_k, + 3, + exit, + )?; + } + + let amount_range = &distribution.utxo_cohorts.amount_range; + let supply_vecs: Vec<_> = amount_range + .iter() + .filter_map(|c| c.metrics.supply.indexes_to_supply.sats.dateindex.as_ref()) + .collect(); + let count_vecs: Vec<_> = amount_range + .iter() + .filter_map(|c| { + c.metrics + .supply + .indexes_to_utxo_count + .dateindex + .last + .as_ref() + }) + .collect(); + + if let Some(first_supply) = supply_vecs.first() + && supply_vecs.len() == count_vecs.len() + { + let version = supply_vecs + .iter() + .fold(Version::ZERO, |acc, v| acc + v.version()) + + count_vecs + .iter() + .fold(Version::ZERO, |acc, v| acc + v.version()); + let mut supply_iters: Vec<_> = supply_vecs.iter().map(|v| v.into_iter()).collect(); + let mut count_iters: Vec<_> = count_vecs.iter().map(|v| v.into_iter()).collect(); + + self.dateindex_to_gini.compute_to( + starting_indexes.dateindex, + first_supply.len(), + version, + |dateindex| { + let buckets: Vec<(u64, u64)> = supply_iters + .iter_mut() + .zip(count_iters.iter_mut()) + .map(|(s, c)| { + let count: u64 = *c.get_unwrap(dateindex); + let supply: u64 = *s.get_unwrap(dateindex); + (count, supply) + }) + .collect(); + (dateindex, StoredF32::from(gini_from_lorenz(&buckets))) + }, + exit, + )?; + } + + Ok(()) + } +} + +fn gini_from_lorenz(buckets: &[(u64, u64)]) -> f32 { + let total_count: u64 = buckets.iter().map(|(c, _)| c).sum(); + let total_supply: u64 = buckets.iter().map(|(_, s)| 
s).sum(); + + if total_count == 0 || total_supply == 0 { + return 0.0; + } + + let (mut cum_count, mut cum_supply, mut area) = (0u64, 0u64, 0.0f64); + let (tc, ts) = (total_count as f64, total_supply as f64); + + for &(count, supply) in buckets { + let (p0, w0) = (cum_count as f64 / tc, cum_supply as f64 / ts); + cum_count += count; + cum_supply += supply; + let (p1, w1) = (cum_count as f64 / tc, cum_supply as f64 / ts); + area += (p1 - p0) * (w0 + w1) / 2.0; + } + + (1.0 - 2.0 * area) as f32 +} diff --git a/crates/brk_computer/src/market/indicators/import.rs b/crates/brk_computer/src/market/indicators/import.rs new file mode 100644 index 000000000..020c57c95 --- /dev/null +++ b/crates/brk_computer/src/market/indicators/import.rs @@ -0,0 +1,128 @@ +use brk_error::Result; +use brk_types::Version; +use vecdb::{Database, EagerVec, ImportableVec, IterableCloneableVec, LazyVecFrom2}; + +use super::{super::moving_average, Vecs}; +use crate::{ + distribution, indexes, + internal::{ + ComputedVecsFromDateIndex, DifferenceF32, LazyVecsFrom2FromDateIndex, Ratio32, RsiFormula, + Source, VecBuilderOptions, + }, + transactions, +}; + +impl Vecs { + pub fn forced_import( + db: &Database, + version: Version, + indexes: &indexes::Vecs, + compute_dollars: bool, + distribution: &distribution::Vecs, + transactions: &transactions::Vecs, + moving_average: &moving_average::Vecs, + ) -> Result { + let v0 = Version::ZERO; + let last = || VecBuilderOptions::default().add_last(); + + let indexes_to_nvt = distribution + .utxo_cohorts + .all + .metrics + .supply + .indexes_to_supply + .dollars + .as_ref() + .zip(transactions.volume.indexes_to_sent_sum.dollars.as_ref()) + .map(|(market_cap, volume)| { + LazyVecsFrom2FromDateIndex::from_dateindex_and_height::( + "nvt", + version + v0, + market_cap, + volume, + ) + }); + + let dateindex_to_rsi_gains = EagerVec::forced_import(db, "rsi_gains", version + v0)?; + let dateindex_to_rsi_losses = EagerVec::forced_import(db, "rsi_losses", version + v0)?; 
+ let dateindex_to_rsi_avg_gain_14d = + EagerVec::forced_import(db, "rsi_avg_gain_14d", version + v0)?; + let dateindex_to_rsi_avg_loss_14d = + EagerVec::forced_import(db, "rsi_avg_loss_14d", version + v0)?; + let dateindex_to_rsi_14d = LazyVecFrom2::transformed::( + "rsi_14d", + version + v0, + dateindex_to_rsi_avg_gain_14d.boxed_clone(), + dateindex_to_rsi_avg_loss_14d.boxed_clone(), + ); + + let dateindex_to_macd_line = EagerVec::forced_import(db, "macd_line", version + v0)?; + let dateindex_to_macd_signal = EagerVec::forced_import(db, "macd_signal", version + v0)?; + let dateindex_to_macd_histogram = LazyVecFrom2::transformed::( + "macd_histogram", + version + v0, + dateindex_to_macd_line.boxed_clone(), + dateindex_to_macd_signal.boxed_clone(), + ); + + let dateindex_to_rsi_14d_min = EagerVec::forced_import(db, "rsi_14d_min", version + v0)?; + let dateindex_to_rsi_14d_max = EagerVec::forced_import(db, "rsi_14d_max", version + v0)?; + let dateindex_to_stoch_rsi = EagerVec::forced_import(db, "stoch_rsi", version + v0)?; + let dateindex_to_stoch_rsi_k = EagerVec::forced_import(db, "stoch_rsi_k", version + v0)?; + let dateindex_to_stoch_rsi_d = EagerVec::forced_import(db, "stoch_rsi_d", version + v0)?; + + let dateindex_to_stoch_k = EagerVec::forced_import(db, "stoch_k", version + v0)?; + let dateindex_to_stoch_d = EagerVec::forced_import(db, "stoch_d", version + v0)?; + + let dateindex_to_gini = EagerVec::forced_import(db, "gini", version + v0)?; + + // Pi Cycle Top: 111d SMA / (2 * 350d SMA) - signals top when > 1 + let dateindex_to_pi_cycle = moving_average + .indexes_to_price_111d_sma + .price + .as_ref() + .and_then(|sma_111| sma_111.dateindex.as_ref()) + .zip(moving_average.indexes_to_price_350d_sma_x2.dateindex.as_ref()) + .map(|(sma_111, sma_350_x2)| { + LazyVecFrom2::transformed::( + "pi_cycle", + version + v0, + sma_111.boxed_clone(), + sma_350_x2.boxed_clone(), + ) + }); + + Ok(Self { + indexes_to_puell_multiple: compute_dollars + .then(|| { + 
ComputedVecsFromDateIndex::forced_import( + db, + "puell_multiple", + Source::Compute, + version + v0, + indexes, + last(), + ) + }) + .transpose()?, + indexes_to_nvt, + dateindex_to_rsi_gains, + dateindex_to_rsi_losses, + dateindex_to_rsi_avg_gain_14d, + dateindex_to_rsi_avg_loss_14d, + dateindex_to_rsi_14d, + dateindex_to_rsi_14d_min, + dateindex_to_rsi_14d_max, + dateindex_to_stoch_rsi, + dateindex_to_stoch_rsi_k, + dateindex_to_stoch_rsi_d, + dateindex_to_stoch_k, + dateindex_to_stoch_d, + dateindex_to_pi_cycle, + dateindex_to_macd_line, + dateindex_to_macd_signal, + dateindex_to_macd_histogram, + dateindex_to_gini, + }) + } +} diff --git a/crates/brk_computer/src/market/indicators/mod.rs b/crates/brk_computer/src/market/indicators/mod.rs new file mode 100644 index 000000000..1136f9ebd --- /dev/null +++ b/crates/brk_computer/src/market/indicators/mod.rs @@ -0,0 +1,5 @@ +mod compute; +mod import; +mod vecs; + +pub use vecs::Vecs; diff --git a/crates/brk_computer/src/market/indicators/vecs.rs b/crates/brk_computer/src/market/indicators/vecs.rs new file mode 100644 index 000000000..9dbb4c819 --- /dev/null +++ b/crates/brk_computer/src/market/indicators/vecs.rs @@ -0,0 +1,34 @@ +use brk_traversable::Traversable; +use brk_types::{DateIndex, Dollars, StoredF32}; +use vecdb::{EagerVec, LazyVecFrom2, PcoVec}; + +use crate::internal::{ComputedVecsFromDateIndex, LazyVecsFrom2FromDateIndex}; + +#[derive(Clone, Traversable)] +pub struct Vecs { + pub indexes_to_puell_multiple: Option>, + pub indexes_to_nvt: Option>, + + pub dateindex_to_rsi_gains: EagerVec>, + pub dateindex_to_rsi_losses: EagerVec>, + pub dateindex_to_rsi_avg_gain_14d: EagerVec>, + pub dateindex_to_rsi_avg_loss_14d: EagerVec>, + pub dateindex_to_rsi_14d: LazyVecFrom2, + + pub dateindex_to_rsi_14d_min: EagerVec>, + pub dateindex_to_rsi_14d_max: EagerVec>, + pub dateindex_to_stoch_rsi: EagerVec>, + pub dateindex_to_stoch_rsi_k: EagerVec>, + pub dateindex_to_stoch_rsi_d: EagerVec>, + + pub 
dateindex_to_stoch_k: EagerVec>, + pub dateindex_to_stoch_d: EagerVec>, + + pub dateindex_to_pi_cycle: Option>, + + pub dateindex_to_macd_line: EagerVec>, + pub dateindex_to_macd_signal: EagerVec>, + pub dateindex_to_macd_histogram: LazyVecFrom2, + + pub dateindex_to_gini: EagerVec>, +} diff --git a/crates/brk_computer/src/market/history/compute.rs b/crates/brk_computer/src/market/lookback/compute.rs similarity index 56% rename from crates/brk_computer/src/market/history/compute.rs rename to crates/brk_computer/src/market/lookback/compute.rs index c7ce2b730..3d1749c4b 100644 --- a/crates/brk_computer/src/market/history/compute.rs +++ b/crates/brk_computer/src/market/lookback/compute.rs @@ -11,7 +11,7 @@ impl Vecs { starting_indexes: &ComputeIndexes, exit: &Exit, ) -> Result<()> { - let close = price.timeindexes_to_price_close.dateindex.u(); + let close = price.usd.timeindexes_to_price_close.dateindex.u(); self.price_1d_ago.compute_all(starting_indexes, exit, |v| { v.compute_previous_value(starting_indexes.dateindex, close, 1, exit)?; @@ -79,71 +79,6 @@ impl Vecs { Ok(()) })?; - // CAGR computed from returns - self._2y_cagr.compute_all(starting_indexes, exit, |v| { - v.compute_cagr( - starting_indexes.dateindex, - self._2y_price_returns.dateindex.u(), - 2 * 365, - exit, - )?; - Ok(()) - })?; - self._3y_cagr.compute_all(starting_indexes, exit, |v| { - v.compute_cagr( - starting_indexes.dateindex, - self._3y_price_returns.dateindex.u(), - 3 * 365, - exit, - )?; - Ok(()) - })?; - self._4y_cagr.compute_all(starting_indexes, exit, |v| { - v.compute_cagr( - starting_indexes.dateindex, - self._4y_price_returns.dateindex.u(), - 4 * 365, - exit, - )?; - Ok(()) - })?; - self._5y_cagr.compute_all(starting_indexes, exit, |v| { - v.compute_cagr( - starting_indexes.dateindex, - self._5y_price_returns.dateindex.u(), - 5 * 365, - exit, - )?; - Ok(()) - })?; - self._6y_cagr.compute_all(starting_indexes, exit, |v| { - v.compute_cagr( - starting_indexes.dateindex, - 
self._6y_price_returns.dateindex.u(), - 6 * 365, - exit, - )?; - Ok(()) - })?; - self._8y_cagr.compute_all(starting_indexes, exit, |v| { - v.compute_cagr( - starting_indexes.dateindex, - self._8y_price_returns.dateindex.u(), - 8 * 365, - exit, - )?; - Ok(()) - })?; - self._10y_cagr.compute_all(starting_indexes, exit, |v| { - v.compute_cagr( - starting_indexes.dateindex, - self._10y_price_returns.dateindex.u(), - 10 * 365, - exit, - )?; - Ok(()) - })?; - Ok(()) } } diff --git a/crates/brk_computer/src/market/lookback/import.rs b/crates/brk_computer/src/market/lookback/import.rs new file mode 100644 index 000000000..f4d926a08 --- /dev/null +++ b/crates/brk_computer/src/market/lookback/import.rs @@ -0,0 +1,141 @@ +use brk_error::Result; +use brk_types::Version; +use vecdb::Database; + +use super::Vecs; +use crate::{ + indexes, + internal::{ComputedVecsFromDateIndex, Source, VecBuilderOptions}, +}; + +impl Vecs { + pub fn forced_import( + db: &Database, + version: Version, + indexes: &indexes::Vecs, + ) -> Result { + let v0 = Version::ZERO; + let last = VecBuilderOptions::default().add_last(); + + let price_1d_ago = ComputedVecsFromDateIndex::forced_import( + db, + "price_1d_ago", + Source::Compute, + version + v0, + indexes, + last, + )?; + let price_1w_ago = ComputedVecsFromDateIndex::forced_import( + db, + "price_1w_ago", + Source::Compute, + version + v0, + indexes, + last, + )?; + let price_1m_ago = ComputedVecsFromDateIndex::forced_import( + db, + "price_1m_ago", + Source::Compute, + version + v0, + indexes, + last, + )?; + let price_3m_ago = ComputedVecsFromDateIndex::forced_import( + db, + "price_3m_ago", + Source::Compute, + version + v0, + indexes, + last, + )?; + let price_6m_ago = ComputedVecsFromDateIndex::forced_import( + db, + "price_6m_ago", + Source::Compute, + version + v0, + indexes, + last, + )?; + let price_1y_ago = ComputedVecsFromDateIndex::forced_import( + db, + "price_1y_ago", + Source::Compute, + version + v0, + indexes, + last, + )?; + let 
price_2y_ago = ComputedVecsFromDateIndex::forced_import( + db, + "price_2y_ago", + Source::Compute, + version + v0, + indexes, + last, + )?; + let price_3y_ago = ComputedVecsFromDateIndex::forced_import( + db, + "price_3y_ago", + Source::Compute, + version + v0, + indexes, + last, + )?; + let price_4y_ago = ComputedVecsFromDateIndex::forced_import( + db, + "price_4y_ago", + Source::Compute, + version + v0, + indexes, + last, + )?; + let price_5y_ago = ComputedVecsFromDateIndex::forced_import( + db, + "price_5y_ago", + Source::Compute, + version + v0, + indexes, + last, + )?; + let price_6y_ago = ComputedVecsFromDateIndex::forced_import( + db, + "price_6y_ago", + Source::Compute, + version + v0, + indexes, + last, + )?; + let price_8y_ago = ComputedVecsFromDateIndex::forced_import( + db, + "price_8y_ago", + Source::Compute, + version + v0, + indexes, + last, + )?; + let price_10y_ago = ComputedVecsFromDateIndex::forced_import( + db, + "price_10y_ago", + Source::Compute, + version + v0, + indexes, + last, + )?; + + Ok(Self { + price_1d_ago, + price_1w_ago, + price_1m_ago, + price_3m_ago, + price_6m_ago, + price_1y_ago, + price_2y_ago, + price_3y_ago, + price_4y_ago, + price_5y_ago, + price_6y_ago, + price_8y_ago, + price_10y_ago, + }) + } +} diff --git a/crates/brk_computer/src/market/lookback/mod.rs b/crates/brk_computer/src/market/lookback/mod.rs new file mode 100644 index 000000000..1136f9ebd --- /dev/null +++ b/crates/brk_computer/src/market/lookback/mod.rs @@ -0,0 +1,5 @@ +mod compute; +mod import; +mod vecs; + +pub use vecs::Vecs; diff --git a/crates/brk_computer/src/market/lookback/vecs.rs b/crates/brk_computer/src/market/lookback/vecs.rs new file mode 100644 index 000000000..d873f0d8a --- /dev/null +++ b/crates/brk_computer/src/market/lookback/vecs.rs @@ -0,0 +1,22 @@ +use brk_traversable::Traversable; +use brk_types::Dollars; + +use crate::internal::ComputedVecsFromDateIndex; + +/// Price lookback metrics +#[derive(Clone, Traversable)] +pub struct Vecs { + 
pub price_1d_ago: ComputedVecsFromDateIndex, + pub price_1w_ago: ComputedVecsFromDateIndex, + pub price_1m_ago: ComputedVecsFromDateIndex, + pub price_3m_ago: ComputedVecsFromDateIndex, + pub price_6m_ago: ComputedVecsFromDateIndex, + pub price_1y_ago: ComputedVecsFromDateIndex, + pub price_2y_ago: ComputedVecsFromDateIndex, + pub price_3y_ago: ComputedVecsFromDateIndex, + pub price_4y_ago: ComputedVecsFromDateIndex, + pub price_5y_ago: ComputedVecsFromDateIndex, + pub price_6y_ago: ComputedVecsFromDateIndex, + pub price_8y_ago: ComputedVecsFromDateIndex, + pub price_10y_ago: ComputedVecsFromDateIndex, +} diff --git a/crates/brk_computer/src/market/mod.rs b/crates/brk_computer/src/market/mod.rs index 9610beb02..4343b8625 100644 --- a/crates/brk_computer/src/market/mod.rs +++ b/crates/brk_computer/src/market/mod.rs @@ -1,10 +1,12 @@ pub mod ath; mod compute; pub mod dca; -pub mod history; mod import; +pub mod indicators; +pub mod lookback; pub mod moving_average; pub mod range; +pub mod returns; pub mod volatility; use brk_traversable::Traversable; @@ -12,9 +14,11 @@ use vecdb::Database; pub use ath::Vecs as AthVecs; pub use dca::Vecs as DcaVecs; -pub use history::Vecs as HistoryVecs; +pub use indicators::Vecs as IndicatorsVecs; +pub use lookback::Vecs as LookbackVecs; pub use moving_average::Vecs as MovingAverageVecs; pub use range::Vecs as RangeVecs; +pub use returns::Vecs as ReturnsVecs; pub use volatility::Vecs as VolatilityVecs; pub const DB_NAME: &str = "market"; @@ -25,9 +29,11 @@ pub struct Vecs { #[traversable(skip)] pub(crate) db: Database, pub ath: AthVecs, + pub lookback: LookbackVecs, + pub returns: ReturnsVecs, pub volatility: VolatilityVecs, pub range: RangeVecs, pub moving_average: MovingAverageVecs, - pub history: HistoryVecs, pub dca: DcaVecs, + pub indicators: IndicatorsVecs, } diff --git a/crates/brk_computer/src/market/moving_average/compute.rs b/crates/brk_computer/src/market/moving_average/compute.rs index 698c2063c..d4d96b3c8 100644 --- 
a/crates/brk_computer/src/market/moving_average/compute.rs +++ b/crates/brk_computer/src/market/moving_average/compute.rs @@ -11,177 +11,55 @@ impl Vecs { starting_indexes: &ComputeIndexes, exit: &Exit, ) -> Result<()> { - let close = price.timeindexes_to_price_close.dateindex.u(); + let close = price.usd.timeindexes_to_price_close.dateindex.u(); - // SMAs - self.indexes_to_price_1w_sma - .compute_all(price, starting_indexes, exit, |v| { - v.compute_sma(starting_indexes.dateindex, close, 7, exit)?; + for (sma, period) in [ + (&mut self.indexes_to_price_1w_sma, 7), + (&mut self.indexes_to_price_8d_sma, 8), + (&mut self.indexes_to_price_13d_sma, 13), + (&mut self.indexes_to_price_21d_sma, 21), + (&mut self.indexes_to_price_1m_sma, 30), + (&mut self.indexes_to_price_34d_sma, 34), + (&mut self.indexes_to_price_55d_sma, 55), + (&mut self.indexes_to_price_89d_sma, 89), + (&mut self.indexes_to_price_111d_sma, 111), + (&mut self.indexes_to_price_144d_sma, 144), + (&mut self.indexes_to_price_200d_sma, 200), + (&mut self.indexes_to_price_350d_sma, 350), + (&mut self.indexes_to_price_1y_sma, 365), + (&mut self.indexes_to_price_2y_sma, 2 * 365), + (&mut self.indexes_to_price_200w_sma, 200 * 7), + (&mut self.indexes_to_price_4y_sma, 4 * 365), + ] { + sma.compute_all(price, starting_indexes, exit, |v| { + v.compute_sma(starting_indexes.dateindex, close, period, exit)?; Ok(()) })?; + } - self.indexes_to_price_8d_sma - .compute_all(price, starting_indexes, exit, |v| { - v.compute_sma(starting_indexes.dateindex, close, 8, exit)?; - Ok(()) - })?; - - self.indexes_to_price_13d_sma - .compute_all(price, starting_indexes, exit, |v| { - v.compute_sma(starting_indexes.dateindex, close, 13, exit)?; - Ok(()) - })?; - - self.indexes_to_price_21d_sma - .compute_all(price, starting_indexes, exit, |v| { - v.compute_sma(starting_indexes.dateindex, close, 21, exit)?; - Ok(()) - })?; - - self.indexes_to_price_1m_sma - .compute_all(price, starting_indexes, exit, |v| { - 
v.compute_sma(starting_indexes.dateindex, close, 30, exit)?; - Ok(()) - })?; - - self.indexes_to_price_34d_sma - .compute_all(price, starting_indexes, exit, |v| { - v.compute_sma(starting_indexes.dateindex, close, 34, exit)?; - Ok(()) - })?; - - self.indexes_to_price_55d_sma - .compute_all(price, starting_indexes, exit, |v| { - v.compute_sma(starting_indexes.dateindex, close, 55, exit)?; - Ok(()) - })?; - - self.indexes_to_price_89d_sma - .compute_all(price, starting_indexes, exit, |v| { - v.compute_sma(starting_indexes.dateindex, close, 89, exit)?; - Ok(()) - })?; - - self.indexes_to_price_144d_sma - .compute_all(price, starting_indexes, exit, |v| { - v.compute_sma(starting_indexes.dateindex, close, 144, exit)?; - Ok(()) - })?; - - self.indexes_to_price_200d_sma - .compute_all(price, starting_indexes, exit, |v| { - v.compute_sma(starting_indexes.dateindex, close, 200, exit)?; - Ok(()) - })?; - - self.indexes_to_price_1y_sma - .compute_all(price, starting_indexes, exit, |v| { - v.compute_sma(starting_indexes.dateindex, close, 365, exit)?; - Ok(()) - })?; - - self.indexes_to_price_2y_sma - .compute_all(price, starting_indexes, exit, |v| { - v.compute_sma(starting_indexes.dateindex, close, 2 * 365, exit)?; - Ok(()) - })?; - - self.indexes_to_price_200w_sma - .compute_all(price, starting_indexes, exit, |v| { - v.compute_sma(starting_indexes.dateindex, close, 200 * 7, exit)?; - Ok(()) - })?; - - self.indexes_to_price_4y_sma - .compute_all(price, starting_indexes, exit, |v| { - v.compute_sma(starting_indexes.dateindex, close, 4 * 365, exit)?; - Ok(()) - })?; - - // EMAs - self.indexes_to_price_1w_ema - .compute_all(price, starting_indexes, exit, |v| { - v.compute_ema(starting_indexes.dateindex, close, 7, exit)?; - Ok(()) - })?; - - self.indexes_to_price_8d_ema - .compute_all(price, starting_indexes, exit, |v| { - v.compute_ema(starting_indexes.dateindex, close, 8, exit)?; - Ok(()) - })?; - - self.indexes_to_price_13d_ema - .compute_all(price, starting_indexes, exit, |v| 
{ - v.compute_ema(starting_indexes.dateindex, close, 13, exit)?; - Ok(()) - })?; - - self.indexes_to_price_21d_ema - .compute_all(price, starting_indexes, exit, |v| { - v.compute_ema(starting_indexes.dateindex, close, 21, exit)?; - Ok(()) - })?; - - self.indexes_to_price_1m_ema - .compute_all(price, starting_indexes, exit, |v| { - v.compute_ema(starting_indexes.dateindex, close, 30, exit)?; - Ok(()) - })?; - - self.indexes_to_price_34d_ema - .compute_all(price, starting_indexes, exit, |v| { - v.compute_ema(starting_indexes.dateindex, close, 34, exit)?; - Ok(()) - })?; - - self.indexes_to_price_55d_ema - .compute_all(price, starting_indexes, exit, |v| { - v.compute_ema(starting_indexes.dateindex, close, 55, exit)?; - Ok(()) - })?; - - self.indexes_to_price_89d_ema - .compute_all(price, starting_indexes, exit, |v| { - v.compute_ema(starting_indexes.dateindex, close, 89, exit)?; - Ok(()) - })?; - - self.indexes_to_price_144d_ema - .compute_all(price, starting_indexes, exit, |v| { - v.compute_ema(starting_indexes.dateindex, close, 144, exit)?; - Ok(()) - })?; - - self.indexes_to_price_200d_ema - .compute_all(price, starting_indexes, exit, |v| { - v.compute_ema(starting_indexes.dateindex, close, 200, exit)?; - Ok(()) - })?; - - self.indexes_to_price_1y_ema - .compute_all(price, starting_indexes, exit, |v| { - v.compute_ema(starting_indexes.dateindex, close, 365, exit)?; - Ok(()) - })?; - - self.indexes_to_price_2y_ema - .compute_all(price, starting_indexes, exit, |v| { - v.compute_ema(starting_indexes.dateindex, close, 2 * 365, exit)?; - Ok(()) - })?; - - self.indexes_to_price_200w_ema - .compute_all(price, starting_indexes, exit, |v| { - v.compute_ema(starting_indexes.dateindex, close, 200 * 7, exit)?; - Ok(()) - })?; - - self.indexes_to_price_4y_ema - .compute_all(price, starting_indexes, exit, |v| { - v.compute_ema(starting_indexes.dateindex, close, 4 * 365, exit)?; + for (ema, period) in [ + (&mut self.indexes_to_price_1w_ema, 7), + (&mut 
self.indexes_to_price_8d_ema, 8), + (&mut self.indexes_to_price_12d_ema, 12), + (&mut self.indexes_to_price_13d_ema, 13), + (&mut self.indexes_to_price_21d_ema, 21), + (&mut self.indexes_to_price_26d_ema, 26), + (&mut self.indexes_to_price_1m_ema, 30), + (&mut self.indexes_to_price_34d_ema, 34), + (&mut self.indexes_to_price_55d_ema, 55), + (&mut self.indexes_to_price_89d_ema, 89), + (&mut self.indexes_to_price_144d_ema, 144), + (&mut self.indexes_to_price_200d_ema, 200), + (&mut self.indexes_to_price_1y_ema, 365), + (&mut self.indexes_to_price_2y_ema, 2 * 365), + (&mut self.indexes_to_price_200w_ema, 200 * 7), + (&mut self.indexes_to_price_4y_ema, 4 * 365), + ] { + ema.compute_all(price, starting_indexes, exit, |v| { + v.compute_ema(starting_indexes.dateindex, close, period, exit)?; Ok(()) })?; + } Ok(()) } diff --git a/crates/brk_computer/src/market/moving_average/import.rs b/crates/brk_computer/src/market/moving_average/import.rs index c88fd02e3..6d5127589 100644 --- a/crates/brk_computer/src/market/moving_average/import.rs +++ b/crates/brk_computer/src/market/moving_average/import.rs @@ -4,7 +4,7 @@ use vecdb::{Database, IterableCloneableVec}; use super::Vecs; use crate::{ - grouped::{ComputedRatioVecsFromDateIndex, DollarsTimesTenths, LazyVecsFromDateIndex}, + internal::{ComputedRatioVecsFromDateIndex, DollarsTimesTenths, LazyVecsFromDateIndex}, indexes, price, }; @@ -89,6 +89,15 @@ impl Vecs { true, price, )?; + let indexes_to_price_111d_sma = ComputedRatioVecsFromDateIndex::forced_import( + db, + "price_111d_sma", + None, + version + v0, + indexes, + true, + price, + )?; let indexes_to_price_144d_sma = ComputedRatioVecsFromDateIndex::forced_import( db, "price_144d_sma", @@ -107,6 +116,15 @@ impl Vecs { true, price, )?; + let indexes_to_price_350d_sma = ComputedRatioVecsFromDateIndex::forced_import( + db, + "price_350d_sma", + None, + version + v0, + indexes, + true, + price, + )?; let indexes_to_price_1y_sma = ComputedRatioVecsFromDateIndex::forced_import( 
db, "price_1y_sma", @@ -162,6 +180,15 @@ impl Vecs { true, price, )?; + let indexes_to_price_12d_ema = ComputedRatioVecsFromDateIndex::forced_import( + db, + "price_12d_ema", + None, + version + v0, + indexes, + true, + price, + )?; let indexes_to_price_13d_ema = ComputedRatioVecsFromDateIndex::forced_import( db, "price_13d_ema", @@ -180,6 +207,15 @@ impl Vecs { true, price, )?; + let indexes_to_price_26d_ema = ComputedRatioVecsFromDateIndex::forced_import( + db, + "price_26d_ema", + None, + version + v0, + indexes, + true, + price, + )?; let indexes_to_price_1m_ema = ComputedRatioVecsFromDateIndex::forced_import( db, "price_1m_ema", @@ -293,6 +329,18 @@ impl Vecs { price_200d_sma_source, ); + let price_350d_sma_source = indexes_to_price_350d_sma.price.as_ref().unwrap(); + let indexes_to_price_350d_sma_x2 = + LazyVecsFromDateIndex::from_computed::>( + "price_350d_sma_x2", + version + v0, + price_350d_sma_source + .dateindex + .as_ref() + .map(|v| v.boxed_clone()), + price_350d_sma_source, + ); + Ok(Self { indexes_to_price_1w_sma, indexes_to_price_8d_sma, @@ -302,8 +350,10 @@ impl Vecs { indexes_to_price_34d_sma, indexes_to_price_55d_sma, indexes_to_price_89d_sma, + indexes_to_price_111d_sma, indexes_to_price_144d_sma, indexes_to_price_200d_sma, + indexes_to_price_350d_sma, indexes_to_price_1y_sma, indexes_to_price_2y_sma, indexes_to_price_200w_sma, @@ -311,8 +361,10 @@ impl Vecs { indexes_to_price_1w_ema, indexes_to_price_8d_ema, + indexes_to_price_12d_ema, indexes_to_price_13d_ema, indexes_to_price_21d_ema, + indexes_to_price_26d_ema, indexes_to_price_1m_ema, indexes_to_price_34d_ema, indexes_to_price_55d_ema, @@ -326,6 +378,7 @@ impl Vecs { indexes_to_price_200d_sma_x2_4, indexes_to_price_200d_sma_x0_8, + indexes_to_price_350d_sma_x2, }) } } diff --git a/crates/brk_computer/src/market/moving_average/vecs.rs b/crates/brk_computer/src/market/moving_average/vecs.rs index 77065f164..e49232adf 100644 --- a/crates/brk_computer/src/market/moving_average/vecs.rs +++ 
b/crates/brk_computer/src/market/moving_average/vecs.rs @@ -1,7 +1,7 @@ use brk_traversable::Traversable; use brk_types::Dollars; -use crate::grouped::{ComputedRatioVecsFromDateIndex, LazyVecsFromDateIndex}; +use crate::internal::{ComputedRatioVecsFromDateIndex, LazyVecsFromDateIndex}; /// Simple and exponential moving average metrics #[derive(Clone, Traversable)] @@ -14,8 +14,10 @@ pub struct Vecs { pub indexes_to_price_34d_sma: ComputedRatioVecsFromDateIndex, pub indexes_to_price_55d_sma: ComputedRatioVecsFromDateIndex, pub indexes_to_price_89d_sma: ComputedRatioVecsFromDateIndex, + pub indexes_to_price_111d_sma: ComputedRatioVecsFromDateIndex, pub indexes_to_price_144d_sma: ComputedRatioVecsFromDateIndex, pub indexes_to_price_200d_sma: ComputedRatioVecsFromDateIndex, + pub indexes_to_price_350d_sma: ComputedRatioVecsFromDateIndex, pub indexes_to_price_1y_sma: ComputedRatioVecsFromDateIndex, pub indexes_to_price_2y_sma: ComputedRatioVecsFromDateIndex, pub indexes_to_price_200w_sma: ComputedRatioVecsFromDateIndex, @@ -23,8 +25,10 @@ pub struct Vecs { pub indexes_to_price_1w_ema: ComputedRatioVecsFromDateIndex, pub indexes_to_price_8d_ema: ComputedRatioVecsFromDateIndex, + pub indexes_to_price_12d_ema: ComputedRatioVecsFromDateIndex, pub indexes_to_price_13d_ema: ComputedRatioVecsFromDateIndex, pub indexes_to_price_21d_ema: ComputedRatioVecsFromDateIndex, + pub indexes_to_price_26d_ema: ComputedRatioVecsFromDateIndex, pub indexes_to_price_1m_ema: ComputedRatioVecsFromDateIndex, pub indexes_to_price_34d_ema: ComputedRatioVecsFromDateIndex, pub indexes_to_price_55d_ema: ComputedRatioVecsFromDateIndex, @@ -38,4 +42,5 @@ pub struct Vecs { pub indexes_to_price_200d_sma_x2_4: LazyVecsFromDateIndex, pub indexes_to_price_200d_sma_x0_8: LazyVecsFromDateIndex, + pub indexes_to_price_350d_sma_x2: LazyVecsFromDateIndex, } diff --git a/crates/brk_computer/src/market/range/compute.rs b/crates/brk_computer/src/market/range/compute.rs index d41e2fd1e..c8ffc4ad1 100644 --- 
a/crates/brk_computer/src/market/range/compute.rs +++ b/crates/brk_computer/src/market/range/compute.rs @@ -12,9 +12,9 @@ impl Vecs { starting_indexes: &ComputeIndexes, exit: &Exit, ) -> Result<()> { - let open = price.timeindexes_to_price_open.dateindex.u(); - let low = price.timeindexes_to_price_low.dateindex.u(); - let high = price.timeindexes_to_price_high.dateindex.u(); + let open = price.usd.timeindexes_to_price_open.dateindex.u(); + let low = price.usd.timeindexes_to_price_low.dateindex.u(); + let high = price.usd.timeindexes_to_price_high.dateindex.u(); self.indexes_to_price_1w_min .compute_all(starting_indexes, exit, |v| { diff --git a/crates/brk_computer/src/market/range/import.rs b/crates/brk_computer/src/market/range/import.rs index d584faec7..854c099a8 100644 --- a/crates/brk_computer/src/market/range/import.rs +++ b/crates/brk_computer/src/market/range/import.rs @@ -4,7 +4,7 @@ use vecdb::{Database, EagerVec, ImportableVec}; use super::Vecs; use crate::{ - grouped::{ComputedVecsFromDateIndex, Source, VecBuilderOptions}, + internal::{ComputedVecsFromDateIndex, Source, VecBuilderOptions}, indexes, }; diff --git a/crates/brk_computer/src/market/range/vecs.rs b/crates/brk_computer/src/market/range/vecs.rs index 077f4391f..667a820ed 100644 --- a/crates/brk_computer/src/market/range/vecs.rs +++ b/crates/brk_computer/src/market/range/vecs.rs @@ -2,7 +2,7 @@ use brk_traversable::Traversable; use brk_types::{DateIndex, Dollars, StoredF32}; use vecdb::{EagerVec, PcoVec}; -use crate::grouped::ComputedVecsFromDateIndex; +use crate::internal::ComputedVecsFromDateIndex; /// Price range and choppiness metrics #[derive(Clone, Traversable)] diff --git a/crates/brk_computer/src/market/returns/compute.rs b/crates/brk_computer/src/market/returns/compute.rs new file mode 100644 index 000000000..f48d5ff11 --- /dev/null +++ b/crates/brk_computer/src/market/returns/compute.rs @@ -0,0 +1,121 @@ +use brk_error::Result; +use brk_types::StoredF32; +use vecdb::Exit; + +use 
super::Vecs; +use crate::{utils::OptionExt, ComputeIndexes}; + +impl Vecs { + pub fn compute(&mut self, starting_indexes: &ComputeIndexes, exit: &Exit) -> Result<()> { + // CAGR computed from returns + self._2y_cagr.compute_all(starting_indexes, exit, |v| { + v.compute_cagr( + starting_indexes.dateindex, + self._2y_price_returns.dateindex.u(), + 2 * 365, + exit, + )?; + Ok(()) + })?; + self._3y_cagr.compute_all(starting_indexes, exit, |v| { + v.compute_cagr( + starting_indexes.dateindex, + self._3y_price_returns.dateindex.u(), + 3 * 365, + exit, + )?; + Ok(()) + })?; + self._4y_cagr.compute_all(starting_indexes, exit, |v| { + v.compute_cagr( + starting_indexes.dateindex, + self._4y_price_returns.dateindex.u(), + 4 * 365, + exit, + )?; + Ok(()) + })?; + self._5y_cagr.compute_all(starting_indexes, exit, |v| { + v.compute_cagr( + starting_indexes.dateindex, + self._5y_price_returns.dateindex.u(), + 5 * 365, + exit, + )?; + Ok(()) + })?; + self._6y_cagr.compute_all(starting_indexes, exit, |v| { + v.compute_cagr( + starting_indexes.dateindex, + self._6y_price_returns.dateindex.u(), + 6 * 365, + exit, + )?; + Ok(()) + })?; + self._8y_cagr.compute_all(starting_indexes, exit, |v| { + v.compute_cagr( + starting_indexes.dateindex, + self._8y_price_returns.dateindex.u(), + 8 * 365, + exit, + )?; + Ok(()) + })?; + self._10y_cagr.compute_all(starting_indexes, exit, |v| { + v.compute_cagr( + starting_indexes.dateindex, + self._10y_price_returns.dateindex.u(), + 10 * 365, + exit, + )?; + Ok(()) + })?; + + // Returns standard deviation (computed from 1d returns) + let _1d_price_returns_dateindex = self._1d_price_returns.dateindex.u(); + + self.indexes_to_1d_returns_1w_sd.compute_all( + starting_indexes, + exit, + _1d_price_returns_dateindex, + )?; + self.indexes_to_1d_returns_1m_sd.compute_all( + starting_indexes, + exit, + _1d_price_returns_dateindex, + )?; + self.indexes_to_1d_returns_1y_sd.compute_all( + starting_indexes, + exit, + _1d_price_returns_dateindex, + )?; + + // 
Downside returns: min(return, 0) + self.dateindex_to_downside_returns.compute_transform( + starting_indexes.dateindex, + _1d_price_returns_dateindex, + |(i, ret, ..)| (i, StoredF32::from((*ret).min(0.0))), + exit, + )?; + + // Downside deviation (SD of downside returns) + self.indexes_to_downside_1w_sd.compute_all( + starting_indexes, + exit, + &self.dateindex_to_downside_returns, + )?; + self.indexes_to_downside_1m_sd.compute_all( + starting_indexes, + exit, + &self.dateindex_to_downside_returns, + )?; + self.indexes_to_downside_1y_sd.compute_all( + starting_indexes, + exit, + &self.dateindex_to_downside_returns, + )?; + + Ok(()) + } +} diff --git a/crates/brk_computer/src/market/returns/import.rs b/crates/brk_computer/src/market/returns/import.rs new file mode 100644 index 000000000..3e1f6b678 --- /dev/null +++ b/crates/brk_computer/src/market/returns/import.rs @@ -0,0 +1,281 @@ +use brk_error::Result; +use brk_types::Version; +use vecdb::{Database, EagerVec, ImportableVec}; + +use super::super::lookback; +use super::Vecs; +use crate::{ + internal::{ + ComputedStandardDeviationVecsFromDateIndex, ComputedVecsFromDateIndex, + LazyVecsFrom2FromDateIndex, PercentageDiffCloseDollars, Source, + StandardDeviationVecsOptions, VecBuilderOptions, + }, + indexes, price, +}; + +impl Vecs { + pub fn forced_import( + db: &Database, + version: Version, + indexes: &indexes::Vecs, + price: &price::Vecs, + lookback: &lookback::Vecs, + ) -> Result { + let v0 = Version::ZERO; + let v1 = Version::ONE; + let last = VecBuilderOptions::default().add_last(); + + // Price returns (lazy, from price.close and lookback.price_*_ago) + let _1d_price_returns = + LazyVecsFrom2FromDateIndex::from_computed::( + "1d_price_returns", + version + v0, + &price.usd.timeindexes_to_price_close, + &lookback.price_1d_ago, + ); + let _1w_price_returns = + LazyVecsFrom2FromDateIndex::from_computed::( + "1w_price_returns", + version + v0, + &price.usd.timeindexes_to_price_close, + &lookback.price_1w_ago, + ); 
+ let _1m_price_returns = + LazyVecsFrom2FromDateIndex::from_computed::( + "1m_price_returns", + version + v0, + &price.usd.timeindexes_to_price_close, + &lookback.price_1m_ago, + ); + let _3m_price_returns = + LazyVecsFrom2FromDateIndex::from_computed::( + "3m_price_returns", + version + v0, + &price.usd.timeindexes_to_price_close, + &lookback.price_3m_ago, + ); + let _6m_price_returns = + LazyVecsFrom2FromDateIndex::from_computed::( + "6m_price_returns", + version + v0, + &price.usd.timeindexes_to_price_close, + &lookback.price_6m_ago, + ); + let _1y_price_returns = + LazyVecsFrom2FromDateIndex::from_computed::( + "1y_price_returns", + version + v0, + &price.usd.timeindexes_to_price_close, + &lookback.price_1y_ago, + ); + let _2y_price_returns = + LazyVecsFrom2FromDateIndex::from_computed::( + "2y_price_returns", + version + v0, + &price.usd.timeindexes_to_price_close, + &lookback.price_2y_ago, + ); + let _3y_price_returns = + LazyVecsFrom2FromDateIndex::from_computed::( + "3y_price_returns", + version + v0, + &price.usd.timeindexes_to_price_close, + &lookback.price_3y_ago, + ); + let _4y_price_returns = + LazyVecsFrom2FromDateIndex::from_computed::( + "4y_price_returns", + version + v0, + &price.usd.timeindexes_to_price_close, + &lookback.price_4y_ago, + ); + let _5y_price_returns = + LazyVecsFrom2FromDateIndex::from_computed::( + "5y_price_returns", + version + v0, + &price.usd.timeindexes_to_price_close, + &lookback.price_5y_ago, + ); + let _6y_price_returns = + LazyVecsFrom2FromDateIndex::from_computed::( + "6y_price_returns", + version + v0, + &price.usd.timeindexes_to_price_close, + &lookback.price_6y_ago, + ); + let _8y_price_returns = + LazyVecsFrom2FromDateIndex::from_computed::( + "8y_price_returns", + version + v0, + &price.usd.timeindexes_to_price_close, + &lookback.price_8y_ago, + ); + let _10y_price_returns = + LazyVecsFrom2FromDateIndex::from_computed::( + "10y_price_returns", + version + v0, + &price.usd.timeindexes_to_price_close, + 
&lookback.price_10y_ago, + ); + + // CAGR (computed) + let _2y_cagr = ComputedVecsFromDateIndex::forced_import( + db, + "2y_cagr", + Source::Compute, + version + v0, + indexes, + last, + )?; + let _3y_cagr = ComputedVecsFromDateIndex::forced_import( + db, + "3y_cagr", + Source::Compute, + version + v0, + indexes, + last, + )?; + let _4y_cagr = ComputedVecsFromDateIndex::forced_import( + db, + "4y_cagr", + Source::Compute, + version + v0, + indexes, + last, + )?; + let _5y_cagr = ComputedVecsFromDateIndex::forced_import( + db, + "5y_cagr", + Source::Compute, + version + v0, + indexes, + last, + )?; + let _6y_cagr = ComputedVecsFromDateIndex::forced_import( + db, + "6y_cagr", + Source::Compute, + version + v0, + indexes, + last, + )?; + let _8y_cagr = ComputedVecsFromDateIndex::forced_import( + db, + "8y_cagr", + Source::Compute, + version + v0, + indexes, + last, + )?; + let _10y_cagr = ComputedVecsFromDateIndex::forced_import( + db, + "10y_cagr", + Source::Compute, + version + v0, + indexes, + last, + )?; + + // Returns standard deviation (computed from 1d returns) + let indexes_to_1d_returns_1w_sd = + ComputedStandardDeviationVecsFromDateIndex::forced_import( + db, + "1d_returns_1w_sd", + 7, + Source::Compute, + version + v1, + indexes, + StandardDeviationVecsOptions::default(), + None, + )?; + let indexes_to_1d_returns_1m_sd = + ComputedStandardDeviationVecsFromDateIndex::forced_import( + db, + "1d_returns_1m_sd", + 30, + Source::Compute, + version + v1, + indexes, + StandardDeviationVecsOptions::default(), + None, + )?; + let indexes_to_1d_returns_1y_sd = + ComputedStandardDeviationVecsFromDateIndex::forced_import( + db, + "1d_returns_1y_sd", + 365, + Source::Compute, + version + v1, + indexes, + StandardDeviationVecsOptions::default(), + None, + )?; + + // Downside returns and deviation (for Sortino ratio) + let dateindex_to_downside_returns = + EagerVec::forced_import(db, "downside_returns", version + v0)?; + let indexes_to_downside_1w_sd = 
ComputedStandardDeviationVecsFromDateIndex::forced_import( + db, + "downside_1w_sd", + 7, + Source::Compute, + version + v1, + indexes, + StandardDeviationVecsOptions::default(), + None, + )?; + let indexes_to_downside_1m_sd = ComputedStandardDeviationVecsFromDateIndex::forced_import( + db, + "downside_1m_sd", + 30, + Source::Compute, + version + v1, + indexes, + StandardDeviationVecsOptions::default(), + None, + )?; + let indexes_to_downside_1y_sd = ComputedStandardDeviationVecsFromDateIndex::forced_import( + db, + "downside_1y_sd", + 365, + Source::Compute, + version + v1, + indexes, + StandardDeviationVecsOptions::default(), + None, + )?; + + Ok(Self { + _1d_price_returns, + _1w_price_returns, + _1m_price_returns, + _3m_price_returns, + _6m_price_returns, + _1y_price_returns, + _2y_price_returns, + _3y_price_returns, + _4y_price_returns, + _5y_price_returns, + _6y_price_returns, + _8y_price_returns, + _10y_price_returns, + + _2y_cagr, + _3y_cagr, + _4y_cagr, + _5y_cagr, + _6y_cagr, + _8y_cagr, + _10y_cagr, + + indexes_to_1d_returns_1w_sd, + indexes_to_1d_returns_1m_sd, + indexes_to_1d_returns_1y_sd, + + dateindex_to_downside_returns, + indexes_to_downside_1w_sd, + indexes_to_downside_1m_sd, + indexes_to_downside_1y_sd, + }) + } +} diff --git a/crates/brk_computer/src/market/returns/mod.rs b/crates/brk_computer/src/market/returns/mod.rs new file mode 100644 index 000000000..1136f9ebd --- /dev/null +++ b/crates/brk_computer/src/market/returns/mod.rs @@ -0,0 +1,5 @@ +mod compute; +mod import; +mod vecs; + +pub use vecs::Vecs; diff --git a/crates/brk_computer/src/market/history/vecs.rs b/crates/brk_computer/src/market/returns/vecs.rs similarity index 60% rename from crates/brk_computer/src/market/history/vecs.rs rename to crates/brk_computer/src/market/returns/vecs.rs index 956fac33b..7626dfd2b 100644 --- a/crates/brk_computer/src/market/history/vecs.rs +++ b/crates/brk_computer/src/market/returns/vecs.rs @@ -1,25 +1,16 @@ use brk_traversable::Traversable; -use 
brk_types::{Close, Dollars, StoredF32}; +use brk_types::{Close, DateIndex, Dollars, StoredF32}; +use vecdb::{EagerVec, PcoVec}; -use crate::grouped::{ComputedVecsFromDateIndex, LazyVecsFrom2FromDateIndex}; +use crate::internal::{ + ComputedStandardDeviationVecsFromDateIndex, ComputedVecsFromDateIndex, + LazyVecsFrom2FromDateIndex, +}; -/// Historical price lookback, returns, and CAGR metrics +/// Price returns, CAGR, and returns standard deviation metrics #[derive(Clone, Traversable)] pub struct Vecs { - pub price_1d_ago: ComputedVecsFromDateIndex, - pub price_1w_ago: ComputedVecsFromDateIndex, - pub price_1m_ago: ComputedVecsFromDateIndex, - pub price_3m_ago: ComputedVecsFromDateIndex, - pub price_6m_ago: ComputedVecsFromDateIndex, - pub price_1y_ago: ComputedVecsFromDateIndex, - pub price_2y_ago: ComputedVecsFromDateIndex, - pub price_3y_ago: ComputedVecsFromDateIndex, - pub price_4y_ago: ComputedVecsFromDateIndex, - pub price_5y_ago: ComputedVecsFromDateIndex, - pub price_6y_ago: ComputedVecsFromDateIndex, - pub price_8y_ago: ComputedVecsFromDateIndex, - pub price_10y_ago: ComputedVecsFromDateIndex, - + // Price returns (lazy, from price.close and history.price_*_ago) pub _1d_price_returns: LazyVecsFrom2FromDateIndex, Dollars>, pub _1w_price_returns: LazyVecsFrom2FromDateIndex, Dollars>, pub _1m_price_returns: LazyVecsFrom2FromDateIndex, Dollars>, @@ -34,6 +25,7 @@ pub struct Vecs { pub _8y_price_returns: LazyVecsFrom2FromDateIndex, Dollars>, pub _10y_price_returns: LazyVecsFrom2FromDateIndex, Dollars>, + // CAGR (computed from returns) pub _2y_cagr: ComputedVecsFromDateIndex, pub _3y_cagr: ComputedVecsFromDateIndex, pub _4y_cagr: ComputedVecsFromDateIndex, @@ -41,4 +33,15 @@ pub struct Vecs { pub _6y_cagr: ComputedVecsFromDateIndex, pub _8y_cagr: ComputedVecsFromDateIndex, pub _10y_cagr: ComputedVecsFromDateIndex, + + // Returns standard deviation (computed from 1d returns) + pub indexes_to_1d_returns_1w_sd: ComputedStandardDeviationVecsFromDateIndex, + pub 
indexes_to_1d_returns_1m_sd: ComputedStandardDeviationVecsFromDateIndex, + pub indexes_to_1d_returns_1y_sd: ComputedStandardDeviationVecsFromDateIndex, + + // Downside returns and deviation (for Sortino ratio) + pub dateindex_to_downside_returns: EagerVec>, + pub indexes_to_downside_1w_sd: ComputedStandardDeviationVecsFromDateIndex, + pub indexes_to_downside_1m_sd: ComputedStandardDeviationVecsFromDateIndex, + pub indexes_to_downside_1y_sd: ComputedStandardDeviationVecsFromDateIndex, } diff --git a/crates/brk_computer/src/market/volatility/compute.rs b/crates/brk_computer/src/market/volatility/compute.rs deleted file mode 100644 index 63120d825..000000000 --- a/crates/brk_computer/src/market/volatility/compute.rs +++ /dev/null @@ -1,38 +0,0 @@ -use brk_error::Result; -use brk_types::{DateIndex, StoredF32}; -use vecdb::{CollectableVec, Exit}; - -use super::Vecs; -use crate::ComputeIndexes; - -impl Vecs { - pub fn compute( - &mut self, - starting_indexes: &ComputeIndexes, - exit: &Exit, - _1d_price_returns_dateindex: &V, - ) -> Result<()> - where - V: CollectableVec, - { - self.indexes_to_1d_returns_1w_sd.compute_all( - starting_indexes, - exit, - _1d_price_returns_dateindex, - )?; - - self.indexes_to_1d_returns_1m_sd.compute_all( - starting_indexes, - exit, - _1d_price_returns_dateindex, - )?; - - self.indexes_to_1d_returns_1y_sd.compute_all( - starting_indexes, - exit, - _1d_price_returns_dateindex, - )?; - - Ok(()) - } -} diff --git a/crates/brk_computer/src/market/volatility/import.rs b/crates/brk_computer/src/market/volatility/import.rs index 1ed5767fa..06e3b3ba9 100644 --- a/crates/brk_computer/src/market/volatility/import.rs +++ b/crates/brk_computer/src/market/volatility/import.rs @@ -1,101 +1,150 @@ -use brk_error::Result; use brk_types::Version; -use vecdb::{Database, IterableCloneableVec}; +use vecdb::{IterableCloneableVec, LazyVecFrom2}; +use super::super::returns; use super::Vecs; -use crate::{ - grouped::{ - ComputedStandardDeviationVecsFromDateIndex, 
LazyVecsFromDateIndex, Source, - StandardDeviationVecsOptions, StoredF32TimesSqrt7, StoredF32TimesSqrt30, - StoredF32TimesSqrt365, - }, - indexes, +use crate::internal::{ + LazyVecsFromDateIndex, RatioF32, StoredF32TimesSqrt30, StoredF32TimesSqrt365, StoredF32TimesSqrt7, }; impl Vecs { - pub fn forced_import(db: &Database, version: Version, indexes: &indexes::Vecs) -> Result { - let v1 = Version::ONE; + pub fn forced_import(version: Version, returns: &returns::Vecs) -> Self { let v2 = Version::TWO; - let indexes_to_1d_returns_1w_sd = - ComputedStandardDeviationVecsFromDateIndex::forced_import( - db, - "1d_returns_1w_sd", - 7, - Source::Compute, - version + v1, - indexes, - StandardDeviationVecsOptions::default(), - None, - )?; - - let indexes_to_1d_returns_1m_sd = - ComputedStandardDeviationVecsFromDateIndex::forced_import( - db, - "1d_returns_1m_sd", - 30, - Source::Compute, - version + v1, - indexes, - StandardDeviationVecsOptions::default(), - None, - )?; - - let indexes_to_1d_returns_1y_sd = - ComputedStandardDeviationVecsFromDateIndex::forced_import( - db, - "1d_returns_1y_sd", - 365, - Source::Compute, - version + v1, - indexes, - StandardDeviationVecsOptions::default(), - None, - )?; - let indexes_to_price_1w_volatility = LazyVecsFromDateIndex::from_computed::( "price_1w_volatility", version + v2, - indexes_to_1d_returns_1w_sd + returns + .indexes_to_1d_returns_1w_sd .sd .dateindex .as_ref() .map(|v| v.boxed_clone()), - &indexes_to_1d_returns_1w_sd.sd, + &returns.indexes_to_1d_returns_1w_sd.sd, ); let indexes_to_price_1m_volatility = LazyVecsFromDateIndex::from_computed::( "price_1m_volatility", version + v2, - indexes_to_1d_returns_1m_sd + returns + .indexes_to_1d_returns_1m_sd .sd .dateindex .as_ref() .map(|v| v.boxed_clone()), - &indexes_to_1d_returns_1m_sd.sd, + &returns.indexes_to_1d_returns_1m_sd.sd, ); let indexes_to_price_1y_volatility = LazyVecsFromDateIndex::from_computed::( "price_1y_volatility", version + v2, - indexes_to_1d_returns_1y_sd + 
returns + .indexes_to_1d_returns_1y_sd .sd .dateindex .as_ref() .map(|v| v.boxed_clone()), - &indexes_to_1d_returns_1y_sd.sd, + &returns.indexes_to_1d_returns_1y_sd.sd, ); - Ok(Self { - indexes_to_1d_returns_1w_sd, - indexes_to_1d_returns_1m_sd, - indexes_to_1d_returns_1y_sd, + let dateindex_to_sharpe_1w = returns + ._1w_price_returns + .dateindex + .as_ref() + .zip(indexes_to_price_1w_volatility.dateindex.as_ref()) + .map(|(ret, vol)| { + LazyVecFrom2::transformed::( + "sharpe_1w", + version + v2, + ret.boxed_clone(), + vol.boxed_clone(), + ) + }); + + let dateindex_to_sharpe_1m = returns + ._1m_price_returns + .dateindex + .as_ref() + .zip(indexes_to_price_1m_volatility.dateindex.as_ref()) + .map(|(ret, vol)| { + LazyVecFrom2::transformed::( + "sharpe_1m", + version + v2, + ret.boxed_clone(), + vol.boxed_clone(), + ) + }); + + let dateindex_to_sharpe_1y = returns + ._1y_price_returns + .dateindex + .as_ref() + .zip(indexes_to_price_1y_volatility.dateindex.as_ref()) + .map(|(ret, vol)| { + LazyVecFrom2::transformed::( + "sharpe_1y", + version + v2, + ret.boxed_clone(), + vol.boxed_clone(), + ) + }); + + // Sortino ratio = returns / downside volatility + let dateindex_to_sortino_1w = returns + ._1w_price_returns + .dateindex + .as_ref() + .zip(returns.indexes_to_downside_1w_sd.sd.dateindex.as_ref()) + .map(|(ret, downside_sd)| { + LazyVecFrom2::transformed::( + "sortino_1w", + version + v2, + ret.boxed_clone(), + downside_sd.boxed_clone(), + ) + }); + + let dateindex_to_sortino_1m = returns + ._1m_price_returns + .dateindex + .as_ref() + .zip(returns.indexes_to_downside_1m_sd.sd.dateindex.as_ref()) + .map(|(ret, downside_sd)| { + LazyVecFrom2::transformed::( + "sortino_1m", + version + v2, + ret.boxed_clone(), + downside_sd.boxed_clone(), + ) + }); + + let dateindex_to_sortino_1y = returns + ._1y_price_returns + .dateindex + .as_ref() + .zip(returns.indexes_to_downside_1y_sd.sd.dateindex.as_ref()) + .map(|(ret, downside_sd)| { + LazyVecFrom2::transformed::( + 
"sortino_1y", + version + v2, + ret.boxed_clone(), + downside_sd.boxed_clone(), + ) + }); + + Self { indexes_to_price_1w_volatility, indexes_to_price_1m_volatility, indexes_to_price_1y_volatility, - }) + dateindex_to_sharpe_1w, + dateindex_to_sharpe_1m, + dateindex_to_sharpe_1y, + dateindex_to_sortino_1w, + dateindex_to_sortino_1m, + dateindex_to_sortino_1y, + } } } diff --git a/crates/brk_computer/src/market/volatility/mod.rs b/crates/brk_computer/src/market/volatility/mod.rs index 1136f9ebd..f8623047a 100644 --- a/crates/brk_computer/src/market/volatility/mod.rs +++ b/crates/brk_computer/src/market/volatility/mod.rs @@ -1,4 +1,3 @@ -mod compute; mod import; mod vecs; diff --git a/crates/brk_computer/src/market/volatility/vecs.rs b/crates/brk_computer/src/market/volatility/vecs.rs index cf1b9b20a..3b59a53f0 100644 --- a/crates/brk_computer/src/market/volatility/vecs.rs +++ b/crates/brk_computer/src/market/volatility/vecs.rs @@ -1,15 +1,21 @@ use brk_traversable::Traversable; -use brk_types::StoredF32; +use brk_types::{DateIndex, StoredF32}; +use vecdb::LazyVecFrom2; -use crate::grouped::{ComputedStandardDeviationVecsFromDateIndex, LazyVecsFromDateIndex}; +use crate::internal::LazyVecsFromDateIndex; -/// Volatility and standard deviation metrics +/// Price volatility metrics (derived from returns standard deviation) #[derive(Clone, Traversable)] pub struct Vecs { - pub indexes_to_1d_returns_1w_sd: ComputedStandardDeviationVecsFromDateIndex, - pub indexes_to_1d_returns_1m_sd: ComputedStandardDeviationVecsFromDateIndex, - pub indexes_to_1d_returns_1y_sd: ComputedStandardDeviationVecsFromDateIndex, pub indexes_to_price_1w_volatility: LazyVecsFromDateIndex, pub indexes_to_price_1m_volatility: LazyVecsFromDateIndex, pub indexes_to_price_1y_volatility: LazyVecsFromDateIndex, + + pub dateindex_to_sharpe_1w: Option>, + pub dateindex_to_sharpe_1m: Option>, + pub dateindex_to_sharpe_1y: Option>, + + pub dateindex_to_sortino_1w: Option>, + pub dateindex_to_sortino_1m: 
Option>, + pub dateindex_to_sortino_1y: Option>, } diff --git a/crates/brk_computer/src/outputs/compute.rs b/crates/brk_computer/src/outputs/compute.rs new file mode 100644 index 000000000..7c71e3e9a --- /dev/null +++ b/crates/brk_computer/src/outputs/compute.rs @@ -0,0 +1,33 @@ +use brk_error::Result; +use brk_indexer::Indexer; +use vecdb::Exit; + +use super::Vecs; +use crate::{indexes, inputs, scripts, ComputeIndexes}; + +impl Vecs { + pub fn compute( + &mut self, + indexer: &Indexer, + indexes: &indexes::Vecs, + inputs: &inputs::Vecs, + scripts: &scripts::Vecs, + starting_indexes: &ComputeIndexes, + exit: &Exit, + ) -> Result<()> { + self.spent + .compute(&self.db, indexer, inputs, starting_indexes, exit)?; + self.count.compute( + indexer, + indexes, + &inputs.count, + &scripts.count, + starting_indexes, + exit, + )?; + + let _lock = exit.lock(); + self.db.compact()?; + Ok(()) + } +} diff --git a/crates/brk_computer/src/outputs/count/compute.rs b/crates/brk_computer/src/outputs/count/compute.rs new file mode 100644 index 000000000..0ab81f930 --- /dev/null +++ b/crates/brk_computer/src/outputs/count/compute.rs @@ -0,0 +1,77 @@ +use brk_error::Result; +use brk_indexer::Indexer; +use brk_types::{Height, StoredU64}; +use vecdb::{Exit, TypedVecIterator}; + +use super::Vecs; +use crate::{indexes, inputs, scripts, ComputeIndexes}; + +impl Vecs { + pub fn compute( + &mut self, + indexer: &Indexer, + indexes: &indexes::Vecs, + inputs_count: &inputs::CountVecs, + scripts_count: &scripts::CountVecs, + starting_indexes: &ComputeIndexes, + exit: &Exit, + ) -> Result<()> { + self.indexes_to_count.compute_rest( + indexer, + indexes, + starting_indexes, + exit, + Some(&indexes.transaction.txindex_to_output_count), + )?; + + self.indexes_to_utxo_count + .compute_all(indexes, starting_indexes, exit, |v| { + let mut input_count_iter = inputs_count + .indexes_to_count + .height + .unwrap_cumulative() + .into_iter(); + let mut opreturn_count_iter = scripts_count + 
.indexes_to_opreturn_count + .height_extra + .unwrap_cumulative() + .into_iter(); + v.compute_transform( + starting_indexes.height, + self.indexes_to_count.height.unwrap_cumulative(), + |(h, output_count, ..)| { + let input_count = input_count_iter.get_unwrap(h); + let opreturn_count = opreturn_count_iter.get_unwrap(h); + let block_count = u64::from(h + 1_usize); + // -1 > genesis output is unspendable + let mut utxo_count = + *output_count - (*input_count - block_count) - *opreturn_count - 1; + + // txid dup: e3bf3d07d4b0375638d5f1db5255fe07ba2c4cb067cd81b84ee974b6585fb468 + // Block 91_722 https://mempool.space/block/00000000000271a2dc26e7667f8419f2e15416dc6955e5a6c6cdf3f2574dd08e + // Block 91_880 https://mempool.space/block/00000000000743f190a18c5577a3c2d2a1f610ae9601ac046a38084ccb7cd721 + // + // txid dup: d5d27987d2a3dfc724e359870c6644b40e497bdc0589a033220fe15429d88599 + // Block 91_812 https://mempool.space/block/00000000000af0aed4792b1acee3d966af36cf5def14935db8de83d6f9306f2f + // Block 91_842 https://mempool.space/block/00000000000a4d0a398161ffc163c503763b1f4360639393e0e4c8e300e0caec + // + // Warning: Dups invalidate the previous coinbase according to + // https://chainquery.com/bitcoin-cli/gettxoutsetinfo + + if h >= Height::new(91_842) { + utxo_count -= 1; + } + if h >= Height::new(91_880) { + utxo_count -= 1; + } + + (h, StoredU64::from(utxo_count)) + }, + exit, + )?; + Ok(()) + })?; + + Ok(()) + } +} diff --git a/crates/brk_computer/src/outputs/count/import.rs b/crates/brk_computer/src/outputs/count/import.rs new file mode 100644 index 000000000..6975a1bc2 --- /dev/null +++ b/crates/brk_computer/src/outputs/count/import.rs @@ -0,0 +1,41 @@ +use brk_error::Result; +use brk_types::Version; +use vecdb::{Database, IterableCloneableVec}; + +use super::Vecs; +use crate::{ + indexes, + internal::{ComputedVecsFromHeight, ComputedVecsFromTxindex, Source, VecBuilderOptions}, +}; + +impl Vecs { + pub fn forced_import(db: &Database, version: Version, indexes: 
&indexes::Vecs) -> Result { + let full_stats = || { + VecBuilderOptions::default() + .add_average() + .add_minmax() + .add_percentiles() + .add_sum() + .add_cumulative() + }; + + Ok(Self { + indexes_to_count: ComputedVecsFromTxindex::forced_import( + db, + "output_count", + Source::Vec(indexes.transaction.txindex_to_output_count.boxed_clone()), + version + Version::ZERO, + indexes, + full_stats(), + )?, + indexes_to_utxo_count: ComputedVecsFromHeight::forced_import( + db, + "exact_utxo_count", + Source::Compute, + version + Version::ZERO, + indexes, + full_stats(), + )?, + }) + } +} diff --git a/crates/brk_computer/src/outputs/count/mod.rs b/crates/brk_computer/src/outputs/count/mod.rs new file mode 100644 index 000000000..1136f9ebd --- /dev/null +++ b/crates/brk_computer/src/outputs/count/mod.rs @@ -0,0 +1,5 @@ +mod compute; +mod import; +mod vecs; + +pub use vecs::Vecs; diff --git a/crates/brk_computer/src/outputs/count/vecs.rs b/crates/brk_computer/src/outputs/count/vecs.rs new file mode 100644 index 000000000..d16d76385 --- /dev/null +++ b/crates/brk_computer/src/outputs/count/vecs.rs @@ -0,0 +1,10 @@ +use brk_traversable::Traversable; +use brk_types::StoredU64; + +use crate::internal::{ComputedVecsFromHeight, ComputedVecsFromTxindex}; + +#[derive(Clone, Traversable)] +pub struct Vecs { + pub indexes_to_count: ComputedVecsFromTxindex, + pub indexes_to_utxo_count: ComputedVecsFromHeight, +} diff --git a/crates/brk_computer/src/outputs/import.rs b/crates/brk_computer/src/outputs/import.rs new file mode 100644 index 000000000..324c2a9df --- /dev/null +++ b/crates/brk_computer/src/outputs/import.rs @@ -0,0 +1,36 @@ +use std::path::Path; + +use brk_error::Result; +use brk_traversable::Traversable; +use brk_types::Version; +use vecdb::{Database, PAGE_SIZE}; + +use super::{CountVecs, SpentVecs, Vecs}; +use crate::indexes; + +impl Vecs { + pub fn forced_import( + parent_path: &Path, + parent_version: Version, + indexes: &indexes::Vecs, + ) -> Result { + let db = 
Database::open(&parent_path.join(super::DB_NAME))?; + db.set_min_len(PAGE_SIZE * 10_000_000)?; + + let version = parent_version + Version::ZERO; + + let spent = SpentVecs::forced_import(&db, version)?; + let count = CountVecs::forced_import(&db, version, indexes)?; + + let this = Self { db, spent, count }; + + this.db.retain_regions( + this.iter_any_exportable() + .flat_map(|v| v.region_names()) + .collect(), + )?; + this.db.compact()?; + + Ok(this) + } +} diff --git a/crates/brk_computer/src/outputs/mod.rs b/crates/brk_computer/src/outputs/mod.rs new file mode 100644 index 000000000..ab3e54d27 --- /dev/null +++ b/crates/brk_computer/src/outputs/mod.rs @@ -0,0 +1,22 @@ +pub mod count; +pub mod spent; + +mod compute; +mod import; + +use brk_traversable::Traversable; +use vecdb::Database; + +pub use count::Vecs as CountVecs; +pub use spent::Vecs as SpentVecs; + +pub const DB_NAME: &str = "outputs"; + +#[derive(Clone, Traversable)] +pub struct Vecs { + #[traversable(skip)] + pub(crate) db: Database, + + pub spent: SpentVecs, + pub count: CountVecs, +} diff --git a/crates/brk_computer/src/txouts.rs b/crates/brk_computer/src/outputs/spent/compute.rs similarity index 71% rename from crates/brk_computer/src/txouts.rs rename to crates/brk_computer/src/outputs/spent/compute.rs index 7784c394f..7853ba9be 100644 --- a/crates/brk_computer/src/txouts.rs +++ b/crates/brk_computer/src/outputs/spent/compute.rs @@ -1,65 +1,20 @@ -use std::path::Path; - use brk_error::Result; use brk_indexer::Indexer; -use brk_traversable::Traversable; -use brk_types::{Height, TxInIndex, TxOutIndex, Version}; +use brk_types::{Height, TxInIndex, TxOutIndex}; use log::info; -use vecdb::{ - AnyStoredVec, AnyVec, BytesVec, Database, Exit, GenericStoredVec, ImportableVec, PAGE_SIZE, - Stamp, TypedVecIterator, VecIndex, -}; +use vecdb::{AnyStoredVec, AnyVec, Database, Exit, GenericStoredVec, Stamp, TypedVecIterator, VecIndex}; -use super::{ComputeIndexes, txins}; +use super::Vecs; +use crate::{inputs, 
ComputeIndexes}; -pub const DB_NAME: &str = "txouts"; const HEIGHT_BATCH: u32 = 10_000; -#[derive(Clone, Traversable)] -pub struct Vecs { - db: Database, - pub txoutindex_to_txinindex: BytesVec, -} - impl Vecs { - pub fn forced_import(parent_path: &Path, parent_version: Version) -> Result { - let db = Database::open(&parent_path.join(DB_NAME))?; - db.set_min_len(PAGE_SIZE * 10_000_000)?; - - let version = parent_version + Version::ZERO; - - let this = Self { - txoutindex_to_txinindex: BytesVec::forced_import(&db, "txinindex", version)?, - db, - }; - - this.db.retain_regions( - this.iter_any_exportable() - .flat_map(|v| v.region_names()) - .collect(), - )?; - this.db.compact()?; - - Ok(this) - } - pub fn compute( &mut self, + db: &Database, indexer: &Indexer, - txins: &txins::Vecs, - starting_indexes: &ComputeIndexes, - exit: &Exit, - ) -> Result<()> { - self.compute_(indexer, txins, starting_indexes, exit)?; - let _lock = exit.lock(); - self.db.compact()?; - Ok(()) - } - - fn compute_( - &mut self, - indexer: &Indexer, - txins: &txins::Vecs, + inputs: &inputs::Vecs, starting_indexes: &ComputeIndexes, exit: &Exit, ) -> Result<()> { @@ -82,7 +37,7 @@ impl Vecs { let mut height_to_first_txoutindex = indexer.vecs.txout.height_to_first_txoutindex.iter()?; let mut height_to_first_txinindex = indexer.vecs.txin.height_to_first_txinindex.iter()?; - let mut txinindex_to_txoutindex = txins.txinindex_to_txoutindex.iter()?; + let mut txinindex_to_txoutindex = inputs.spent.txinindex_to_txoutindex.iter()?; // Find starting height from min_txoutindex let mut min_height = Height::ZERO; @@ -124,7 +79,7 @@ impl Vecs { .get_unwrap(batch_start_height) .to_usize(); let txin_end = if batch_end_height >= target_height { - txins.txinindex_to_txoutindex.len() + inputs.spent.txinindex_to_txoutindex.len() } else { height_to_first_txinindex .get_unwrap(batch_end_height + 1_u32) @@ -157,7 +112,7 @@ impl Vecs { "TxOuts: {:.2}%", batch_end_height.to_usize() as f64 / target_height.to_usize() as 
f64 * 100.0 ); - self.db.flush()?; + db.flush()?; } batch_start_height = batch_end_height + 1_u32; @@ -166,7 +121,7 @@ impl Vecs { let _lock = exit.lock(); self.txoutindex_to_txinindex .stamped_write_with_changes(Stamp::from(target_height))?; - self.db.flush()?; + db.flush()?; Ok(()) } diff --git a/crates/brk_computer/src/outputs/spent/import.rs b/crates/brk_computer/src/outputs/spent/import.rs new file mode 100644 index 000000000..205d100da --- /dev/null +++ b/crates/brk_computer/src/outputs/spent/import.rs @@ -0,0 +1,13 @@ +use brk_error::Result; +use brk_types::Version; +use vecdb::{BytesVec, Database, ImportableVec}; + +use super::Vecs; + +impl Vecs { + pub fn forced_import(db: &Database, version: Version) -> Result { + Ok(Self { + txoutindex_to_txinindex: BytesVec::forced_import(db, "txinindex", version)?, + }) + } +} diff --git a/crates/brk_computer/src/outputs/spent/mod.rs b/crates/brk_computer/src/outputs/spent/mod.rs new file mode 100644 index 000000000..1136f9ebd --- /dev/null +++ b/crates/brk_computer/src/outputs/spent/mod.rs @@ -0,0 +1,5 @@ +mod compute; +mod import; +mod vecs; + +pub use vecs::Vecs; diff --git a/crates/brk_computer/src/outputs/spent/vecs.rs b/crates/brk_computer/src/outputs/spent/vecs.rs new file mode 100644 index 000000000..41543ec83 --- /dev/null +++ b/crates/brk_computer/src/outputs/spent/vecs.rs @@ -0,0 +1,8 @@ +use brk_traversable::Traversable; +use brk_types::{TxInIndex, TxOutIndex}; +use vecdb::BytesVec; + +#[derive(Clone, Traversable)] +pub struct Vecs { + pub txoutindex_to_txinindex: BytesVec, +} diff --git a/crates/brk_computer/src/pools/mod.rs b/crates/brk_computer/src/pools/mod.rs index 13acba37e..e4bc57cc5 100644 --- a/crates/brk_computer/src/pools/mod.rs +++ b/crates/brk_computer/src/pools/mod.rs @@ -14,7 +14,7 @@ use vecdb::{ mod vecs; use crate::{ - chain, + blocks, transactions, indexes::{self, ComputeIndexes}, price, }; @@ -36,7 +36,8 @@ impl Vecs { parent_version: Version, indexes: &indexes::Vecs, price: 
Option<&price::Vecs>, - chain: &chain::Vecs, + blocks: &blocks::Vecs, + transactions: &transactions::Vecs, ) -> Result { let db = Database::open(&parent_path.join(DB_NAME))?; db.set_min_len(PAGE_SIZE * 1_000_000)?; @@ -55,7 +56,8 @@ impl Vecs { version + Version::ZERO, indexes, price, - chain, + blocks, + transactions, ) .map(|vecs| (pool.slug, vecs)) }) @@ -112,9 +114,8 @@ impl Vecs { starting_indexes: &ComputeIndexes, exit: &Exit, ) -> Result<()> { - self.height_to_pool.validate_computed_version_or_reset( - self.height_to_pool.version() + indexer.stores.height_to_coinbase_tag.version(), - )?; + self.height_to_pool + .validate_computed_version_or_reset(indexer.stores.height_to_coinbase_tag.version())?; let mut height_to_first_txindex_iter = indexer.vecs.tx.height_to_first_txindex.iter()?; let mut txindex_to_first_txoutindex_iter = @@ -215,7 +216,8 @@ impl Vecs { Ok(()) })?; - self.height_to_pool.safe_write(exit)?; + let _lock = exit.lock(); + self.height_to_pool.write()?; Ok(()) } } diff --git a/crates/brk_computer/src/pools/vecs.rs b/crates/brk_computer/src/pools/vecs.rs index c6b4c6b56..fd8292b53 100644 --- a/crates/brk_computer/src/pools/vecs.rs +++ b/crates/brk_computer/src/pools/vecs.rs @@ -7,8 +7,8 @@ use vecdb::{ }; use crate::{ - chain, - grouped::{ + blocks, transactions, + internal::{ ComputedValueVecsFromHeight, ComputedVecsFromDateIndex, ComputedVecsFromHeight, DollarsPlus, LazyValueVecsFrom2FromHeight, LazyVecsFrom2FromDateIndex, LazyVecsFrom2FromHeight, MaskSats, PercentageU32F32, SatsPlus, SatsPlusToBitcoin, Source, @@ -46,7 +46,8 @@ impl Vecs { parent_version: Version, indexes: &indexes::Vecs, price: Option<&price::Vecs>, - chain: &chain::Vecs, + blocks: &blocks::Vecs, + transactions: &transactions::Vecs, ) -> Result { let suffix = |s: &str| format!("{}_{s}", slug); let compute_dollars = price.is_some(); @@ -89,8 +90,8 @@ impl Vecs { .as_ref() .unwrap() .boxed_clone(), - chain - .coinbase.indexes_to_subsidy + blocks + .rewards.indexes_to_subsidy 
.sats .height .as_ref() @@ -116,7 +117,7 @@ impl Vecs { .as_ref() .unwrap() .boxed_clone(), - chain.transaction.indexes_to_fee.sats.height.unwrap_sum().boxed_clone(), + transactions.fees.indexes_to_fee.sats.height.unwrap_sum().boxed_clone(), ); let indexes_to_fee = ComputedValueVecsFromHeight::forced_import( @@ -138,14 +139,14 @@ impl Vecs { .as_ref() .unwrap() .boxed_clone(), - chain - .block.indexes_to_block_count + blocks + .count.indexes_to_block_count .height .as_ref() .unwrap() .boxed_clone(), &indexes_to_blocks_mined, - &chain.block.indexes_to_block_count, + &blocks.count.indexes_to_block_count, ), indexes_to_1d_dominance: LazyVecsFrom2FromHeight::from_computed::( &suffix("1d_dominance"), @@ -155,32 +156,32 @@ impl Vecs { .as_ref() .unwrap() .boxed_clone(), - chain - .block.indexes_to_block_count + blocks + .count.indexes_to_block_count .height .as_ref() .unwrap() .boxed_clone(), &indexes_to_blocks_mined, - &chain.block.indexes_to_block_count, + &blocks.count.indexes_to_block_count, ), indexes_to_1w_dominance: LazyVecsFrom2FromDateIndex::from_computed::( &suffix("1w_dominance"), version, &indexes_to_1w_blocks_mined, - &chain.block.indexes_to_1w_block_count, + &blocks.count.indexes_to_1w_block_count, ), indexes_to_1m_dominance: LazyVecsFrom2FromDateIndex::from_computed::( &suffix("1m_dominance"), version, &indexes_to_1m_blocks_mined, - &chain.block.indexes_to_1m_block_count, + &blocks.count.indexes_to_1m_block_count, ), indexes_to_1y_dominance: LazyVecsFrom2FromDateIndex::from_computed::( &suffix("1y_dominance"), version, &indexes_to_1y_blocks_mined, - &chain.block.indexes_to_1y_block_count, + &blocks.count.indexes_to_1y_block_count, ), slug, indexes_to_blocks_mined, diff --git a/crates/brk_computer/src/blks.rs b/crates/brk_computer/src/positions.rs similarity index 89% rename from crates/brk_computer/src/blks.rs rename to crates/brk_computer/src/positions.rs index 93ef03b80..4554f0265 100644 --- a/crates/brk_computer/src/blks.rs +++ 
b/crates/brk_computer/src/positions.rs @@ -12,7 +12,7 @@ use vecdb::{ use super::ComputeIndexes; -pub const DB_NAME: &str = "blks"; +pub const DB_NAME: &str = "positions"; #[derive(Clone, Traversable)] pub struct Vecs { @@ -66,6 +66,14 @@ impl Vecs { parser: &Reader, exit: &Exit, ) -> Result<()> { + // Validate computed versions against dependencies + let dep_version = indexer.vecs.tx.height_to_first_txindex.version() + + indexer.vecs.tx.txindex_to_height.version(); + self.height_to_position + .validate_computed_version_or_reset(dep_version)?; + self.txindex_to_position + .validate_computed_version_or_reset(dep_version)?; + let min_txindex = TxIndex::from(self.txindex_to_position.len()).min(starting_indexes.txindex); @@ -110,6 +118,7 @@ impl Vecs { self.height_to_position.flush()?; self.txindex_to_position.flush()?; } + Ok(()) })?; diff --git a/crates/brk_computer/src/price.rs b/crates/brk_computer/src/price.rs deleted file mode 100644 index 18c2ba1ff..000000000 --- a/crates/brk_computer/src/price.rs +++ /dev/null @@ -1,846 +0,0 @@ -use std::path::Path; - -use brk_error::Result; -use brk_traversable::Traversable; -use brk_types::{ - Cents, Close, DateIndex, DecadeIndex, DifficultyEpoch, Dollars, Height, High, Low, MonthIndex, - OHLCDollars, OHLCSats, Open, QuarterIndex, Sats, SemesterIndex, Version, WeekIndex, YearIndex, -}; -use vecdb::{BytesVec, Database, EagerVec, Exit, ImportableVec, PAGE_SIZE, PcoVec}; - -use crate::{fetched, grouped::Source, utils::OptionExt}; - -use super::{ - ComputeIndexes, - grouped::{ComputedVecsFromDateIndex, ComputedVecsFromHeightStrict, VecBuilderOptions}, - indexes, -}; - -pub const DB_NAME: &str = "price"; - -#[derive(Clone, Traversable)] -pub struct Vecs { - db: Database, - - pub dateindex_to_price_close_in_cents: EagerVec>>, - pub dateindex_to_price_high_in_cents: EagerVec>>, - pub dateindex_to_price_low_in_cents: EagerVec>>, - pub dateindex_to_price_ohlc: EagerVec>, - pub dateindex_to_price_ohlc_in_sats: EagerVec>, - pub 
dateindex_to_price_open_in_cents: EagerVec>>, - pub height_to_price_close_in_cents: EagerVec>>, - pub height_to_price_high_in_cents: EagerVec>>, - pub height_to_price_low_in_cents: EagerVec>>, - pub height_to_price_ohlc: EagerVec>, - pub height_to_price_ohlc_in_sats: EagerVec>, - pub height_to_price_open_in_cents: EagerVec>>, - pub timeindexes_to_price_close: ComputedVecsFromDateIndex>, - pub timeindexes_to_price_high: ComputedVecsFromDateIndex>, - pub timeindexes_to_price_low: ComputedVecsFromDateIndex>, - pub timeindexes_to_price_open: ComputedVecsFromDateIndex>, - pub timeindexes_to_price_open_in_sats: ComputedVecsFromDateIndex>, - pub timeindexes_to_price_high_in_sats: ComputedVecsFromDateIndex>, - pub timeindexes_to_price_low_in_sats: ComputedVecsFromDateIndex>, - pub timeindexes_to_price_close_in_sats: ComputedVecsFromDateIndex>, - pub chainindexes_to_price_close: ComputedVecsFromHeightStrict>, - pub chainindexes_to_price_high: ComputedVecsFromHeightStrict>, - pub chainindexes_to_price_low: ComputedVecsFromHeightStrict>, - pub chainindexes_to_price_open: ComputedVecsFromHeightStrict>, - pub chainindexes_to_price_open_in_sats: ComputedVecsFromHeightStrict>, - pub chainindexes_to_price_high_in_sats: ComputedVecsFromHeightStrict>, - pub chainindexes_to_price_low_in_sats: ComputedVecsFromHeightStrict>, - pub chainindexes_to_price_close_in_sats: ComputedVecsFromHeightStrict>, - pub weekindex_to_price_ohlc: EagerVec>, - pub weekindex_to_price_ohlc_in_sats: EagerVec>, - pub difficultyepoch_to_price_ohlc: EagerVec>, - pub difficultyepoch_to_price_ohlc_in_sats: EagerVec>, - pub monthindex_to_price_ohlc: EagerVec>, - pub monthindex_to_price_ohlc_in_sats: EagerVec>, - pub quarterindex_to_price_ohlc: EagerVec>, - pub quarterindex_to_price_ohlc_in_sats: EagerVec>, - pub semesterindex_to_price_ohlc: EagerVec>, - pub semesterindex_to_price_ohlc_in_sats: EagerVec>, - pub yearindex_to_price_ohlc: EagerVec>, - pub yearindex_to_price_ohlc_in_sats: EagerVec>, - // pub 
halvingepoch_to_price_ohlc: StorableVec, - // pub halvingepoch_to_price_ohlc_in_sats: StorableVec, - pub decadeindex_to_price_ohlc: EagerVec>, - pub decadeindex_to_price_ohlc_in_sats: EagerVec>, -} - -const VERSION: Version = Version::ZERO; -const VERSION_IN_SATS: Version = Version::ZERO; - -impl Vecs { - pub fn forced_import(parent: &Path, version: Version, indexes: &indexes::Vecs) -> Result { - let db = Database::open(&parent.join(DB_NAME))?; - db.set_min_len(PAGE_SIZE * 1_000_000)?; - - let v = version + VERSION; - let v_sats = version + VERSION + VERSION_IN_SATS; - - macro_rules! eager { - ($name:expr) => { - EagerVec::forced_import(&db, $name, v)? - }; - } - macro_rules! eager_sats { - ($name:expr) => { - EagerVec::forced_import(&db, $name, v_sats)? - }; - } - macro_rules! computed_di { - ($name:expr, $opts:expr) => { - ComputedVecsFromDateIndex::forced_import( - &db, - $name, - Source::Compute, - v, - indexes, - $opts, - )? - }; - } - macro_rules! computed_di_sats { - ($name:expr, $opts:expr) => { - ComputedVecsFromDateIndex::forced_import( - &db, - $name, - Source::Compute, - v_sats, - indexes, - $opts, - )? - }; - } - macro_rules! computed_h { - ($name:expr, $opts:expr) => { - ComputedVecsFromHeightStrict::forced_import(&db, $name, v, indexes, $opts)? - }; - } - macro_rules! computed_h_sats { - ($name:expr, $opts:expr) => { - ComputedVecsFromHeightStrict::forced_import(&db, $name, v_sats, indexes, $opts)? 
- }; - } - let first = || VecBuilderOptions::default().add_first(); - let last = || VecBuilderOptions::default().add_last(); - let min = || VecBuilderOptions::default().add_min(); - let max = || VecBuilderOptions::default().add_max(); - - let this = Self { - dateindex_to_price_ohlc: eager!("price_ohlc"), - dateindex_to_price_ohlc_in_sats: eager_sats!("price_ohlc_in_sats"), - dateindex_to_price_close_in_cents: eager!("price_close_in_cents"), - dateindex_to_price_high_in_cents: eager!("price_high_in_cents"), - dateindex_to_price_low_in_cents: eager!("price_low_in_cents"), - dateindex_to_price_open_in_cents: eager!("price_open_in_cents"), - height_to_price_ohlc: eager!("price_ohlc"), - height_to_price_ohlc_in_sats: eager_sats!("price_ohlc_in_sats"), - height_to_price_close_in_cents: eager!("price_close_in_cents"), - height_to_price_high_in_cents: eager!("price_high_in_cents"), - height_to_price_low_in_cents: eager!("price_low_in_cents"), - height_to_price_open_in_cents: eager!("price_open_in_cents"), - timeindexes_to_price_open: computed_di!("price_open", first()), - timeindexes_to_price_high: computed_di!("price_high", max()), - timeindexes_to_price_low: computed_di!("price_low", min()), - timeindexes_to_price_close: computed_di!("price_close", last()), - timeindexes_to_price_open_in_sats: computed_di_sats!("price_open_in_sats", first()), - timeindexes_to_price_high_in_sats: computed_di_sats!("price_high_in_sats", max()), - timeindexes_to_price_low_in_sats: computed_di_sats!("price_low_in_sats", min()), - timeindexes_to_price_close_in_sats: computed_di_sats!("price_close_in_sats", last()), - chainindexes_to_price_open: computed_h!("price_open", first()), - chainindexes_to_price_high: computed_h!("price_high", max()), - chainindexes_to_price_low: computed_h!("price_low", min()), - chainindexes_to_price_close: computed_h!("price_close", last()), - chainindexes_to_price_open_in_sats: computed_h_sats!("price_open_in_sats", first()), - chainindexes_to_price_high_in_sats: 
computed_h_sats!("price_high_in_sats", max()), - chainindexes_to_price_low_in_sats: computed_h_sats!("price_low_in_sats", min()), - chainindexes_to_price_close_in_sats: computed_h_sats!("price_close_in_sats", last()), - weekindex_to_price_ohlc: eager!("price_ohlc"), - weekindex_to_price_ohlc_in_sats: eager_sats!("price_ohlc_in_sats"), - difficultyepoch_to_price_ohlc: eager!("price_ohlc"), - difficultyepoch_to_price_ohlc_in_sats: eager_sats!("price_ohlc_in_sats"), - monthindex_to_price_ohlc: eager!("price_ohlc"), - monthindex_to_price_ohlc_in_sats: eager_sats!("price_ohlc_in_sats"), - quarterindex_to_price_ohlc: eager!("price_ohlc"), - quarterindex_to_price_ohlc_in_sats: eager_sats!("price_ohlc_in_sats"), - semesterindex_to_price_ohlc: eager!("price_ohlc"), - semesterindex_to_price_ohlc_in_sats: eager_sats!("price_ohlc_in_sats"), - yearindex_to_price_ohlc: eager!("price_ohlc"), - yearindex_to_price_ohlc_in_sats: eager_sats!("price_ohlc_in_sats"), - // halvingepoch_to_price_ohlc: StorableVec::forced_import(db, - // "halvingepoch_to_price_ohlc"), version + VERSION + Version::ZERO, format)?, - decadeindex_to_price_ohlc: eager!("price_ohlc"), - decadeindex_to_price_ohlc_in_sats: eager_sats!("price_ohlc_in_sats"), - - db, - }; - - this.db.retain_regions( - this.iter_any_exportable() - .flat_map(|v| v.region_names()) - .collect(), - )?; - this.db.compact()?; - - Ok(this) - } - - pub fn compute( - &mut self, - starting_indexes: &ComputeIndexes, - fetched: &fetched::Vecs, - exit: &Exit, - ) -> Result<()> { - self.compute_(starting_indexes, fetched, exit)?; - let _lock = exit.lock(); - self.db.compact()?; - Ok(()) - } - - fn compute_( - &mut self, - starting_indexes: &ComputeIndexes, - fetched: &fetched::Vecs, - exit: &Exit, - ) -> Result<()> { - self.height_to_price_open_in_cents.compute_transform( - starting_indexes.height, - &fetched.height_to_price_ohlc_in_cents, - |(di, ohlc, ..)| (di, ohlc.open), - exit, - )?; - - self.height_to_price_high_in_cents.compute_transform( - 
starting_indexes.height, - &fetched.height_to_price_ohlc_in_cents, - |(di, ohlc, ..)| (di, ohlc.high), - exit, - )?; - - self.height_to_price_low_in_cents.compute_transform( - starting_indexes.height, - &fetched.height_to_price_ohlc_in_cents, - |(di, ohlc, ..)| (di, ohlc.low), - exit, - )?; - - self.height_to_price_close_in_cents.compute_transform( - starting_indexes.height, - &fetched.height_to_price_ohlc_in_cents, - |(di, ohlc, ..)| (di, ohlc.close), - exit, - )?; - - self.height_to_price_ohlc.compute_transform( - starting_indexes.height, - &fetched.height_to_price_ohlc_in_cents, - |(h, cents, ..)| (h, OHLCDollars::from(cents)), - exit, - )?; - - self.dateindex_to_price_open_in_cents.compute_transform( - starting_indexes.dateindex, - &fetched.dateindex_to_price_ohlc_in_cents, - |(di, ohlc, ..)| (di, ohlc.open), - exit, - )?; - - self.dateindex_to_price_high_in_cents.compute_transform( - starting_indexes.dateindex, - &fetched.dateindex_to_price_ohlc_in_cents, - |(di, ohlc, ..)| (di, ohlc.high), - exit, - )?; - - self.dateindex_to_price_low_in_cents.compute_transform( - starting_indexes.dateindex, - &fetched.dateindex_to_price_ohlc_in_cents, - |(di, ohlc, ..)| (di, ohlc.low), - exit, - )?; - - self.dateindex_to_price_close_in_cents.compute_transform( - starting_indexes.dateindex, - &fetched.dateindex_to_price_ohlc_in_cents, - |(di, ohlc, ..)| (di, ohlc.close), - exit, - )?; - - self.dateindex_to_price_ohlc.compute_transform( - starting_indexes.dateindex, - &fetched.dateindex_to_price_ohlc_in_cents, - |(di, cents, ..)| (di, OHLCDollars::from(cents)), - exit, - )?; - - self.timeindexes_to_price_close - .compute_all(starting_indexes, exit, |v| { - v.compute_transform( - starting_indexes.dateindex, - &self.dateindex_to_price_ohlc, - |(di, ohlc, ..)| (di, ohlc.close), - exit, - )?; - Ok(()) - })?; - - self.timeindexes_to_price_high - .compute_all(starting_indexes, exit, |v| { - v.compute_transform( - starting_indexes.dateindex, - &self.dateindex_to_price_ohlc, - |(di, 
ohlc, ..)| (di, ohlc.high), - exit, - )?; - Ok(()) - })?; - - self.timeindexes_to_price_low - .compute_all(starting_indexes, exit, |v| { - v.compute_transform( - starting_indexes.dateindex, - &self.dateindex_to_price_ohlc, - |(di, ohlc, ..)| (di, ohlc.low), - exit, - )?; - Ok(()) - })?; - - self.timeindexes_to_price_open - .compute_all(starting_indexes, exit, |v| { - v.compute_transform( - starting_indexes.dateindex, - &self.dateindex_to_price_ohlc, - |(di, ohlc, ..)| (di, ohlc.open), - exit, - )?; - Ok(()) - })?; - - self.chainindexes_to_price_close - .compute(starting_indexes, exit, |v| { - v.compute_transform( - starting_indexes.height, - &self.height_to_price_ohlc, - |(h, ohlc, ..)| (h, ohlc.close), - exit, - )?; - Ok(()) - })?; - - self.chainindexes_to_price_high - .compute(starting_indexes, exit, |v| { - v.compute_transform( - starting_indexes.height, - &self.height_to_price_ohlc, - |(h, ohlc, ..)| (h, ohlc.high), - exit, - )?; - Ok(()) - })?; - - self.chainindexes_to_price_low - .compute(starting_indexes, exit, |v| { - v.compute_transform( - starting_indexes.height, - &self.height_to_price_ohlc, - |(h, ohlc, ..)| (h, ohlc.low), - exit, - )?; - Ok(()) - })?; - - self.chainindexes_to_price_open - .compute(starting_indexes, exit, |v| { - v.compute_transform( - starting_indexes.height, - &self.height_to_price_ohlc, - |(h, ohlc, ..)| (h, ohlc.open), - exit, - )?; - Ok(()) - })?; - - self.weekindex_to_price_ohlc.compute_transform4( - starting_indexes.weekindex, - self.timeindexes_to_price_open.weekindex.unwrap_first(), - self.timeindexes_to_price_high.weekindex.unwrap_max(), - self.timeindexes_to_price_low.weekindex.unwrap_min(), - self.timeindexes_to_price_close.weekindex.unwrap_last(), - |(i, open, high, low, close, _)| { - ( - i, - OHLCDollars { - open, - high, - low, - close, - }, - ) - }, - exit, - )?; - - self.difficultyepoch_to_price_ohlc.compute_transform4( - starting_indexes.difficultyepoch, - self.chainindexes_to_price_open - .difficultyepoch - 
.unwrap_first(), - self.chainindexes_to_price_high.difficultyepoch.unwrap_max(), - self.chainindexes_to_price_low.difficultyepoch.unwrap_min(), - self.chainindexes_to_price_close - .difficultyepoch - .unwrap_last(), - |(i, open, high, low, close, _)| { - ( - i, - OHLCDollars { - open, - high, - low, - close, - }, - ) - }, - exit, - )?; - - self.monthindex_to_price_ohlc.compute_transform4( - starting_indexes.monthindex, - self.timeindexes_to_price_open.monthindex.unwrap_first(), - self.timeindexes_to_price_high.monthindex.unwrap_max(), - self.timeindexes_to_price_low.monthindex.unwrap_min(), - self.timeindexes_to_price_close.monthindex.unwrap_last(), - |(i, open, high, low, close, _)| { - ( - i, - OHLCDollars { - open, - high, - low, - close, - }, - ) - }, - exit, - )?; - - self.quarterindex_to_price_ohlc.compute_transform4( - starting_indexes.quarterindex, - self.timeindexes_to_price_open.quarterindex.unwrap_first(), - self.timeindexes_to_price_high.quarterindex.unwrap_max(), - self.timeindexes_to_price_low.quarterindex.unwrap_min(), - self.timeindexes_to_price_close.quarterindex.unwrap_last(), - |(i, open, high, low, close, _)| { - ( - i, - OHLCDollars { - open, - high, - low, - close, - }, - ) - }, - exit, - )?; - - self.semesterindex_to_price_ohlc.compute_transform4( - starting_indexes.semesterindex, - self.timeindexes_to_price_open.semesterindex.unwrap_first(), - self.timeindexes_to_price_high.semesterindex.unwrap_max(), - self.timeindexes_to_price_low.semesterindex.unwrap_min(), - self.timeindexes_to_price_close.semesterindex.unwrap_last(), - |(i, open, high, low, close, _)| { - ( - i, - OHLCDollars { - open, - high, - low, - close, - }, - ) - }, - exit, - )?; - - self.yearindex_to_price_ohlc.compute_transform4( - starting_indexes.yearindex, - self.timeindexes_to_price_open.yearindex.unwrap_first(), - self.timeindexes_to_price_high.yearindex.unwrap_max(), - self.timeindexes_to_price_low.yearindex.unwrap_min(), - 
self.timeindexes_to_price_close.yearindex.unwrap_last(), - |(i, open, high, low, close, _)| { - ( - i, - OHLCDollars { - open, - high, - low, - close, - }, - ) - }, - exit, - )?; - - self.decadeindex_to_price_ohlc.compute_transform4( - starting_indexes.decadeindex, - self.timeindexes_to_price_open.decadeindex.unwrap_first(), - self.timeindexes_to_price_high.decadeindex.unwrap_max(), - self.timeindexes_to_price_low.decadeindex.unwrap_min(), - self.timeindexes_to_price_close.decadeindex.unwrap_last(), - |(i, open, high, low, close, _)| { - ( - i, - OHLCDollars { - open, - high, - low, - close, - }, - ) - }, - exit, - )?; - - self.chainindexes_to_price_open_in_sats - .compute(starting_indexes, exit, |v| { - v.compute_transform( - starting_indexes.height, - &self.chainindexes_to_price_open.height, - |(i, open, ..)| (i, Open::new(Sats::ONE_BTC / *open)), - exit, - )?; - Ok(()) - })?; - - self.chainindexes_to_price_high_in_sats - .compute(starting_indexes, exit, |v| { - v.compute_transform( - starting_indexes.height, - &self.chainindexes_to_price_low.height, - |(i, low, ..)| (i, High::new(Sats::ONE_BTC / *low)), - exit, - )?; - Ok(()) - })?; - - self.chainindexes_to_price_low_in_sats - .compute(starting_indexes, exit, |v| { - v.compute_transform( - starting_indexes.height, - &self.chainindexes_to_price_high.height, - |(i, high, ..)| (i, Low::new(Sats::ONE_BTC / *high)), - exit, - )?; - Ok(()) - })?; - - self.chainindexes_to_price_close_in_sats - .compute(starting_indexes, exit, |v| { - v.compute_transform( - starting_indexes.height, - &self.chainindexes_to_price_close.height, - |(i, close, ..)| (i, Close::new(Sats::ONE_BTC / *close)), - exit, - )?; - Ok(()) - })?; - - self.timeindexes_to_price_open_in_sats - .compute_all(starting_indexes, exit, |v| { - v.compute_transform( - starting_indexes.dateindex, - self.timeindexes_to_price_open.dateindex.u(), - |(i, open, ..)| (i, Open::new(Sats::ONE_BTC / *open)), - exit, - )?; - Ok(()) - })?; - - 
self.timeindexes_to_price_high_in_sats - .compute_all(starting_indexes, exit, |v| { - v.compute_transform( - starting_indexes.dateindex, - self.timeindexes_to_price_low.dateindex.u(), - |(i, low, ..)| (i, High::new(Sats::ONE_BTC / *low)), - exit, - )?; - Ok(()) - })?; - - self.timeindexes_to_price_low_in_sats - .compute_all(starting_indexes, exit, |v| { - v.compute_transform( - starting_indexes.dateindex, - self.timeindexes_to_price_high.dateindex.u(), - |(i, high, ..)| (i, Low::new(Sats::ONE_BTC / *high)), - exit, - )?; - Ok(()) - })?; - - self.timeindexes_to_price_close_in_sats - .compute_all(starting_indexes, exit, |v| { - v.compute_transform( - starting_indexes.dateindex, - self.timeindexes_to_price_close.dateindex.u(), - |(i, close, ..)| (i, Close::new(Sats::ONE_BTC / *close)), - exit, - )?; - Ok(()) - })?; - - self.height_to_price_ohlc_in_sats.compute_transform4( - starting_indexes.height, - &self.chainindexes_to_price_open_in_sats.height, - &self.chainindexes_to_price_high_in_sats.height, - &self.chainindexes_to_price_low_in_sats.height, - &self.chainindexes_to_price_close_in_sats.height, - |(i, open, high, low, close, _)| { - ( - i, - OHLCSats { - open, - high, - low, - close, - }, - ) - }, - exit, - )?; - - self.dateindex_to_price_ohlc_in_sats.compute_transform4( - starting_indexes.dateindex, - self.timeindexes_to_price_open_in_sats - .dateindex - .as_ref() - .unwrap(), - self.timeindexes_to_price_high_in_sats - .dateindex - .as_ref() - .unwrap(), - self.timeindexes_to_price_low_in_sats - .dateindex - .as_ref() - .unwrap(), - self.timeindexes_to_price_close_in_sats - .dateindex - .as_ref() - .unwrap(), - |(i, open, high, low, close, _)| { - ( - i, - OHLCSats { - open, - high, - low, - close, - }, - ) - }, - exit, - )?; - - self.weekindex_to_price_ohlc_in_sats.compute_transform4( - starting_indexes.weekindex, - self.timeindexes_to_price_open_in_sats - .weekindex - .unwrap_first(), - self.timeindexes_to_price_high_in_sats - .weekindex - .unwrap_max(), - 
self.timeindexes_to_price_low_in_sats.weekindex.unwrap_min(), - self.timeindexes_to_price_close_in_sats - .weekindex - .unwrap_last(), - |(i, open, high, low, close, _)| { - ( - i, - OHLCSats { - open, - high, - low, - close, - }, - ) - }, - exit, - )?; - - self.difficultyepoch_to_price_ohlc_in_sats - .compute_transform4( - starting_indexes.difficultyepoch, - self.chainindexes_to_price_open_in_sats - .difficultyepoch - .unwrap_first(), - self.chainindexes_to_price_high_in_sats - .difficultyepoch - .unwrap_max(), - self.chainindexes_to_price_low_in_sats - .difficultyepoch - .unwrap_min(), - self.chainindexes_to_price_close_in_sats - .difficultyepoch - .unwrap_last(), - |(i, open, high, low, close, _)| { - ( - i, - OHLCSats { - open, - high, - low, - close, - }, - ) - }, - exit, - )?; - - self.monthindex_to_price_ohlc_in_sats.compute_transform4( - starting_indexes.monthindex, - self.timeindexes_to_price_open_in_sats - .monthindex - .unwrap_first(), - self.timeindexes_to_price_high_in_sats - .monthindex - .unwrap_max(), - self.timeindexes_to_price_low_in_sats - .monthindex - .unwrap_min(), - self.timeindexes_to_price_close_in_sats - .monthindex - .unwrap_last(), - |(i, open, high, low, close, _)| { - ( - i, - OHLCSats { - open, - high, - low, - close, - }, - ) - }, - exit, - )?; - - self.quarterindex_to_price_ohlc_in_sats.compute_transform4( - starting_indexes.quarterindex, - self.timeindexes_to_price_open_in_sats - .quarterindex - .unwrap_first(), - self.timeindexes_to_price_high_in_sats - .quarterindex - .unwrap_max(), - self.timeindexes_to_price_low_in_sats - .quarterindex - .unwrap_min(), - self.timeindexes_to_price_close_in_sats - .quarterindex - .unwrap_last(), - |(i, open, high, low, close, _)| { - ( - i, - OHLCSats { - open, - high, - low, - close, - }, - ) - }, - exit, - )?; - - self.semesterindex_to_price_ohlc_in_sats - .compute_transform4( - starting_indexes.semesterindex, - self.timeindexes_to_price_open_in_sats - .semesterindex - .unwrap_first(), - 
self.timeindexes_to_price_high_in_sats - .semesterindex - .unwrap_max(), - self.timeindexes_to_price_low_in_sats - .semesterindex - .unwrap_min(), - self.timeindexes_to_price_close_in_sats - .semesterindex - .unwrap_last(), - |(i, open, high, low, close, _)| { - ( - i, - OHLCSats { - open, - high, - low, - close, - }, - ) - }, - exit, - )?; - - self.yearindex_to_price_ohlc_in_sats.compute_transform4( - starting_indexes.yearindex, - self.timeindexes_to_price_open_in_sats - .yearindex - .unwrap_first(), - self.timeindexes_to_price_high_in_sats - .yearindex - .unwrap_max(), - self.timeindexes_to_price_low_in_sats.yearindex.unwrap_min(), - self.timeindexes_to_price_close_in_sats - .yearindex - .unwrap_last(), - |(i, open, high, low, close, _)| { - ( - i, - OHLCSats { - open, - high, - low, - close, - }, - ) - }, - exit, - )?; - - self.decadeindex_to_price_ohlc_in_sats.compute_transform4( - starting_indexes.decadeindex, - self.timeindexes_to_price_open_in_sats - .decadeindex - .unwrap_first(), - self.timeindexes_to_price_high_in_sats - .decadeindex - .unwrap_max(), - self.timeindexes_to_price_low_in_sats - .decadeindex - .unwrap_min(), - self.timeindexes_to_price_close_in_sats - .decadeindex - .unwrap_last(), - |(i, open, high, low, close, _)| { - ( - i, - OHLCSats { - open, - high, - low, - close, - }, - ) - }, - exit, - )?; - - Ok(()) - } -} diff --git a/crates/brk_computer/src/price/compute.rs b/crates/brk_computer/src/price/compute.rs new file mode 100644 index 000000000..0401ce347 --- /dev/null +++ b/crates/brk_computer/src/price/compute.rs @@ -0,0 +1,17 @@ +use brk_error::Result; +use vecdb::Exit; + +use super::Vecs; +use crate::ComputeIndexes; + +impl Vecs { + pub fn compute(&mut self, starting_indexes: &ComputeIndexes, exit: &Exit) -> Result<()> { + self.usd.compute(starting_indexes, exit)?; + + self.sats.compute(starting_indexes, &self.usd, exit)?; + + let _lock = exit.lock(); + self.db().compact()?; + Ok(()) + } +} diff --git 
a/crates/brk_computer/src/price/fetch.rs b/crates/brk_computer/src/price/fetch.rs new file mode 100644 index 000000000..795ebaaaf --- /dev/null +++ b/crates/brk_computer/src/price/fetch.rs @@ -0,0 +1,103 @@ +use brk_error::Result; +use brk_indexer::Indexer; +use brk_types::{DateIndex, Height, OHLCCents}; +use vecdb::{AnyStoredVec, AnyVec, Exit, GenericStoredVec, IterableVec, TypedVecIterator, VecIndex}; + +use crate::{indexes, utils::OptionExt, ComputeIndexes}; + +use super::Vecs; + +impl Vecs { + pub fn fetch( + &mut self, + indexer: &Indexer, + indexes: &indexes::Vecs, + starting_indexes: &ComputeIndexes, + exit: &Exit, + ) -> Result<()> { + let Some(fetcher) = self.fetcher.as_mut() else { + return Ok(()); + }; + + // Validate computed versions against dependencies + let height_dep_version = indexer.vecs.block.height_to_timestamp.version(); + self.ohlc + .height_to_ohlc_in_cents + .validate_computed_version_or_reset(height_dep_version)?; + + let dateindex_dep_version = indexes.time.dateindex_to_date.version(); + self.ohlc + .dateindex_to_ohlc_in_cents + .validate_computed_version_or_reset(dateindex_dep_version)?; + + let height_to_timestamp = &indexer.vecs.block.height_to_timestamp; + let index = starting_indexes + .height + .min(Height::from(self.ohlc.height_to_ohlc_in_cents.len())); + let mut prev_timestamp = index + .decremented() + .map(|prev_i| height_to_timestamp.iter().unwrap().get_unwrap(prev_i)); + height_to_timestamp + .iter()? 
+ .enumerate() + .skip(index.to_usize()) + .try_for_each(|(i, v)| -> Result<()> { + self.ohlc.height_to_ohlc_in_cents.truncate_push_at( + i, + fetcher + .get_height(i.into(), v, prev_timestamp) + .unwrap(), + )?; + prev_timestamp = Some(v); + Ok(()) + })?; + { + let _lock = exit.lock(); + self.ohlc.height_to_ohlc_in_cents.write()?; + } + + let index = starting_indexes + .dateindex + .min(DateIndex::from(self.ohlc.dateindex_to_ohlc_in_cents.len())); + let mut prev = Some(index.decremented().map_or(OHLCCents::default(), |prev_i| { + self.ohlc + .dateindex_to_ohlc_in_cents + .iter() + .unwrap() + .get_unwrap(prev_i) + })); + indexes + .time + .dateindex_to_date + .iter() + .enumerate() + .skip(index.to_usize()) + .try_for_each(|(i, d)| -> Result<()> { + let ohlc = if i.to_usize() + 100 >= self.ohlc.dateindex_to_ohlc_in_cents.len() + && let Ok(mut ohlc) = fetcher.get_date(d) + { + let prev_open = *prev.u().close; + *ohlc.open = prev_open; + *ohlc.high = (*ohlc.high).max(prev_open); + *ohlc.low = (*ohlc.low).min(prev_open); + ohlc + } else { + prev.clone().unwrap() + }; + + prev.replace(ohlc.clone()); + + self.ohlc + .dateindex_to_ohlc_in_cents + .truncate_push_at(i, ohlc)?; + + Ok(()) + })?; + { + let _lock = exit.lock(); + self.ohlc.dateindex_to_ohlc_in_cents.write()?; + } + + Ok(()) + } +} diff --git a/crates/brk_computer/src/price/mod.rs b/crates/brk_computer/src/price/mod.rs new file mode 100644 index 000000000..aec9a0a2e --- /dev/null +++ b/crates/brk_computer/src/price/mod.rs @@ -0,0 +1,84 @@ +mod compute; +mod fetch; + +pub mod ohlc; +pub mod sats; +pub mod usd; + +pub use ohlc::Vecs as OhlcVecs; +pub use sats::Vecs as SatsVecs; +pub use usd::Vecs as UsdVecs; + +use std::path::Path; + +use brk_fetcher::Fetcher; +use brk_traversable::Traversable; +use brk_types::Version; +use vecdb::{Database, PAGE_SIZE}; + +use crate::indexes; + +pub const DB_NAME: &str = "price"; + +#[derive(Clone, Traversable)] +pub struct Vecs { + #[traversable(skip)] + pub(crate) db: 
Database, + + #[traversable(skip)] + pub(crate) fetcher: Option, + + pub ohlc: OhlcVecs, + pub usd: UsdVecs, + pub sats: SatsVecs, +} + +impl Vecs { + pub fn forced_import( + parent: &Path, + version: Version, + indexes: &indexes::Vecs, + fetcher: Option, + ) -> brk_error::Result { + let db = Database::open(&parent.join(DB_NAME))?; + db.set_min_len(PAGE_SIZE * 1_000_000)?; + + let this = Self::forced_import_inner(&db, version, indexes, fetcher)?; + + this.db.retain_regions( + this.iter_any_exportable() + .flat_map(|v| v.region_names()) + .collect(), + )?; + this.db.compact()?; + + Ok(this) + } + + fn forced_import_inner( + db: &Database, + version: Version, + indexes: &indexes::Vecs, + fetcher: Option, + ) -> brk_error::Result { + let ohlc = OhlcVecs::forced_import(db, version)?; + let usd = UsdVecs::forced_import(db, version, indexes, &ohlc)?; + let sats = SatsVecs::forced_import(db, version, indexes)?; + + Ok(Self { + db: db.clone(), + fetcher, + ohlc, + usd, + sats, + }) + } + + pub fn has_fetcher(&self) -> bool { + self.fetcher.is_some() + } + + pub(crate) fn db(&self) -> &Database { + &self.db + } +} diff --git a/crates/brk_computer/src/price/ohlc/import.rs b/crates/brk_computer/src/price/ohlc/import.rs new file mode 100644 index 000000000..d7cffd70d --- /dev/null +++ b/crates/brk_computer/src/price/ohlc/import.rs @@ -0,0 +1,22 @@ +use brk_error::Result; +use brk_types::Version; +use vecdb::{BytesVec, Database, ImportableVec}; + +use super::Vecs; + +impl Vecs { + pub fn forced_import(db: &Database, version: Version) -> Result { + Ok(Self { + dateindex_to_ohlc_in_cents: BytesVec::forced_import( + db, + "ohlc_in_cents", + version, + )?, + height_to_ohlc_in_cents: BytesVec::forced_import( + db, + "ohlc_in_cents", + version, + )?, + }) + } +} diff --git a/crates/brk_computer/src/price/ohlc/mod.rs b/crates/brk_computer/src/price/ohlc/mod.rs new file mode 100644 index 000000000..f8623047a --- /dev/null +++ b/crates/brk_computer/src/price/ohlc/mod.rs @@ -0,0 +1,4 @@ 
+mod import; +mod vecs; + +pub use vecs::Vecs; diff --git a/crates/brk_computer/src/price/ohlc/vecs.rs b/crates/brk_computer/src/price/ohlc/vecs.rs new file mode 100644 index 000000000..0dbdf0367 --- /dev/null +++ b/crates/brk_computer/src/price/ohlc/vecs.rs @@ -0,0 +1,9 @@ +use brk_traversable::Traversable; +use brk_types::{DateIndex, Height, OHLCCents}; +use vecdb::BytesVec; + +#[derive(Clone, Traversable)] +pub struct Vecs { + pub dateindex_to_ohlc_in_cents: BytesVec, + pub height_to_ohlc_in_cents: BytesVec, +} diff --git a/crates/brk_computer/src/price/sats/compute.rs b/crates/brk_computer/src/price/sats/compute.rs new file mode 100644 index 000000000..32d0d7963 --- /dev/null +++ b/crates/brk_computer/src/price/sats/compute.rs @@ -0,0 +1,357 @@ +use brk_error::Result; +use brk_types::{Close, High, Low, OHLCSats, Open, Sats}; +use vecdb::Exit; + +use super::Vecs; +use super::super::usd; +use crate::{utils::OptionExt, ComputeIndexes}; + +impl Vecs { + pub fn compute( + &mut self, + starting_indexes: &ComputeIndexes, + usd: &usd::Vecs, + exit: &Exit, + ) -> Result<()> { + // Chain indexes in sats (1 BTC / price) + self.chainindexes_to_price_open_in_sats + .compute(starting_indexes, exit, |v| { + v.compute_transform( + starting_indexes.height, + &usd.chainindexes_to_price_open.height, + |(i, open, ..)| (i, Open::new(Sats::ONE_BTC / *open)), + exit, + )?; + Ok(()) + })?; + + self.chainindexes_to_price_high_in_sats + .compute(starting_indexes, exit, |v| { + v.compute_transform( + starting_indexes.height, + &usd.chainindexes_to_price_low.height, + |(i, low, ..)| (i, High::new(Sats::ONE_BTC / *low)), + exit, + )?; + Ok(()) + })?; + + self.chainindexes_to_price_low_in_sats + .compute(starting_indexes, exit, |v| { + v.compute_transform( + starting_indexes.height, + &usd.chainindexes_to_price_high.height, + |(i, high, ..)| (i, Low::new(Sats::ONE_BTC / *high)), + exit, + )?; + Ok(()) + })?; + + self.chainindexes_to_price_close_in_sats + .compute(starting_indexes, exit, |v| 
{ + v.compute_transform( + starting_indexes.height, + &usd.chainindexes_to_price_close.height, + |(i, close, ..)| (i, Close::new(Sats::ONE_BTC / *close)), + exit, + )?; + Ok(()) + })?; + + // Time indexes in sats + self.timeindexes_to_price_open_in_sats + .compute_all(starting_indexes, exit, |v| { + v.compute_transform( + starting_indexes.dateindex, + usd.timeindexes_to_price_open.dateindex.u(), + |(i, open, ..)| (i, Open::new(Sats::ONE_BTC / *open)), + exit, + )?; + Ok(()) + })?; + + self.timeindexes_to_price_high_in_sats + .compute_all(starting_indexes, exit, |v| { + v.compute_transform( + starting_indexes.dateindex, + usd.timeindexes_to_price_low.dateindex.u(), + |(i, low, ..)| (i, High::new(Sats::ONE_BTC / *low)), + exit, + )?; + Ok(()) + })?; + + self.timeindexes_to_price_low_in_sats + .compute_all(starting_indexes, exit, |v| { + v.compute_transform( + starting_indexes.dateindex, + usd.timeindexes_to_price_high.dateindex.u(), + |(i, high, ..)| (i, Low::new(Sats::ONE_BTC / *high)), + exit, + )?; + Ok(()) + })?; + + self.timeindexes_to_price_close_in_sats + .compute_all(starting_indexes, exit, |v| { + v.compute_transform( + starting_indexes.dateindex, + usd.timeindexes_to_price_close.dateindex.u(), + |(i, close, ..)| (i, Close::new(Sats::ONE_BTC / *close)), + exit, + )?; + Ok(()) + })?; + + // Height OHLC in sats + self.height_to_price_ohlc_in_sats.compute_transform4( + starting_indexes.height, + &self.chainindexes_to_price_open_in_sats.height, + &self.chainindexes_to_price_high_in_sats.height, + &self.chainindexes_to_price_low_in_sats.height, + &self.chainindexes_to_price_close_in_sats.height, + |(i, open, high, low, close, _)| { + ( + i, + OHLCSats { + open, + high, + low, + close, + }, + ) + }, + exit, + )?; + + // DateIndex OHLC in sats + self.dateindex_to_price_ohlc_in_sats.compute_transform4( + starting_indexes.dateindex, + self.timeindexes_to_price_open_in_sats + .dateindex + .as_ref() + .unwrap(), + self.timeindexes_to_price_high_in_sats + .dateindex + 
.as_ref() + .unwrap(), + self.timeindexes_to_price_low_in_sats + .dateindex + .as_ref() + .unwrap(), + self.timeindexes_to_price_close_in_sats + .dateindex + .as_ref() + .unwrap(), + |(i, open, high, low, close, _)| { + ( + i, + OHLCSats { + open, + high, + low, + close, + }, + ) + }, + exit, + )?; + + // Period OHLC in sats + self.weekindex_to_price_ohlc_in_sats.compute_transform4( + starting_indexes.weekindex, + self.timeindexes_to_price_open_in_sats + .weekindex + .unwrap_first(), + self.timeindexes_to_price_high_in_sats + .weekindex + .unwrap_max(), + self.timeindexes_to_price_low_in_sats.weekindex.unwrap_min(), + self.timeindexes_to_price_close_in_sats + .weekindex + .unwrap_last(), + |(i, open, high, low, close, _)| { + ( + i, + OHLCSats { + open, + high, + low, + close, + }, + ) + }, + exit, + )?; + + self.difficultyepoch_to_price_ohlc_in_sats + .compute_transform4( + starting_indexes.difficultyepoch, + self.chainindexes_to_price_open_in_sats + .difficultyepoch + .unwrap_first(), + self.chainindexes_to_price_high_in_sats + .difficultyepoch + .unwrap_max(), + self.chainindexes_to_price_low_in_sats + .difficultyepoch + .unwrap_min(), + self.chainindexes_to_price_close_in_sats + .difficultyepoch + .unwrap_last(), + |(i, open, high, low, close, _)| { + ( + i, + OHLCSats { + open, + high, + low, + close, + }, + ) + }, + exit, + )?; + + self.monthindex_to_price_ohlc_in_sats.compute_transform4( + starting_indexes.monthindex, + self.timeindexes_to_price_open_in_sats + .monthindex + .unwrap_first(), + self.timeindexes_to_price_high_in_sats + .monthindex + .unwrap_max(), + self.timeindexes_to_price_low_in_sats + .monthindex + .unwrap_min(), + self.timeindexes_to_price_close_in_sats + .monthindex + .unwrap_last(), + |(i, open, high, low, close, _)| { + ( + i, + OHLCSats { + open, + high, + low, + close, + }, + ) + }, + exit, + )?; + + self.quarterindex_to_price_ohlc_in_sats.compute_transform4( + starting_indexes.quarterindex, + self.timeindexes_to_price_open_in_sats + 
.quarterindex + .unwrap_first(), + self.timeindexes_to_price_high_in_sats + .quarterindex + .unwrap_max(), + self.timeindexes_to_price_low_in_sats + .quarterindex + .unwrap_min(), + self.timeindexes_to_price_close_in_sats + .quarterindex + .unwrap_last(), + |(i, open, high, low, close, _)| { + ( + i, + OHLCSats { + open, + high, + low, + close, + }, + ) + }, + exit, + )?; + + self.semesterindex_to_price_ohlc_in_sats + .compute_transform4( + starting_indexes.semesterindex, + self.timeindexes_to_price_open_in_sats + .semesterindex + .unwrap_first(), + self.timeindexes_to_price_high_in_sats + .semesterindex + .unwrap_max(), + self.timeindexes_to_price_low_in_sats + .semesterindex + .unwrap_min(), + self.timeindexes_to_price_close_in_sats + .semesterindex + .unwrap_last(), + |(i, open, high, low, close, _)| { + ( + i, + OHLCSats { + open, + high, + low, + close, + }, + ) + }, + exit, + )?; + + self.yearindex_to_price_ohlc_in_sats.compute_transform4( + starting_indexes.yearindex, + self.timeindexes_to_price_open_in_sats + .yearindex + .unwrap_first(), + self.timeindexes_to_price_high_in_sats + .yearindex + .unwrap_max(), + self.timeindexes_to_price_low_in_sats.yearindex.unwrap_min(), + self.timeindexes_to_price_close_in_sats + .yearindex + .unwrap_last(), + |(i, open, high, low, close, _)| { + ( + i, + OHLCSats { + open, + high, + low, + close, + }, + ) + }, + exit, + )?; + + self.decadeindex_to_price_ohlc_in_sats.compute_transform4( + starting_indexes.decadeindex, + self.timeindexes_to_price_open_in_sats + .decadeindex + .unwrap_first(), + self.timeindexes_to_price_high_in_sats + .decadeindex + .unwrap_max(), + self.timeindexes_to_price_low_in_sats + .decadeindex + .unwrap_min(), + self.timeindexes_to_price_close_in_sats + .decadeindex + .unwrap_last(), + |(i, open, high, low, close, _)| { + ( + i, + OHLCSats { + open, + high, + low, + close, + }, + ) + }, + exit, + )?; + + Ok(()) + } +} diff --git a/crates/brk_computer/src/price/sats/import.rs 
b/crates/brk_computer/src/price/sats/import.rs new file mode 100644 index 000000000..6300a962d --- /dev/null +++ b/crates/brk_computer/src/price/sats/import.rs @@ -0,0 +1,130 @@ +use brk_error::Result; +use brk_types::Version; +use vecdb::{Database, EagerVec, ImportableVec}; + +use super::Vecs; +use crate::{ + indexes, + internal::{ComputedVecsFromDateIndex, ComputedVecsFromHeightStrict, Source, VecBuilderOptions}, +}; + +impl Vecs { + pub fn forced_import( + db: &Database, + version: Version, + indexes: &indexes::Vecs, + ) -> Result { + let first = || VecBuilderOptions::default().add_first(); + let last = || VecBuilderOptions::default().add_last(); + let min = || VecBuilderOptions::default().add_min(); + let max = || VecBuilderOptions::default().add_max(); + + Ok(Self { + dateindex_to_price_ohlc_in_sats: EagerVec::forced_import( + db, + "price_ohlc_in_sats", + version, + )?, + height_to_price_ohlc_in_sats: EagerVec::forced_import( + db, + "price_ohlc_in_sats", + version, + )?, + timeindexes_to_price_open_in_sats: ComputedVecsFromDateIndex::forced_import( + db, + "price_open_in_sats", + Source::Compute, + version, + indexes, + first(), + )?, + timeindexes_to_price_high_in_sats: ComputedVecsFromDateIndex::forced_import( + db, + "price_high_in_sats", + Source::Compute, + version, + indexes, + max(), + )?, + timeindexes_to_price_low_in_sats: ComputedVecsFromDateIndex::forced_import( + db, + "price_low_in_sats", + Source::Compute, + version, + indexes, + min(), + )?, + timeindexes_to_price_close_in_sats: ComputedVecsFromDateIndex::forced_import( + db, + "price_close_in_sats", + Source::Compute, + version, + indexes, + last(), + )?, + chainindexes_to_price_open_in_sats: ComputedVecsFromHeightStrict::forced_import( + db, + "price_open_in_sats", + version, + indexes, + first(), + )?, + chainindexes_to_price_high_in_sats: ComputedVecsFromHeightStrict::forced_import( + db, + "price_high_in_sats", + version, + indexes, + max(), + )?, + chainindexes_to_price_low_in_sats: 
ComputedVecsFromHeightStrict::forced_import( + db, + "price_low_in_sats", + version, + indexes, + min(), + )?, + chainindexes_to_price_close_in_sats: ComputedVecsFromHeightStrict::forced_import( + db, + "price_close_in_sats", + version, + indexes, + last(), + )?, + weekindex_to_price_ohlc_in_sats: EagerVec::forced_import( + db, + "price_ohlc_in_sats", + version, + )?, + difficultyepoch_to_price_ohlc_in_sats: EagerVec::forced_import( + db, + "price_ohlc_in_sats", + version, + )?, + monthindex_to_price_ohlc_in_sats: EagerVec::forced_import( + db, + "price_ohlc_in_sats", + version, + )?, + quarterindex_to_price_ohlc_in_sats: EagerVec::forced_import( + db, + "price_ohlc_in_sats", + version, + )?, + semesterindex_to_price_ohlc_in_sats: EagerVec::forced_import( + db, + "price_ohlc_in_sats", + version, + )?, + yearindex_to_price_ohlc_in_sats: EagerVec::forced_import( + db, + "price_ohlc_in_sats", + version, + )?, + decadeindex_to_price_ohlc_in_sats: EagerVec::forced_import( + db, + "price_ohlc_in_sats", + version, + )?, + }) + } +} diff --git a/crates/brk_computer/src/price/sats/mod.rs b/crates/brk_computer/src/price/sats/mod.rs new file mode 100644 index 000000000..1136f9ebd --- /dev/null +++ b/crates/brk_computer/src/price/sats/mod.rs @@ -0,0 +1,5 @@ +mod compute; +mod import; +mod vecs; + +pub use vecs::Vecs; diff --git a/crates/brk_computer/src/price/sats/vecs.rs b/crates/brk_computer/src/price/sats/vecs.rs new file mode 100644 index 000000000..75aec7eee --- /dev/null +++ b/crates/brk_computer/src/price/sats/vecs.rs @@ -0,0 +1,36 @@ +use brk_traversable::Traversable; +use brk_types::{ + Close, DateIndex, DecadeIndex, DifficultyEpoch, Height, High, Low, MonthIndex, + OHLCSats, Open, QuarterIndex, Sats, SemesterIndex, WeekIndex, YearIndex, +}; +use vecdb::{BytesVec, EagerVec}; + +use crate::internal::{ComputedVecsFromDateIndex, ComputedVecsFromHeightStrict}; + +#[derive(Clone, Traversable)] +pub struct Vecs { + // OHLC in sats + pub dateindex_to_price_ohlc_in_sats: 
EagerVec>, + pub height_to_price_ohlc_in_sats: EagerVec>, + + // Computed time indexes in sats + pub timeindexes_to_price_open_in_sats: ComputedVecsFromDateIndex>, + pub timeindexes_to_price_high_in_sats: ComputedVecsFromDateIndex>, + pub timeindexes_to_price_low_in_sats: ComputedVecsFromDateIndex>, + pub timeindexes_to_price_close_in_sats: ComputedVecsFromDateIndex>, + + // Computed chain indexes in sats + pub chainindexes_to_price_open_in_sats: ComputedVecsFromHeightStrict>, + pub chainindexes_to_price_high_in_sats: ComputedVecsFromHeightStrict>, + pub chainindexes_to_price_low_in_sats: ComputedVecsFromHeightStrict>, + pub chainindexes_to_price_close_in_sats: ComputedVecsFromHeightStrict>, + + // Period OHLC in sats + pub weekindex_to_price_ohlc_in_sats: EagerVec>, + pub difficultyepoch_to_price_ohlc_in_sats: EagerVec>, + pub monthindex_to_price_ohlc_in_sats: EagerVec>, + pub quarterindex_to_price_ohlc_in_sats: EagerVec>, + pub semesterindex_to_price_ohlc_in_sats: EagerVec>, + pub yearindex_to_price_ohlc_in_sats: EagerVec>, + pub decadeindex_to_price_ohlc_in_sats: EagerVec>, +} diff --git a/crates/brk_computer/src/price/usd/compute.rs b/crates/brk_computer/src/price/usd/compute.rs new file mode 100644 index 000000000..569fad6f9 --- /dev/null +++ b/crates/brk_computer/src/price/usd/compute.rs @@ -0,0 +1,251 @@ +use brk_error::Result; +use brk_types::OHLCDollars; +use vecdb::Exit; + +use super::Vecs; +use crate::ComputeIndexes; + +impl Vecs { + pub fn compute( + &mut self, + starting_indexes: &ComputeIndexes, + exit: &Exit, + ) -> Result<()> { + // Timeindexes computed vecs + self.timeindexes_to_price_close + .compute_all(starting_indexes, exit, |v| { + v.compute_transform( + starting_indexes.dateindex, + &self.dateindex_to_price_ohlc, + |(di, ohlc, ..)| (di, ohlc.close), + exit, + )?; + Ok(()) + })?; + + self.timeindexes_to_price_high + .compute_all(starting_indexes, exit, |v| { + v.compute_transform( + starting_indexes.dateindex, + &self.dateindex_to_price_ohlc, 
+ |(di, ohlc, ..)| (di, ohlc.high), + exit, + )?; + Ok(()) + })?; + + self.timeindexes_to_price_low + .compute_all(starting_indexes, exit, |v| { + v.compute_transform( + starting_indexes.dateindex, + &self.dateindex_to_price_ohlc, + |(di, ohlc, ..)| (di, ohlc.low), + exit, + )?; + Ok(()) + })?; + + self.timeindexes_to_price_open + .compute_all(starting_indexes, exit, |v| { + v.compute_transform( + starting_indexes.dateindex, + &self.dateindex_to_price_ohlc, + |(di, ohlc, ..)| (di, ohlc.open), + exit, + )?; + Ok(()) + })?; + + // Chainindexes computed vecs + self.chainindexes_to_price_close + .compute(starting_indexes, exit, |v| { + v.compute_transform( + starting_indexes.height, + &self.height_to_price_ohlc, + |(h, ohlc, ..)| (h, ohlc.close), + exit, + )?; + Ok(()) + })?; + + self.chainindexes_to_price_high + .compute(starting_indexes, exit, |v| { + v.compute_transform( + starting_indexes.height, + &self.height_to_price_ohlc, + |(h, ohlc, ..)| (h, ohlc.high), + exit, + )?; + Ok(()) + })?; + + self.chainindexes_to_price_low + .compute(starting_indexes, exit, |v| { + v.compute_transform( + starting_indexes.height, + &self.height_to_price_ohlc, + |(h, ohlc, ..)| (h, ohlc.low), + exit, + )?; + Ok(()) + })?; + + self.chainindexes_to_price_open + .compute(starting_indexes, exit, |v| { + v.compute_transform( + starting_indexes.height, + &self.height_to_price_ohlc, + |(h, ohlc, ..)| (h, ohlc.open), + exit, + )?; + Ok(()) + })?; + + // Period OHLC aggregates + self.weekindex_to_price_ohlc.compute_transform4( + starting_indexes.weekindex, + self.timeindexes_to_price_open.weekindex.unwrap_first(), + self.timeindexes_to_price_high.weekindex.unwrap_max(), + self.timeindexes_to_price_low.weekindex.unwrap_min(), + self.timeindexes_to_price_close.weekindex.unwrap_last(), + |(i, open, high, low, close, _)| { + ( + i, + OHLCDollars { + open, + high, + low, + close, + }, + ) + }, + exit, + )?; + + self.difficultyepoch_to_price_ohlc.compute_transform4( + 
starting_indexes.difficultyepoch, + self.chainindexes_to_price_open + .difficultyepoch + .unwrap_first(), + self.chainindexes_to_price_high.difficultyepoch.unwrap_max(), + self.chainindexes_to_price_low.difficultyepoch.unwrap_min(), + self.chainindexes_to_price_close + .difficultyepoch + .unwrap_last(), + |(i, open, high, low, close, _)| { + ( + i, + OHLCDollars { + open, + high, + low, + close, + }, + ) + }, + exit, + )?; + + self.monthindex_to_price_ohlc.compute_transform4( + starting_indexes.monthindex, + self.timeindexes_to_price_open.monthindex.unwrap_first(), + self.timeindexes_to_price_high.monthindex.unwrap_max(), + self.timeindexes_to_price_low.monthindex.unwrap_min(), + self.timeindexes_to_price_close.monthindex.unwrap_last(), + |(i, open, high, low, close, _)| { + ( + i, + OHLCDollars { + open, + high, + low, + close, + }, + ) + }, + exit, + )?; + + self.quarterindex_to_price_ohlc.compute_transform4( + starting_indexes.quarterindex, + self.timeindexes_to_price_open.quarterindex.unwrap_first(), + self.timeindexes_to_price_high.quarterindex.unwrap_max(), + self.timeindexes_to_price_low.quarterindex.unwrap_min(), + self.timeindexes_to_price_close.quarterindex.unwrap_last(), + |(i, open, high, low, close, _)| { + ( + i, + OHLCDollars { + open, + high, + low, + close, + }, + ) + }, + exit, + )?; + + self.semesterindex_to_price_ohlc.compute_transform4( + starting_indexes.semesterindex, + self.timeindexes_to_price_open.semesterindex.unwrap_first(), + self.timeindexes_to_price_high.semesterindex.unwrap_max(), + self.timeindexes_to_price_low.semesterindex.unwrap_min(), + self.timeindexes_to_price_close.semesterindex.unwrap_last(), + |(i, open, high, low, close, _)| { + ( + i, + OHLCDollars { + open, + high, + low, + close, + }, + ) + }, + exit, + )?; + + self.yearindex_to_price_ohlc.compute_transform4( + starting_indexes.yearindex, + self.timeindexes_to_price_open.yearindex.unwrap_first(), + self.timeindexes_to_price_high.yearindex.unwrap_max(), + 
self.timeindexes_to_price_low.yearindex.unwrap_min(), + self.timeindexes_to_price_close.yearindex.unwrap_last(), + |(i, open, high, low, close, _)| { + ( + i, + OHLCDollars { + open, + high, + low, + close, + }, + ) + }, + exit, + )?; + + self.decadeindex_to_price_ohlc.compute_transform4( + starting_indexes.decadeindex, + self.timeindexes_to_price_open.decadeindex.unwrap_first(), + self.timeindexes_to_price_high.decadeindex.unwrap_max(), + self.timeindexes_to_price_low.decadeindex.unwrap_min(), + self.timeindexes_to_price_close.decadeindex.unwrap_last(), + |(i, open, high, low, close, _)| { + ( + i, + OHLCDollars { + open, + high, + low, + close, + }, + ) + }, + exit, + )?; + + Ok(()) + } +} diff --git a/crates/brk_computer/src/price/usd/import.rs b/crates/brk_computer/src/price/usd/import.rs new file mode 100644 index 000000000..76efc6c9c --- /dev/null +++ b/crates/brk_computer/src/price/usd/import.rs @@ -0,0 +1,174 @@ +use brk_error::Result; +use brk_types::{DateIndex, Height, OHLCDollars, Version}; +use vecdb::{Database, EagerVec, ImportableVec, IterableCloneableVec, LazyVecFrom1}; + +use super::super::ohlc; +use super::Vecs; +use crate::{ + indexes, + internal::{ComputedVecsFromDateIndex, ComputedVecsFromHeightStrict, Source, VecBuilderOptions}, +}; + +impl Vecs { + pub fn forced_import( + db: &Database, + version: Version, + indexes: &indexes::Vecs, + ohlc: &ohlc::Vecs, + ) -> Result { + let first = || VecBuilderOptions::default().add_first(); + let last = || VecBuilderOptions::default().add_last(); + let min = || VecBuilderOptions::default().add_min(); + let max = || VecBuilderOptions::default().add_max(); + + let height_to_price_ohlc = LazyVecFrom1::init( + "price_ohlc", + version, + ohlc.height_to_ohlc_in_cents.boxed_clone(), + |height: Height, ohlc_iter| ohlc_iter.get(height).map(OHLCDollars::from), + ); + + let height_to_price_open_in_cents = LazyVecFrom1::init( + "price_open_in_cents", + version, + ohlc.height_to_ohlc_in_cents.boxed_clone(), + |height: 
Height, ohlc_iter| ohlc_iter.get(height).map(|o| o.open), + ); + + let height_to_price_high_in_cents = LazyVecFrom1::init( + "price_high_in_cents", + version, + ohlc.height_to_ohlc_in_cents.boxed_clone(), + |height: Height, ohlc_iter| ohlc_iter.get(height).map(|o| o.high), + ); + + let height_to_price_low_in_cents = LazyVecFrom1::init( + "price_low_in_cents", + version, + ohlc.height_to_ohlc_in_cents.boxed_clone(), + |height: Height, ohlc_iter| ohlc_iter.get(height).map(|o| o.low), + ); + + let height_to_price_close_in_cents = LazyVecFrom1::init( + "price_close_in_cents", + version, + ohlc.height_to_ohlc_in_cents.boxed_clone(), + |height: Height, ohlc_iter| ohlc_iter.get(height).map(|o| o.close), + ); + + let dateindex_to_price_open_in_cents = LazyVecFrom1::init( + "price_open_in_cents", + version, + ohlc.dateindex_to_ohlc_in_cents.boxed_clone(), + |di: DateIndex, ohlc_iter| ohlc_iter.get(di).map(|o| o.open), + ); + + let dateindex_to_price_high_in_cents = LazyVecFrom1::init( + "price_high_in_cents", + version, + ohlc.dateindex_to_ohlc_in_cents.boxed_clone(), + |di: DateIndex, ohlc_iter| ohlc_iter.get(di).map(|o| o.high), + ); + + let dateindex_to_price_low_in_cents = LazyVecFrom1::init( + "price_low_in_cents", + version, + ohlc.dateindex_to_ohlc_in_cents.boxed_clone(), + |di: DateIndex, ohlc_iter| ohlc_iter.get(di).map(|o| o.low), + ); + + let dateindex_to_price_close_in_cents = LazyVecFrom1::init( + "price_close_in_cents", + version, + ohlc.dateindex_to_ohlc_in_cents.boxed_clone(), + |di: DateIndex, ohlc_iter| ohlc_iter.get(di).map(|o| o.close), + ); + + let dateindex_to_price_ohlc = LazyVecFrom1::init( + "price_ohlc", + version, + ohlc.dateindex_to_ohlc_in_cents.boxed_clone(), + |di: DateIndex, ohlc_iter| ohlc_iter.get(di).map(OHLCDollars::from), + ); + + Ok(Self { + dateindex_to_price_ohlc, + dateindex_to_price_close_in_cents, + dateindex_to_price_high_in_cents, + dateindex_to_price_low_in_cents, + dateindex_to_price_open_in_cents, + 
height_to_price_close_in_cents, + height_to_price_high_in_cents, + height_to_price_low_in_cents, + height_to_price_open_in_cents, + timeindexes_to_price_open: ComputedVecsFromDateIndex::forced_import( + db, + "price_open", + Source::Compute, + version, + indexes, + first(), + )?, + timeindexes_to_price_high: ComputedVecsFromDateIndex::forced_import( + db, + "price_high", + Source::Compute, + version, + indexes, + max(), + )?, + timeindexes_to_price_low: ComputedVecsFromDateIndex::forced_import( + db, + "price_low", + Source::Compute, + version, + indexes, + min(), + )?, + timeindexes_to_price_close: ComputedVecsFromDateIndex::forced_import( + db, + "price_close", + Source::Compute, + version, + indexes, + last(), + )?, + chainindexes_to_price_open: ComputedVecsFromHeightStrict::forced_import( + db, + "price_open", + version, + indexes, + first(), + )?, + chainindexes_to_price_high: ComputedVecsFromHeightStrict::forced_import( + db, + "price_high", + version, + indexes, + max(), + )?, + chainindexes_to_price_low: ComputedVecsFromHeightStrict::forced_import( + db, + "price_low", + version, + indexes, + min(), + )?, + chainindexes_to_price_close: ComputedVecsFromHeightStrict::forced_import( + db, + "price_close", + version, + indexes, + last(), + )?, + weekindex_to_price_ohlc: EagerVec::forced_import(db, "price_ohlc", version)?, + difficultyepoch_to_price_ohlc: EagerVec::forced_import(db, "price_ohlc", version)?, + monthindex_to_price_ohlc: EagerVec::forced_import(db, "price_ohlc", version)?, + quarterindex_to_price_ohlc: EagerVec::forced_import(db, "price_ohlc", version)?, + semesterindex_to_price_ohlc: EagerVec::forced_import(db, "price_ohlc", version)?, + yearindex_to_price_ohlc: EagerVec::forced_import(db, "price_ohlc", version)?, + decadeindex_to_price_ohlc: EagerVec::forced_import(db, "price_ohlc", version)?, + height_to_price_ohlc, + }) + } +} diff --git a/crates/brk_computer/src/price/usd/mod.rs b/crates/brk_computer/src/price/usd/mod.rs new file mode 100644 
index 000000000..1136f9ebd --- /dev/null +++ b/crates/brk_computer/src/price/usd/mod.rs @@ -0,0 +1,5 @@ +mod compute; +mod import; +mod vecs; + +pub use vecs::Vecs; diff --git a/crates/brk_computer/src/price/usd/vecs.rs b/crates/brk_computer/src/price/usd/vecs.rs new file mode 100644 index 000000000..f6a8b4084 --- /dev/null +++ b/crates/brk_computer/src/price/usd/vecs.rs @@ -0,0 +1,46 @@ +use brk_traversable::Traversable; +use brk_types::{ + Cents, Close, DateIndex, DecadeIndex, DifficultyEpoch, Dollars, Height, High, Low, MonthIndex, + OHLCCents, OHLCDollars, Open, QuarterIndex, SemesterIndex, WeekIndex, YearIndex, +}; +use vecdb::{BytesVec, EagerVec, LazyVecFrom1}; + +use crate::internal::{ComputedVecsFromDateIndex, ComputedVecsFromHeightStrict}; + +#[derive(Clone, Traversable)] +pub struct Vecs { + // Derived price data in cents + pub dateindex_to_price_close_in_cents: LazyVecFrom1, DateIndex, OHLCCents>, + pub dateindex_to_price_high_in_cents: LazyVecFrom1, DateIndex, OHLCCents>, + pub dateindex_to_price_low_in_cents: LazyVecFrom1, DateIndex, OHLCCents>, + pub dateindex_to_price_open_in_cents: LazyVecFrom1, DateIndex, OHLCCents>, + pub height_to_price_close_in_cents: LazyVecFrom1, Height, OHLCCents>, + pub height_to_price_high_in_cents: LazyVecFrom1, Height, OHLCCents>, + pub height_to_price_low_in_cents: LazyVecFrom1, Height, OHLCCents>, + pub height_to_price_open_in_cents: LazyVecFrom1, Height, OHLCCents>, + + // OHLC in dollars + pub dateindex_to_price_ohlc: LazyVecFrom1, + pub height_to_price_ohlc: LazyVecFrom1, + + // Computed time indexes + pub timeindexes_to_price_close: ComputedVecsFromDateIndex>, + pub timeindexes_to_price_high: ComputedVecsFromDateIndex>, + pub timeindexes_to_price_low: ComputedVecsFromDateIndex>, + pub timeindexes_to_price_open: ComputedVecsFromDateIndex>, + + // Computed chain indexes + pub chainindexes_to_price_close: ComputedVecsFromHeightStrict>, + pub chainindexes_to_price_high: ComputedVecsFromHeightStrict>, + pub 
chainindexes_to_price_low: ComputedVecsFromHeightStrict>, + pub chainindexes_to_price_open: ComputedVecsFromHeightStrict>, + + // Period OHLC + pub weekindex_to_price_ohlc: EagerVec>, + pub difficultyepoch_to_price_ohlc: EagerVec>, + pub monthindex_to_price_ohlc: EagerVec>, + pub quarterindex_to_price_ohlc: EagerVec>, + pub semesterindex_to_price_ohlc: EagerVec>, + pub yearindex_to_price_ohlc: EagerVec>, + pub decadeindex_to_price_ohlc: EagerVec>, +} diff --git a/crates/brk_computer/src/scripts/compute.rs b/crates/brk_computer/src/scripts/compute.rs new file mode 100644 index 000000000..9a8483043 --- /dev/null +++ b/crates/brk_computer/src/scripts/compute.rs @@ -0,0 +1,28 @@ +use brk_error::Result; +use brk_indexer::Indexer; +use vecdb::Exit; + +use crate::{indexes, price, ComputeIndexes}; + +use super::Vecs; + +impl Vecs { + pub fn compute( + &mut self, + indexer: &Indexer, + indexes: &indexes::Vecs, + price: Option<&price::Vecs>, + starting_indexes: &ComputeIndexes, + exit: &Exit, + ) -> Result<()> { + self.count + .compute(indexer, indexes, starting_indexes, exit)?; + + self.value + .compute(indexer, indexes, price, starting_indexes, exit)?; + + let _lock = exit.lock(); + self.db.compact()?; + Ok(()) + } +} diff --git a/crates/brk_computer/src/chain/output_type/compute.rs b/crates/brk_computer/src/scripts/count/compute.rs similarity index 69% rename from crates/brk_computer/src/chain/output_type/compute.rs rename to crates/brk_computer/src/scripts/count/compute.rs index 1bc3df9d8..d0754b8d3 100644 --- a/crates/brk_computer/src/chain/output_type/compute.rs +++ b/crates/brk_computer/src/scripts/count/compute.rs @@ -1,17 +1,16 @@ use brk_error::Result; use brk_indexer::Indexer; -use brk_types::{Height, StoredU64}; +use brk_types::StoredU64; use vecdb::{Exit, TypedVecIterator}; use super::Vecs; -use crate::{chain::transaction, indexes, ComputeIndexes}; +use crate::{indexes, utils::OptionExt, ComputeIndexes}; impl Vecs { pub fn compute( &mut self, indexer: &Indexer, 
indexes: &indexes::Vecs, - transaction_vecs: &transaction::Vecs, starting_indexes: &ComputeIndexes, exit: &Exit, ) -> Result<()> { @@ -147,51 +146,18 @@ impl Vecs { Ok(()) })?; - self.indexes_to_exact_utxo_count + // Compute segwit_count = p2wpkh + p2wsh + p2tr + let mut p2wsh_iter = self.indexes_to_p2wsh_count.height.u().into_iter(); + let mut p2tr_iter = self.indexes_to_p2tr_count.height.u().into_iter(); + + self.indexes_to_segwit_count .compute_all(indexes, starting_indexes, exit, |v| { - let mut input_count_iter = transaction_vecs - .indexes_to_input_count - .height - .unwrap_cumulative() - .into_iter(); - let mut opreturn_count_iter = self - .indexes_to_opreturn_count - .height_extra - .unwrap_cumulative() - .into_iter(); v.compute_transform( starting_indexes.height, - transaction_vecs - .indexes_to_output_count - .height - .unwrap_cumulative(), - |(h, output_count, ..)| { - let input_count = input_count_iter.get_unwrap(h); - let opreturn_count = opreturn_count_iter.get_unwrap(h); - let block_count = u64::from(h + 1_usize); - // -1 > genesis output is unspendable - let mut utxo_count = - *output_count - (*input_count - block_count) - *opreturn_count - 1; - - // txid dup: e3bf3d07d4b0375638d5f1db5255fe07ba2c4cb067cd81b84ee974b6585fb468 - // Block 91_722 https://mempool.space/block/00000000000271a2dc26e7667f8419f2e15416dc6955e5a6c6cdf3f2574dd08e - // Block 91_880 https://mempool.space/block/00000000000743f190a18c5577a3c2d2a1f610ae9601ac046a38084ccb7cd721 - // - // txid dup: d5d27987d2a3dfc724e359870c6644b40e497bdc0589a033220fe15429d88599 - // Block 91_812 https://mempool.space/block/00000000000af0aed4792b1acee3d966af36cf5def14935db8de83d6f9306f2f - // Block 91_842 https://mempool.space/block/00000000000a4d0a398161ffc163c503763b1f4360639393e0e4c8e300e0caec - // - // Warning: Dups invalidate the previous coinbase according to - // https://chainquery.com/bitcoin-cli/gettxoutsetinfo - - if h >= Height::new(91_842) { - utxo_count -= 1; - } - if h >= 
Height::new(91_880) { - utxo_count -= 1; - } - - (h, StoredU64::from(utxo_count)) + self.indexes_to_p2wpkh_count.height.u(), + |(h, p2wpkh, ..)| { + let sum = *p2wpkh + *p2wsh_iter.get_unwrap(h) + *p2tr_iter.get_unwrap(h); + (h, StoredU64::from(sum)) }, exit, )?; diff --git a/crates/brk_computer/src/scripts/count/import.rs b/crates/brk_computer/src/scripts/count/import.rs new file mode 100644 index 000000000..3ee20cda4 --- /dev/null +++ b/crates/brk_computer/src/scripts/count/import.rs @@ -0,0 +1,177 @@ +use brk_error::Result; +use brk_types::Version; +use vecdb::{Database, IterableCloneableVec}; + +use super::Vecs; +use crate::{ + indexes, + internal::{ + ComputedVecsFromHeight, LazyVecsFrom2FromHeight, PercentageU64F32, Source, + VecBuilderOptions, + }, + outputs, + utils::OptionExt, +}; + +impl Vecs { + pub fn forced_import( + db: &Database, + version: Version, + indexes: &indexes::Vecs, + outputs: &outputs::Vecs, + ) -> Result { + let v0 = Version::ZERO; + let full_stats = || { + VecBuilderOptions::default() + .add_average() + .add_minmax() + .add_percentiles() + .add_sum() + .add_cumulative() + }; + + let indexes_to_p2a_count = ComputedVecsFromHeight::forced_import( + db, + "p2a_count", + Source::Compute, + version + v0, + indexes, + full_stats(), + )?; + let indexes_to_p2ms_count = ComputedVecsFromHeight::forced_import( + db, + "p2ms_count", + Source::Compute, + version + v0, + indexes, + full_stats(), + )?; + let indexes_to_p2pk33_count = ComputedVecsFromHeight::forced_import( + db, + "p2pk33_count", + Source::Compute, + version + v0, + indexes, + full_stats(), + )?; + let indexes_to_p2pk65_count = ComputedVecsFromHeight::forced_import( + db, + "p2pk65_count", + Source::Compute, + version + v0, + indexes, + full_stats(), + )?; + let indexes_to_p2pkh_count = ComputedVecsFromHeight::forced_import( + db, + "p2pkh_count", + Source::Compute, + version + v0, + indexes, + full_stats(), + )?; + let indexes_to_p2sh_count = ComputedVecsFromHeight::forced_import( + db, 
+ "p2sh_count", + Source::Compute, + version + v0, + indexes, + full_stats(), + )?; + let indexes_to_p2tr_count = ComputedVecsFromHeight::forced_import( + db, + "p2tr_count", + Source::Compute, + version + v0, + indexes, + full_stats(), + )?; + let indexes_to_p2wpkh_count = ComputedVecsFromHeight::forced_import( + db, + "p2wpkh_count", + Source::Compute, + version + v0, + indexes, + full_stats(), + )?; + let indexes_to_p2wsh_count = ComputedVecsFromHeight::forced_import( + db, + "p2wsh_count", + Source::Compute, + version + v0, + indexes, + full_stats(), + )?; + + // Aggregate counts (computed from per-type counts) + let indexes_to_segwit_count = ComputedVecsFromHeight::forced_import( + db, + "segwit_count", + Source::Compute, + version + v0, + indexes, + full_stats(), + )?; + + // Adoption ratios (lazy) + // Uses outputs.count.indexes_to_count as denominator (total output count) + // At height level: per-block ratio; at dateindex level: sum-based ratio (% of new outputs) + let indexes_to_taproot_adoption = + LazyVecsFrom2FromHeight::from_height_and_txindex::( + "taproot_adoption", + version + v0, + indexes_to_p2tr_count.height.u().boxed_clone(), + outputs.count.indexes_to_count.height.sum.u().boxed_clone(), + &indexes_to_p2tr_count, + &outputs.count.indexes_to_count, + ); + let indexes_to_segwit_adoption = + LazyVecsFrom2FromHeight::from_height_and_txindex::( + "segwit_adoption", + version + v0, + indexes_to_segwit_count.height.u().boxed_clone(), + outputs.count.indexes_to_count.height.sum.u().boxed_clone(), + &indexes_to_segwit_count, + &outputs.count.indexes_to_count, + ); + + Ok(Self { + indexes_to_p2a_count, + indexes_to_p2ms_count, + indexes_to_p2pk33_count, + indexes_to_p2pk65_count, + indexes_to_p2pkh_count, + indexes_to_p2sh_count, + indexes_to_p2tr_count, + indexes_to_p2wpkh_count, + indexes_to_p2wsh_count, + indexes_to_opreturn_count: ComputedVecsFromHeight::forced_import( + db, + "opreturn_count", + Source::Compute, + version + v0, + indexes, + 
full_stats(), + )?, + indexes_to_emptyoutput_count: ComputedVecsFromHeight::forced_import( + db, + "emptyoutput_count", + Source::Compute, + version + v0, + indexes, + full_stats(), + )?, + indexes_to_unknownoutput_count: ComputedVecsFromHeight::forced_import( + db, + "unknownoutput_count", + Source::Compute, + version + v0, + indexes, + full_stats(), + )?, + indexes_to_segwit_count, + indexes_to_taproot_adoption, + indexes_to_segwit_adoption, + }) + } +} diff --git a/crates/brk_computer/src/scripts/count/mod.rs b/crates/brk_computer/src/scripts/count/mod.rs new file mode 100644 index 000000000..1136f9ebd --- /dev/null +++ b/crates/brk_computer/src/scripts/count/mod.rs @@ -0,0 +1,5 @@ +mod compute; +mod import; +mod vecs; + +pub use vecs::Vecs; diff --git a/crates/brk_computer/src/chain/output_type/vecs.rs b/crates/brk_computer/src/scripts/count/vecs.rs similarity index 56% rename from crates/brk_computer/src/chain/output_type/vecs.rs rename to crates/brk_computer/src/scripts/count/vecs.rs index ab3dc5cd7..7c990e3fb 100644 --- a/crates/brk_computer/src/chain/output_type/vecs.rs +++ b/crates/brk_computer/src/scripts/count/vecs.rs @@ -1,11 +1,11 @@ use brk_traversable::Traversable; -use brk_types::StoredU64; +use brk_types::{StoredF32, StoredU64}; -use crate::grouped::ComputedVecsFromHeight; +use crate::internal::{ComputedVecsFromHeight, LazyVecsFrom2FromHeight}; -/// Output type count metrics #[derive(Clone, Traversable)] pub struct Vecs { + // Per-type output counts pub indexes_to_p2a_count: ComputedVecsFromHeight, pub indexes_to_p2ms_count: ComputedVecsFromHeight, pub indexes_to_p2pk33_count: ComputedVecsFromHeight, @@ -18,5 +18,15 @@ pub struct Vecs { pub indexes_to_opreturn_count: ComputedVecsFromHeight, pub indexes_to_emptyoutput_count: ComputedVecsFromHeight, pub indexes_to_unknownoutput_count: ComputedVecsFromHeight, - pub indexes_to_exact_utxo_count: ComputedVecsFromHeight, + + // Aggregate counts + /// SegWit output count (p2wpkh + p2wsh + p2tr) + pub 
indexes_to_segwit_count: ComputedVecsFromHeight, + + // Adoption ratios (lazy) + // Denominator is outputs.count.indexes_to_count (total output count) + /// Taproot adoption: p2tr / total_outputs * 100 + pub indexes_to_taproot_adoption: LazyVecsFrom2FromHeight, + /// SegWit adoption: segwit / total_outputs * 100 + pub indexes_to_segwit_adoption: LazyVecsFrom2FromHeight, } diff --git a/crates/brk_computer/src/scripts/import.rs b/crates/brk_computer/src/scripts/import.rs new file mode 100644 index 000000000..c97215216 --- /dev/null +++ b/crates/brk_computer/src/scripts/import.rs @@ -0,0 +1,40 @@ +use std::path::Path; + +use brk_error::Result; +use brk_traversable::Traversable; +use brk_types::Version; +use vecdb::{Database, PAGE_SIZE}; + +use crate::{indexes, outputs, price}; + +use super::{CountVecs, ValueVecs, Vecs}; + +impl Vecs { + pub fn forced_import( + parent_path: &Path, + parent_version: Version, + indexes: &indexes::Vecs, + price: Option<&price::Vecs>, + outputs: &outputs::Vecs, + ) -> Result { + let db = Database::open(&parent_path.join(super::DB_NAME))?; + db.set_min_len(PAGE_SIZE * 50_000_000)?; + + let version = parent_version + Version::ZERO; + let compute_dollars = price.is_some(); + + let count = CountVecs::forced_import(&db, version, indexes, outputs)?; + let value = ValueVecs::forced_import(&db, version, indexes, compute_dollars)?; + + let this = Self { db, count, value }; + + this.db.retain_regions( + this.iter_any_exportable() + .flat_map(|v| v.region_names()) + .collect(), + )?; + this.db.compact()?; + + Ok(this) + } +} diff --git a/crates/brk_computer/src/scripts/mod.rs b/crates/brk_computer/src/scripts/mod.rs new file mode 100644 index 000000000..2cd216732 --- /dev/null +++ b/crates/brk_computer/src/scripts/mod.rs @@ -0,0 +1,22 @@ +pub mod count; +pub mod value; + +mod compute; +mod import; + +use brk_traversable::Traversable; +use vecdb::Database; + +pub use count::Vecs as CountVecs; +pub use value::Vecs as ValueVecs; + +pub const DB_NAME: 
&str = "scripts"; + +#[derive(Clone, Traversable)] +pub struct Vecs { + #[traversable(skip)] + pub(crate) db: Database, + + pub count: CountVecs, + pub value: ValueVecs, +} diff --git a/crates/brk_computer/src/scripts/value/compute.rs b/crates/brk_computer/src/scripts/value/compute.rs new file mode 100644 index 000000000..d94085b6f --- /dev/null +++ b/crates/brk_computer/src/scripts/value/compute.rs @@ -0,0 +1,87 @@ +use brk_error::Result; +use brk_indexer::Indexer; +use brk_types::{Height, OutputType, Sats, TxOutIndex}; +use vecdb::{AnyStoredVec, AnyVec, Exit, GenericStoredVec, TypedVecIterator, VecIndex}; + +use super::Vecs; +use crate::{indexes, price, ComputeIndexes}; + +impl Vecs { + pub fn compute( + &mut self, + indexer: &Indexer, + indexes: &indexes::Vecs, + price: Option<&price::Vecs>, + starting_indexes: &ComputeIndexes, + exit: &Exit, + ) -> Result<()> { + // Validate computed versions against dependencies + let dep_version = indexer.vecs.txout.height_to_first_txoutindex.version() + + indexer.vecs.txout.txoutindex_to_outputtype.version() + + indexer.vecs.txout.txoutindex_to_value.version(); + self.height_to_opreturn_value + .validate_computed_version_or_reset(dep_version)?; + + // Get target height + let target_len = indexer.vecs.txout.height_to_first_txoutindex.len(); + if target_len == 0 { + return Ok(()); + } + let target_height = Height::from(target_len - 1); + + // Find starting height for this vec + let current_len = self.height_to_opreturn_value.len(); + let starting_height = Height::from(current_len.min(starting_indexes.height.to_usize())); + + if starting_height > target_height { + return Ok(()); + } + + // Prepare iterators + let mut height_to_first_txoutindex = + indexer.vecs.txout.height_to_first_txoutindex.iter()?; + let mut txoutindex_to_outputtype = indexer.vecs.txout.txoutindex_to_outputtype.iter()?; + let mut txoutindex_to_value = indexer.vecs.txout.txoutindex_to_value.iter()?; + + // Iterate blocks + for h in 
starting_height.to_usize()..=target_height.to_usize() { + let height = Height::from(h); + + // Get output range for this block + let first_txoutindex = height_to_first_txoutindex.get_unwrap(height); + let next_first_txoutindex = if height < target_height { + height_to_first_txoutindex.get_unwrap(height.incremented()) + } else { + TxOutIndex::from(indexer.vecs.txout.txoutindex_to_value.len()) + }; + + // Sum opreturn values + let mut opreturn_value = Sats::ZERO; + for i in first_txoutindex.to_usize()..next_first_txoutindex.to_usize() { + let txoutindex = TxOutIndex::from(i); + let outputtype = txoutindex_to_outputtype.get_unwrap(txoutindex); + + if outputtype == OutputType::OpReturn { + let value = txoutindex_to_value.get_unwrap(txoutindex); + opreturn_value += value; + } + } + + self.height_to_opreturn_value + .truncate_push(height, opreturn_value)?; + } + + self.height_to_opreturn_value.write()?; + + // Compute derived vecs (dateindex aggregations, etc.) + self.indexes_to_opreturn_value.compute_rest( + indexes, + price, + starting_indexes, + exit, + Some(&self.height_to_opreturn_value), + )?; + + Ok(()) + } +} diff --git a/crates/brk_computer/src/scripts/value/import.rs b/crates/brk_computer/src/scripts/value/import.rs new file mode 100644 index 000000000..a6c4e02a6 --- /dev/null +++ b/crates/brk_computer/src/scripts/value/import.rs @@ -0,0 +1,42 @@ +use brk_error::Result; +use brk_types::Version; +use vecdb::{Database, EagerVec, ImportableVec, IterableCloneableVec}; + +use super::Vecs; +use crate::{ + indexes, + internal::{ComputedValueVecsFromHeight, Source, VecBuilderOptions}, +}; + +impl Vecs { + pub fn forced_import( + db: &Database, + version: Version, + indexes: &indexes::Vecs, + compute_dollars: bool, + ) -> Result { + let v0 = Version::ZERO; + + let height_to_opreturn_value = + EagerVec::forced_import(db, "opreturn_value", version + v0)?; + + let indexes_to_opreturn_value = ComputedValueVecsFromHeight::forced_import( + db, + "opreturn_value", + 
Source::Vec(height_to_opreturn_value.boxed_clone()), + version + v0, + VecBuilderOptions::default() + .add_sum() + .add_cumulative() + .add_average() + .add_minmax(), + compute_dollars, + indexes, + )?; + + Ok(Self { + height_to_opreturn_value, + indexes_to_opreturn_value, + }) + } +} diff --git a/crates/brk_computer/src/scripts/value/mod.rs b/crates/brk_computer/src/scripts/value/mod.rs new file mode 100644 index 000000000..1136f9ebd --- /dev/null +++ b/crates/brk_computer/src/scripts/value/mod.rs @@ -0,0 +1,5 @@ +mod compute; +mod import; +mod vecs; + +pub use vecs::Vecs; diff --git a/crates/brk_computer/src/scripts/value/vecs.rs b/crates/brk_computer/src/scripts/value/vecs.rs new file mode 100644 index 000000000..28f8c7fd1 --- /dev/null +++ b/crates/brk_computer/src/scripts/value/vecs.rs @@ -0,0 +1,11 @@ +use brk_traversable::Traversable; +use brk_types::{Height, Sats}; +use vecdb::{EagerVec, PcoVec}; + +use crate::internal::ComputedValueVecsFromHeight; + +#[derive(Clone, Traversable)] +pub struct Vecs { + pub height_to_opreturn_value: EagerVec>, + pub indexes_to_opreturn_value: ComputedValueVecsFromHeight, +} diff --git a/crates/brk_computer/src/stateful/metrics/price/mod.rs b/crates/brk_computer/src/stateful/metrics/price/mod.rs deleted file mode 100644 index f7f06ebc7..000000000 --- a/crates/brk_computer/src/stateful/metrics/price/mod.rs +++ /dev/null @@ -1,5 +0,0 @@ -mod paid; -mod relative; - -pub use paid::*; -pub use relative::*; diff --git a/crates/brk_computer/src/supply/burned/compute.rs b/crates/brk_computer/src/supply/burned/compute.rs new file mode 100644 index 000000000..a73537c3b --- /dev/null +++ b/crates/brk_computer/src/supply/burned/compute.rs @@ -0,0 +1,115 @@ +use brk_error::Result; +use brk_types::{Height, Sats}; +use vecdb::{AnyStoredVec, AnyVec, Exit, GenericStoredVec, TypedVecIterator, VecIndex}; + +use super::Vecs; +use crate::{blocks, indexes, price, scripts, utils::OptionExt, ComputeIndexes}; + +impl Vecs { + pub fn compute( + &mut 
self, + indexes: &indexes::Vecs, + scripts: &scripts::Vecs, + blocks: &blocks::Vecs, + starting_indexes: &ComputeIndexes, + price: Option<&price::Vecs>, + exit: &Exit, + ) -> Result<()> { + // Validate computed versions against dependencies + let opreturn_dep_version = scripts.value.height_to_opreturn_value.version(); + self.height_to_opreturn + .validate_computed_version_or_reset(opreturn_dep_version)?; + + let unspendable_dep_version = self.height_to_opreturn.version() + + blocks + .rewards + .indexes_to_unclaimed_rewards + .sats + .height + .u() + .version(); + self.height_to_unspendable + .validate_computed_version_or_reset(unspendable_dep_version)?; + + // 1. Copy per-block opreturn values from scripts + let scripts_target = scripts.value.height_to_opreturn_value.len(); + if scripts_target > 0 { + let target_height = Height::from(scripts_target - 1); + let current_len = self.height_to_opreturn.len(); + let starting_height = Height::from(current_len.min(starting_indexes.height.to_usize())); + + if starting_height <= target_height { + let mut opreturn_value_iter = scripts.value.height_to_opreturn_value.into_iter(); + + for h in starting_height.to_usize()..=target_height.to_usize() { + let height = Height::from(h); + let value = opreturn_value_iter.get_unwrap(height); + self.height_to_opreturn.truncate_push(height, value)?; + } + } + } + + self.height_to_opreturn.write()?; + + // 2. 
Compute per-block unspendable = opreturn + unclaimed_rewards + genesis (at height 0) + let opreturn_target = self.height_to_opreturn.len(); + if opreturn_target > 0 { + let target_height = Height::from(opreturn_target - 1); + let current_len = self.height_to_unspendable.len(); + let starting_height = Height::from(current_len.min(starting_indexes.height.to_usize())); + + if starting_height <= target_height { + let mut opreturn_iter = self.height_to_opreturn.into_iter(); + let mut unclaimed_rewards_iter = blocks + .rewards + .indexes_to_unclaimed_rewards + .sats + .height + .u() + .into_iter(); + + for h in starting_height.to_usize()..=target_height.to_usize() { + let height = Height::from(h); + + // Genesis block 50 BTC is unspendable (only at height 0) + let genesis = if height == Height::ZERO { + Sats::FIFTY_BTC + } else { + Sats::ZERO + }; + + // Per-block opreturn value + let opreturn = opreturn_iter.get_unwrap(height); + + // Per-block unclaimed rewards + let unclaimed = unclaimed_rewards_iter.get_unwrap(height); + + let unspendable = genesis + opreturn + unclaimed; + self.height_to_unspendable + .truncate_push(height, unspendable)?; + } + } + } + + self.height_to_unspendable.write()?; + + // Compute index aggregations + self.indexes_to_opreturn.compute_rest( + indexes, + price, + starting_indexes, + exit, + Some(&self.height_to_opreturn), + )?; + + self.indexes_to_unspendable.compute_rest( + indexes, + price, + starting_indexes, + exit, + Some(&self.height_to_unspendable), + )?; + + Ok(()) + } +} diff --git a/crates/brk_computer/src/supply/burned/import.rs b/crates/brk_computer/src/supply/burned/import.rs new file mode 100644 index 000000000..437277c4c --- /dev/null +++ b/crates/brk_computer/src/supply/burned/import.rs @@ -0,0 +1,52 @@ +use brk_error::Result; +use brk_types::Version; +use vecdb::{Database, EagerVec, ImportableVec, IterableCloneableVec}; + +use super::Vecs; +use crate::{ + indexes, + internal::{ComputedValueVecsFromHeight, Source, 
VecBuilderOptions}, +}; + +impl Vecs { + pub fn forced_import( + db: &Database, + version: Version, + indexes: &indexes::Vecs, + compute_dollars: bool, + ) -> Result { + let v0 = Version::ZERO; + + let height_to_opreturn = EagerVec::forced_import(db, "opreturn_supply", version + v0)?; + + let indexes_to_opreturn = ComputedValueVecsFromHeight::forced_import( + db, + "opreturn_supply", + Source::Vec(height_to_opreturn.boxed_clone()), + version + v0, + VecBuilderOptions::default().add_last().add_cumulative(), + compute_dollars, + indexes, + )?; + + let height_to_unspendable = + EagerVec::forced_import(db, "unspendable_supply", version + v0)?; + + let indexes_to_unspendable = ComputedValueVecsFromHeight::forced_import( + db, + "unspendable_supply", + Source::Vec(height_to_unspendable.boxed_clone()), + version + v0, + VecBuilderOptions::default().add_last().add_cumulative(), + compute_dollars, + indexes, + )?; + + Ok(Self { + height_to_opreturn, + height_to_unspendable, + indexes_to_opreturn, + indexes_to_unspendable, + }) + } +} diff --git a/crates/brk_computer/src/supply/burned/mod.rs b/crates/brk_computer/src/supply/burned/mod.rs new file mode 100644 index 000000000..1136f9ebd --- /dev/null +++ b/crates/brk_computer/src/supply/burned/mod.rs @@ -0,0 +1,5 @@ +mod compute; +mod import; +mod vecs; + +pub use vecs::Vecs; diff --git a/crates/brk_computer/src/supply/burned/vecs.rs b/crates/brk_computer/src/supply/burned/vecs.rs new file mode 100644 index 000000000..5234344e3 --- /dev/null +++ b/crates/brk_computer/src/supply/burned/vecs.rs @@ -0,0 +1,14 @@ +use brk_traversable::Traversable; +use brk_types::{Height, Sats}; +use vecdb::{EagerVec, PcoVec}; + +use crate::internal::ComputedValueVecsFromHeight; + +/// Burned/unspendable supply metrics +#[derive(Clone, Traversable)] +pub struct Vecs { + pub height_to_opreturn: EagerVec>, + pub height_to_unspendable: EagerVec>, + pub indexes_to_opreturn: ComputedValueVecsFromHeight, + pub indexes_to_unspendable: 
ComputedValueVecsFromHeight, +} diff --git a/crates/brk_computer/src/supply/circulating/import.rs b/crates/brk_computer/src/supply/circulating/import.rs new file mode 100644 index 000000000..6d9959a49 --- /dev/null +++ b/crates/brk_computer/src/supply/circulating/import.rs @@ -0,0 +1,57 @@ +use brk_types::Version; +use vecdb::{IterableCloneableVec, LazyVecFrom1}; + +use super::Vecs; +use crate::{ + distribution, + internal::{DollarsIdentity, LazyValueVecsFromDateIndex, SatsIdentity, SatsToBitcoin}, +}; + +impl Vecs { + pub fn import(version: Version, distribution: &distribution::Vecs) -> Self { + let v0 = Version::ZERO; + + // Reference distribution's actual circulating supply lazily + let supply_metrics = &distribution.utxo_cohorts.all.metrics.supply; + + let height_to_sats = LazyVecFrom1::init( + "circulating_sats", + version + v0, + supply_metrics.height_to_supply.boxed_clone(), + |height, iter| iter.get(height), + ); + + let height_to_btc = LazyVecFrom1::transformed::( + "circulating_btc", + version + v0, + supply_metrics.height_to_supply.boxed_clone(), + ); + + let height_to_usd = supply_metrics + .height_to_supply_value + .dollars + .as_ref() + .map(|d| { + LazyVecFrom1::init( + "circulating_usd", + version + v0, + d.boxed_clone(), + |height, iter| iter.get(height), + ) + }); + + // Create lazy identity wrapper around the FULL supply (not half!) 
+ let indexes = LazyValueVecsFromDateIndex::from_source::< + SatsIdentity, + SatsToBitcoin, + DollarsIdentity, + >("circulating", &supply_metrics.indexes_to_supply, version + v0); + + Self { + height_to_sats, + height_to_btc, + height_to_usd, + indexes, + } + } +} diff --git a/crates/brk_computer/src/supply/circulating/mod.rs b/crates/brk_computer/src/supply/circulating/mod.rs new file mode 100644 index 000000000..f8623047a --- /dev/null +++ b/crates/brk_computer/src/supply/circulating/mod.rs @@ -0,0 +1,4 @@ +mod import; +mod vecs; + +pub use vecs::Vecs; diff --git a/crates/brk_computer/src/supply/circulating/vecs.rs b/crates/brk_computer/src/supply/circulating/vecs.rs new file mode 100644 index 000000000..cfac77c16 --- /dev/null +++ b/crates/brk_computer/src/supply/circulating/vecs.rs @@ -0,0 +1,14 @@ +use brk_traversable::Traversable; +use brk_types::{Bitcoin, Dollars, Height, Sats}; +use vecdb::LazyVecFrom1; + +use crate::internal::LazyValueVecsFromDateIndex; + +/// Circulating supply - lazy references to distribution's actual supply +#[derive(Clone, Traversable)] +pub struct Vecs { + pub height_to_sats: LazyVecFrom1, + pub height_to_btc: LazyVecFrom1, + pub height_to_usd: Option>, + pub indexes: LazyValueVecsFromDateIndex, +} diff --git a/crates/brk_computer/src/supply/compute.rs b/crates/brk_computer/src/supply/compute.rs new file mode 100644 index 000000000..9997a4965 --- /dev/null +++ b/crates/brk_computer/src/supply/compute.rs @@ -0,0 +1,39 @@ +use brk_error::Result; +use vecdb::Exit; + +use super::Vecs; +use crate::{blocks, distribution, indexes, price, scripts, transactions, ComputeIndexes}; + +impl Vecs { + #[allow(clippy::too_many_arguments)] + pub fn compute( + &mut self, + indexes: &indexes::Vecs, + scripts: &scripts::Vecs, + blocks: &blocks::Vecs, + transactions: &transactions::Vecs, + distribution: &distribution::Vecs, + starting_indexes: &ComputeIndexes, + price: Option<&price::Vecs>, + exit: &Exit, + ) -> Result<()> { + // 1. 
Compute burned/unspendable supply + self.burned + .compute(indexes, scripts, blocks, starting_indexes, price, exit)?; + + // 2. Compute inflation rate + self.inflation + .compute(blocks, distribution, starting_indexes, exit)?; + + // 3. Compute velocity + self.velocity + .compute(transactions, distribution, starting_indexes, exit)?; + + // Note: circulating and market_cap are lazy - no compute needed + + let _lock = exit.lock(); + self.db.compact()?; + + Ok(()) + } +} diff --git a/crates/brk_computer/src/supply/import.rs b/crates/brk_computer/src/supply/import.rs new file mode 100644 index 000000000..bde71b2c1 --- /dev/null +++ b/crates/brk_computer/src/supply/import.rs @@ -0,0 +1,61 @@ +use std::path::Path; + +use brk_error::Result; +use brk_traversable::Traversable; +use brk_types::Version; +use vecdb::{Database, PAGE_SIZE}; + +use super::Vecs; +use crate::{distribution, indexes, price}; + +const VERSION: Version = Version::ZERO; + +impl Vecs { + pub fn forced_import( + parent: &Path, + parent_version: Version, + indexes: &indexes::Vecs, + price: Option<&price::Vecs>, + distribution: &distribution::Vecs, + ) -> Result { + let db = Database::open(&parent.join(super::DB_NAME))?; + db.set_min_len(PAGE_SIZE * 10_000_000)?; + + let version = parent_version + VERSION; + let compute_dollars = price.is_some(); + + // Circulating supply - lazy refs to distribution + let circulating = super::circulating::Vecs::import(version, distribution); + + // Burned/unspendable supply - computed from scripts + let burned = super::burned::Vecs::forced_import(&db, version, indexes, compute_dollars)?; + + // Inflation rate + let inflation = super::inflation::Vecs::forced_import(&db, version, indexes)?; + + // Velocity + let velocity = + super::velocity::Vecs::forced_import(&db, version, indexes, compute_dollars)?; + + // Market cap - lazy refs to supply in USD + let market_cap = super::market_cap::Vecs::import(version, distribution); + + let this = Self { + db, + circulating, + burned, + 
inflation, + velocity, + market_cap, + }; + + this.db.retain_regions( + this.iter_any_exportable() + .flat_map(|v| v.region_names()) + .collect(), + )?; + this.db.compact()?; + + Ok(this) + } +} diff --git a/crates/brk_computer/src/supply/inflation/compute.rs b/crates/brk_computer/src/supply/inflation/compute.rs new file mode 100644 index 000000000..7b6ae0c6b --- /dev/null +++ b/crates/brk_computer/src/supply/inflation/compute.rs @@ -0,0 +1,48 @@ +use brk_error::Result; +use vecdb::Exit; + +use super::Vecs; +use crate::{blocks, distribution, utils::OptionExt, ComputeIndexes}; + +impl Vecs { + pub fn compute( + &mut self, + blocks: &blocks::Vecs, + distribution: &distribution::Vecs, + starting_indexes: &ComputeIndexes, + exit: &Exit, + ) -> Result<()> { + // inflation = daily_subsidy / circulating_supply * 365 * 100 + let circulating_supply = &distribution + .utxo_cohorts + .all + .metrics + .supply + .indexes_to_supply; + + self.indexes.compute_all(starting_indexes, exit, |v| { + v.compute_transform2( + starting_indexes.dateindex, + blocks + .rewards + .indexes_to_subsidy + .sats + .dateindex + .unwrap_sum(), + circulating_supply.sats.dateindex.u(), + |(i, subsidy_1d_sum, supply, ..)| { + let inflation = if *supply > 0 { + 365.0 * *subsidy_1d_sum as f64 / *supply as f64 * 100.0 + } else { + 0.0 + }; + (i, inflation.into()) + }, + exit, + )?; + Ok(()) + })?; + + Ok(()) + } +} diff --git a/crates/brk_computer/src/supply/inflation/import.rs b/crates/brk_computer/src/supply/inflation/import.rs new file mode 100644 index 000000000..637f3a61f --- /dev/null +++ b/crates/brk_computer/src/supply/inflation/import.rs @@ -0,0 +1,32 @@ +use brk_error::Result; +use brk_types::Version; +use vecdb::Database; + +use super::Vecs; +use crate::{ + indexes, + internal::{ComputedVecsFromDateIndex, Source, VecBuilderOptions}, +}; + +impl Vecs { + pub fn forced_import( + db: &Database, + version: Version, + indexes: &indexes::Vecs, + ) -> Result { + let v0 = Version::ZERO; + + let 
indexes_to_inflation_rate = ComputedVecsFromDateIndex::forced_import( + db, + "inflation_rate", + Source::Compute, + version + v0, + indexes, + VecBuilderOptions::default().add_average(), + )?; + + Ok(Self { + indexes: indexes_to_inflation_rate, + }) + } +} diff --git a/crates/brk_computer/src/supply/inflation/mod.rs b/crates/brk_computer/src/supply/inflation/mod.rs new file mode 100644 index 000000000..1136f9ebd --- /dev/null +++ b/crates/brk_computer/src/supply/inflation/mod.rs @@ -0,0 +1,5 @@ +mod compute; +mod import; +mod vecs; + +pub use vecs::Vecs; diff --git a/crates/brk_computer/src/supply/inflation/vecs.rs b/crates/brk_computer/src/supply/inflation/vecs.rs new file mode 100644 index 000000000..02bef6cb7 --- /dev/null +++ b/crates/brk_computer/src/supply/inflation/vecs.rs @@ -0,0 +1,10 @@ +use brk_traversable::Traversable; +use brk_types::StoredF32; + +use crate::internal::ComputedVecsFromDateIndex; + +/// Inflation rate metrics +#[derive(Clone, Traversable)] +pub struct Vecs { + pub indexes: ComputedVecsFromDateIndex, +} diff --git a/crates/brk_computer/src/supply/market_cap/import.rs b/crates/brk_computer/src/supply/market_cap/import.rs new file mode 100644 index 000000000..a25af611e --- /dev/null +++ b/crates/brk_computer/src/supply/market_cap/import.rs @@ -0,0 +1,41 @@ +use brk_types::Version; +use vecdb::{IterableCloneableVec, LazyVecFrom1}; + +use super::Vecs; +use crate::{ + distribution, + internal::{DollarsIdentity, LazyVecsFromDateIndex}, +}; + +impl Vecs { + pub fn import(version: Version, distribution: &distribution::Vecs) -> Self { + let v0 = Version::ZERO; + let supply_metrics = &distribution.utxo_cohorts.all.metrics.supply; + + // Market cap by height (lazy from distribution's supply in USD) + let height = supply_metrics + .height_to_supply_value + .dollars + .as_ref() + .map(|d| { + LazyVecFrom1::init( + "market_cap", + version + v0, + d.boxed_clone(), + |height, iter| iter.get(height), + ) + }); + + // Market cap by indexes (lazy from 
distribution's supply in USD) + let indexes = supply_metrics.indexes_to_supply.dollars.as_ref().map(|d| { + LazyVecsFromDateIndex::from_computed::( + "market_cap", + version + v0, + d.dateindex.as_ref().map(|v| v.boxed_clone()), + d, + ) + }); + + Self { height, indexes } + } +} diff --git a/crates/brk_computer/src/supply/market_cap/mod.rs b/crates/brk_computer/src/supply/market_cap/mod.rs new file mode 100644 index 000000000..f8623047a --- /dev/null +++ b/crates/brk_computer/src/supply/market_cap/mod.rs @@ -0,0 +1,4 @@ +mod import; +mod vecs; + +pub use vecs::Vecs; diff --git a/crates/brk_computer/src/supply/market_cap/vecs.rs b/crates/brk_computer/src/supply/market_cap/vecs.rs new file mode 100644 index 000000000..6ebfe490d --- /dev/null +++ b/crates/brk_computer/src/supply/market_cap/vecs.rs @@ -0,0 +1,13 @@ +use brk_traversable::Traversable; +use brk_types::{Dollars, Height}; +use vecdb::LazyVecFrom1; + +use crate::internal::LazyVecsFromDateIndex; + +/// Market cap metrics - lazy references to supply in USD +/// (market_cap = circulating_supply * price, already computed in distribution) +#[derive(Clone, Traversable)] +pub struct Vecs { + pub height: Option>, + pub indexes: Option>, +} diff --git a/crates/brk_computer/src/supply/mod.rs b/crates/brk_computer/src/supply/mod.rs new file mode 100644 index 000000000..7932d598b --- /dev/null +++ b/crates/brk_computer/src/supply/mod.rs @@ -0,0 +1,13 @@ +pub mod burned; +pub mod circulating; +pub mod inflation; +pub mod market_cap; +pub mod velocity; + +mod compute; +mod import; +mod vecs; + +pub use vecs::Vecs; + +pub const DB_NAME: &str = "supply"; diff --git a/crates/brk_computer/src/supply/vecs.rs b/crates/brk_computer/src/supply/vecs.rs new file mode 100644 index 000000000..b0048b540 --- /dev/null +++ b/crates/brk_computer/src/supply/vecs.rs @@ -0,0 +1,24 @@ +use brk_traversable::Traversable; +use vecdb::Database; + +use super::{burned, circulating, inflation, market_cap, velocity}; + +/// Supply metrics module 
+/// +/// This module owns all supply-related metrics: +/// - circulating: Lazy references to distribution's actual circulating supply +/// - burned: Cumulative opreturn and unspendable supply +/// - inflation: Inflation rate derived from supply +/// - velocity: BTC and USD velocity metrics +/// - market_cap: Lazy references to supply in USD (circulating * price) +#[derive(Clone, Traversable)] +pub struct Vecs { + #[traversable(skip)] + pub(crate) db: Database, + + pub circulating: circulating::Vecs, + pub burned: burned::Vecs, + pub inflation: inflation::Vecs, + pub velocity: velocity::Vecs, + pub market_cap: market_cap::Vecs, +} diff --git a/crates/brk_computer/src/supply/velocity/compute.rs b/crates/brk_computer/src/supply/velocity/compute.rs new file mode 100644 index 000000000..ea42c88db --- /dev/null +++ b/crates/brk_computer/src/supply/velocity/compute.rs @@ -0,0 +1,61 @@ +use brk_error::Result; +use vecdb::Exit; + +use super::Vecs; +use crate::{distribution, transactions, utils::OptionExt, ComputeIndexes}; + +impl Vecs { + pub fn compute( + &mut self, + transactions: &transactions::Vecs, + distribution: &distribution::Vecs, + starting_indexes: &ComputeIndexes, + exit: &Exit, + ) -> Result<()> { + // velocity = annualized_volume / circulating_supply + let circulating_supply = &distribution + .utxo_cohorts + .all + .metrics + .supply + .indexes_to_supply; + + // BTC velocity + self.indexes_to_btc + .compute_all(starting_indexes, exit, |v| { + v.compute_divide( + starting_indexes.dateindex, + transactions + .volume + .indexes_to_annualized_volume_btc + .dateindex + .u(), + circulating_supply.bitcoin.dateindex.u(), + exit, + )?; + Ok(()) + })?; + + // USD velocity + if let Some(usd_velocity) = self.indexes_to_usd.as_mut() + && let Some(volume_usd) = transactions + .volume + .indexes_to_annualized_volume_usd + .dateindex + .as_ref() + && let Some(supply_usd) = circulating_supply.dollars.as_ref() + { + usd_velocity.compute_all(starting_indexes, exit, |v| { + 
v.compute_divide( + starting_indexes.dateindex, + volume_usd, + supply_usd.dateindex.u(), + exit, + )?; + Ok(()) + })?; + } + + Ok(()) + } +} diff --git a/crates/brk_computer/src/supply/velocity/import.rs b/crates/brk_computer/src/supply/velocity/import.rs new file mode 100644 index 000000000..8fda54071 --- /dev/null +++ b/crates/brk_computer/src/supply/velocity/import.rs @@ -0,0 +1,46 @@ +use brk_error::Result; +use brk_types::Version; +use vecdb::Database; + +use super::Vecs; +use crate::{ + indexes, + internal::{ComputedVecsFromDateIndex, Source, VecBuilderOptions}, +}; + +impl Vecs { + pub fn forced_import( + db: &Database, + version: Version, + indexes: &indexes::Vecs, + compute_dollars: bool, + ) -> Result { + let v0 = Version::ZERO; + + let indexes_to_btc = ComputedVecsFromDateIndex::forced_import( + db, + "btc_velocity", + Source::Compute, + version + v0, + indexes, + VecBuilderOptions::default().add_average(), + )?; + + let indexes_to_usd = compute_dollars.then(|| { + ComputedVecsFromDateIndex::forced_import( + db, + "usd_velocity", + Source::Compute, + version + v0, + indexes, + VecBuilderOptions::default().add_average(), + ) + .unwrap() + }); + + Ok(Self { + indexes_to_btc, + indexes_to_usd, + }) + } +} diff --git a/crates/brk_computer/src/supply/velocity/mod.rs b/crates/brk_computer/src/supply/velocity/mod.rs new file mode 100644 index 000000000..1136f9ebd --- /dev/null +++ b/crates/brk_computer/src/supply/velocity/mod.rs @@ -0,0 +1,5 @@ +mod compute; +mod import; +mod vecs; + +pub use vecs::Vecs; diff --git a/crates/brk_computer/src/supply/velocity/vecs.rs b/crates/brk_computer/src/supply/velocity/vecs.rs new file mode 100644 index 000000000..1e4e4ec6c --- /dev/null +++ b/crates/brk_computer/src/supply/velocity/vecs.rs @@ -0,0 +1,11 @@ +use brk_traversable::Traversable; +use brk_types::StoredF64; + +use crate::internal::ComputedVecsFromDateIndex; + +/// Velocity metrics (annualized volume / circulating supply) +#[derive(Clone, Traversable)] +pub struct 
Vecs { + pub indexes_to_btc: ComputedVecsFromDateIndex, + pub indexes_to_usd: Option>, +} diff --git a/crates/brk_computer/src/traits.rs b/crates/brk_computer/src/traits.rs index a899f3666..dccd4d75a 100644 --- a/crates/brk_computer/src/traits.rs +++ b/crates/brk_computer/src/traits.rs @@ -32,9 +32,7 @@ impl ComputeDCAStackViaLen for EagerVec> { len: usize, exit: &Exit, ) -> Result<()> { - self.validate_computed_version_or_reset( - Version::ZERO + self.inner_version() + closes.version(), - )?; + self.validate_computed_version_or_reset(closes.version())?; let index = max_from.to_usize().min(self.len()); @@ -74,7 +72,8 @@ impl ComputeDCAStackViaLen for EagerVec> { self.truncate_push_at(i, stack) })?; - self.safe_write(exit)?; + let _lock = exit.lock(); + self.write()?; Ok(()) } @@ -86,9 +85,7 @@ impl ComputeDCAStackViaLen for EagerVec> { from: DateIndex, exit: &Exit, ) -> Result<()> { - self.validate_computed_version_or_reset( - Version::ZERO + self.inner_version() + closes.version(), - )?; + self.validate_computed_version_or_reset(closes.version())?; let from = from.to_usize(); let index = max_from.min(DateIndex::from(self.len())); @@ -118,7 +115,8 @@ impl ComputeDCAStackViaLen for EagerVec> { self.truncate_push_at(i, stack) })?; - self.safe_write(exit)?; + let _lock = exit.lock(); + self.write()?; Ok(()) } @@ -150,9 +148,7 @@ impl ComputeDCAAveragePriceViaLen for EagerVec> { len: usize, exit: &Exit, ) -> Result<()> { - self.validate_computed_version_or_reset( - Version::ONE + self.inner_version() + stacks.version(), - )?; + self.validate_computed_version_or_reset(Version::ONE + stacks.version())?; let index = max_from.min(DateIndex::from(self.len())); @@ -176,7 +172,8 @@ impl ComputeDCAAveragePriceViaLen for EagerVec> { self.truncate_push_at(i, avg_price) })?; - self.safe_write(exit)?; + let _lock = exit.lock(); + self.write()?; Ok(()) } @@ -188,9 +185,7 @@ impl ComputeDCAAveragePriceViaLen for EagerVec> { from: DateIndex, exit: &Exit, ) -> Result<()> { - 
self.validate_computed_version_or_reset( - Version::ZERO + self.inner_version() + stacks.version(), - )?; + self.validate_computed_version_or_reset(stacks.version())?; let index = max_from.min(DateIndex::from(self.len())); @@ -208,7 +203,8 @@ impl ComputeDCAAveragePriceViaLen for EagerVec> { self.truncate_push_at(i, avg_price) })?; - self.safe_write(exit)?; + let _lock = exit.lock(); + self.write()?; Ok(()) } diff --git a/crates/brk_computer/src/transactions/compute.rs b/crates/brk_computer/src/transactions/compute.rs new file mode 100644 index 000000000..99eaa1600 --- /dev/null +++ b/crates/brk_computer/src/transactions/compute.rs @@ -0,0 +1,61 @@ +use brk_error::Result; +use brk_indexer::Indexer; +use vecdb::Exit; + +use crate::{indexes, inputs, outputs, price, ComputeIndexes}; + +use super::Vecs; + +impl Vecs { + #[allow(clippy::too_many_arguments)] + pub fn compute( + &mut self, + indexer: &Indexer, + indexes: &indexes::Vecs, + inputs: &inputs::Vecs, + outputs: &outputs::Vecs, + starting_indexes: &ComputeIndexes, + price: Option<&price::Vecs>, + exit: &Exit, + ) -> Result<()> { + // Count computes first + self.count + .compute(indexer, indexes, starting_indexes, exit)?; + + // Versions depends on count + self.versions + .compute(indexer, indexes, starting_indexes, exit)?; + + // Size computes next + self.size + .compute(indexer, indexes, starting_indexes, exit)?; + + // Fees depends on size + self.fees.compute( + indexer, + indexes, + inputs, + &self.size, + starting_indexes, + price, + exit, + )?; + + // Volume depends on fees and input/output counts + self.volume.compute( + indexer, + indexes, + &self.count, + &self.fees, + &inputs.count, + &outputs.count, + starting_indexes, + price, + exit, + )?; + + let _lock = exit.lock(); + self.db.compact()?; + Ok(()) + } +} diff --git a/crates/brk_computer/src/transactions/count/compute.rs b/crates/brk_computer/src/transactions/count/compute.rs new file mode 100644 index 000000000..e8a2c1fd4 --- /dev/null +++ 
b/crates/brk_computer/src/transactions/count/compute.rs @@ -0,0 +1,29 @@ +use brk_error::Result; +use brk_indexer::Indexer; +use vecdb::Exit; + +use super::Vecs; +use crate::{indexes, ComputeIndexes}; + +impl Vecs { + pub fn compute( + &mut self, + indexer: &Indexer, + indexes: &indexes::Vecs, + starting_indexes: &ComputeIndexes, + exit: &Exit, + ) -> Result<()> { + self.indexes_to_tx_count + .compute_all(indexes, starting_indexes, exit, |v| { + v.compute_count_from_indexes( + starting_indexes.height, + &indexer.vecs.tx.height_to_first_txindex, + &indexer.vecs.tx.txindex_to_txid, + exit, + )?; + Ok(()) + })?; + + Ok(()) + } +} diff --git a/crates/brk_computer/src/transactions/count/import.rs b/crates/brk_computer/src/transactions/count/import.rs new file mode 100644 index 000000000..84dbd0647 --- /dev/null +++ b/crates/brk_computer/src/transactions/count/import.rs @@ -0,0 +1,55 @@ +use brk_error::Result; +use brk_indexer::Indexer; +use brk_types::{StoredBool, TxIndex, Version}; +use vecdb::{Database, IterableCloneableVec, LazyVecFrom2}; + +use super::Vecs; +use crate::{ + indexes, + internal::{ComputedVecsFromHeight, Source, VecBuilderOptions}, +}; + +impl Vecs { + pub fn forced_import( + db: &Database, + version: Version, + indexer: &Indexer, + indexes: &indexes::Vecs, + ) -> Result { + let v0 = Version::ZERO; + + let full_stats = || { + VecBuilderOptions::default() + .add_average() + .add_minmax() + .add_percentiles() + .add_sum() + .add_cumulative() + }; + + let txindex_to_is_coinbase = LazyVecFrom2::init( + "is_coinbase", + version + v0, + indexer.vecs.tx.txindex_to_height.boxed_clone(), + indexer.vecs.tx.height_to_first_txindex.boxed_clone(), + |index: TxIndex, txindex_to_height_iter, height_to_first_txindex_iter| { + txindex_to_height_iter.get(index).map(|height| { + let txindex = height_to_first_txindex_iter.get_unwrap(height); + StoredBool::from(index == txindex) + }) + }, + ); + + Ok(Self { + indexes_to_tx_count: ComputedVecsFromHeight::forced_import( + 
db, + "tx_count", + Source::Compute, + version + v0, + indexes, + full_stats(), + )?, + txindex_to_is_coinbase, + }) + } +} diff --git a/crates/brk_computer/src/transactions/count/mod.rs b/crates/brk_computer/src/transactions/count/mod.rs new file mode 100644 index 000000000..1136f9ebd --- /dev/null +++ b/crates/brk_computer/src/transactions/count/mod.rs @@ -0,0 +1,5 @@ +mod compute; +mod import; +mod vecs; + +pub use vecs::Vecs; diff --git a/crates/brk_computer/src/transactions/count/vecs.rs b/crates/brk_computer/src/transactions/count/vecs.rs new file mode 100644 index 000000000..b1c5e30e2 --- /dev/null +++ b/crates/brk_computer/src/transactions/count/vecs.rs @@ -0,0 +1,11 @@ +use brk_traversable::Traversable; +use brk_types::{Height, StoredBool, StoredU64, TxIndex}; +use vecdb::LazyVecFrom2; + +use crate::internal::ComputedVecsFromHeight; + +#[derive(Clone, Traversable)] +pub struct Vecs { + pub indexes_to_tx_count: ComputedVecsFromHeight, + pub txindex_to_is_coinbase: LazyVecFrom2, +} diff --git a/crates/brk_computer/src/transactions/fees/compute.rs b/crates/brk_computer/src/transactions/fees/compute.rs new file mode 100644 index 000000000..ef3682c1c --- /dev/null +++ b/crates/brk_computer/src/transactions/fees/compute.rs @@ -0,0 +1,80 @@ +use brk_error::Result; +use brk_indexer::Indexer; +use brk_types::{FeeRate, Sats}; +use vecdb::{Exit, unlikely}; + +use super::Vecs; +use super::super::size; +use crate::{indexes, inputs, price, ComputeIndexes}; + +impl Vecs { + #[allow(clippy::too_many_arguments)] + pub fn compute( + &mut self, + indexer: &Indexer, + indexes: &indexes::Vecs, + txins: &inputs::Vecs, + size_vecs: &size::Vecs, + starting_indexes: &ComputeIndexes, + price: Option<&price::Vecs>, + exit: &Exit, + ) -> Result<()> { + self.txindex_to_input_value.compute_sum_from_indexes( + starting_indexes.txindex, + &indexer.vecs.tx.txindex_to_first_txinindex, + &indexes.transaction.txindex_to_input_count, + &txins.spent.txinindex_to_value, + exit, + )?; + + 
self.txindex_to_output_value.compute_sum_from_indexes( + starting_indexes.txindex, + &indexer.vecs.tx.txindex_to_first_txoutindex, + &indexes.transaction.txindex_to_output_count, + &indexer.vecs.txout.txoutindex_to_value, + exit, + )?; + + self.txindex_to_fee.compute_transform2( + starting_indexes.txindex, + &self.txindex_to_input_value, + &self.txindex_to_output_value, + |(i, input, output, ..)| { + let fee = if unlikely(input.is_max()) { + Sats::ZERO + } else { + input - output + }; + (i, fee) + }, + exit, + )?; + + self.txindex_to_fee_rate.compute_transform2( + starting_indexes.txindex, + &self.txindex_to_fee, + &size_vecs.txindex_to_vsize, + |(txindex, fee, vsize, ..)| (txindex, FeeRate::from((fee, vsize))), + exit, + )?; + + self.indexes_to_fee.compute_rest( + indexer, + indexes, + starting_indexes, + exit, + Some(&self.txindex_to_fee), + price, + )?; + + self.indexes_to_fee_rate.compute_rest( + indexer, + indexes, + starting_indexes, + exit, + Some(&self.txindex_to_fee_rate), + )?; + + Ok(()) + } +} diff --git a/crates/brk_computer/src/transactions/fees/import.rs b/crates/brk_computer/src/transactions/fees/import.rs new file mode 100644 index 000000000..38ebbbbf6 --- /dev/null +++ b/crates/brk_computer/src/transactions/fees/import.rs @@ -0,0 +1,64 @@ +use brk_error::Result; +use brk_indexer::Indexer; +use brk_types::Version; +use vecdb::{Database, EagerVec, ImportableVec, IterableCloneableVec}; + +use super::Vecs; +use crate::{ + indexes, price, + internal::{ComputedValueVecsFromTxindex, ComputedVecsFromTxindex, Source, VecBuilderOptions}, +}; + +impl Vecs { + pub fn forced_import( + db: &Database, + version: Version, + indexer: &Indexer, + indexes: &indexes::Vecs, + price: Option<&price::Vecs>, + ) -> Result { + let v0 = Version::ZERO; + + let stats = || { + VecBuilderOptions::default() + .add_average() + .add_minmax() + .add_percentiles() + }; + + let txindex_to_input_value = EagerVec::forced_import(db, "input_value", version + v0)?; + let 
txindex_to_output_value = EagerVec::forced_import(db, "output_value", version + v0)?; + let txindex_to_fee = EagerVec::forced_import(db, "fee", version + v0)?; + let txindex_to_fee_rate = EagerVec::forced_import(db, "fee_rate", version + v0)?; + + Ok(Self { + txindex_to_input_value, + txindex_to_output_value, + txindex_to_fee: txindex_to_fee.clone(), + txindex_to_fee_rate: txindex_to_fee_rate.clone(), + indexes_to_fee: ComputedValueVecsFromTxindex::forced_import( + db, + "fee", + indexer, + indexes, + Source::Vec(txindex_to_fee.boxed_clone()), + version + v0, + price, + VecBuilderOptions::default() + .add_sum() + .add_cumulative() + .add_percentiles() + .add_minmax() + .add_average(), + )?, + indexes_to_fee_rate: ComputedVecsFromTxindex::forced_import( + db, + "fee_rate", + Source::Vec(txindex_to_fee_rate.boxed_clone()), + version + v0, + indexes, + stats(), + )?, + }) + } +} diff --git a/crates/brk_computer/src/transactions/fees/mod.rs b/crates/brk_computer/src/transactions/fees/mod.rs new file mode 100644 index 000000000..1136f9ebd --- /dev/null +++ b/crates/brk_computer/src/transactions/fees/mod.rs @@ -0,0 +1,5 @@ +mod compute; +mod import; +mod vecs; + +pub use vecs::Vecs; diff --git a/crates/brk_computer/src/transactions/fees/vecs.rs b/crates/brk_computer/src/transactions/fees/vecs.rs new file mode 100644 index 000000000..f5fca6697 --- /dev/null +++ b/crates/brk_computer/src/transactions/fees/vecs.rs @@ -0,0 +1,15 @@ +use brk_traversable::Traversable; +use brk_types::{FeeRate, Sats, TxIndex}; +use vecdb::{EagerVec, PcoVec}; + +use crate::internal::{ComputedValueVecsFromTxindex, ComputedVecsFromTxindex}; + +#[derive(Clone, Traversable)] +pub struct Vecs { + pub txindex_to_input_value: EagerVec>, + pub txindex_to_output_value: EagerVec>, + pub txindex_to_fee: EagerVec>, + pub txindex_to_fee_rate: EagerVec>, + pub indexes_to_fee: ComputedValueVecsFromTxindex, + pub indexes_to_fee_rate: ComputedVecsFromTxindex, +} diff --git 
a/crates/brk_computer/src/chain/import.rs b/crates/brk_computer/src/transactions/import.rs similarity index 58% rename from crates/brk_computer/src/chain/import.rs rename to crates/brk_computer/src/transactions/import.rs index ab8a8f450..04b8abcef 100644 --- a/crates/brk_computer/src/chain/import.rs +++ b/crates/brk_computer/src/transactions/import.rs @@ -8,10 +8,7 @@ use vecdb::{Database, PAGE_SIZE}; use crate::{indexes, price}; -use super::{ - BlockVecs, CoinbaseVecs, EpochVecs, MiningVecs, OutputTypeVecs, TransactionVecs, Vecs, - VolumeVecs, -}; +use super::{CountVecs, FeesVecs, SizeVecs, Vecs, VersionsVecs, VolumeVecs}; impl Vecs { pub fn forced_import( @@ -27,22 +24,18 @@ impl Vecs { let version = parent_version + Version::ZERO; let compute_dollars = price.is_some(); - let block = BlockVecs::forced_import(&db, version, indexer, indexes)?; - let epoch = EpochVecs::forced_import(&db, version, indexes)?; - let mining = MiningVecs::forced_import(&db, version, indexer, indexes)?; - let coinbase = CoinbaseVecs::forced_import(&db, version, indexes, compute_dollars)?; - let transaction = TransactionVecs::forced_import(&db, version, indexer, indexes, price)?; - let output_type = OutputTypeVecs::forced_import(&db, version, indexes)?; + let count = CountVecs::forced_import(&db, version, indexer, indexes)?; + let size = SizeVecs::forced_import(&db, version, indexer, indexes)?; + let fees = FeesVecs::forced_import(&db, version, indexer, indexes, price)?; + let versions = VersionsVecs::forced_import(&db, version, indexes)?; let volume = VolumeVecs::forced_import(&db, version, indexes, compute_dollars)?; let this = Self { db, - block, - epoch, - mining, - coinbase, - transaction, - output_type, + count, + size, + fees, + versions, volume, }; diff --git a/crates/brk_computer/src/transactions/mod.rs b/crates/brk_computer/src/transactions/mod.rs new file mode 100644 index 000000000..6d95959b0 --- /dev/null +++ b/crates/brk_computer/src/transactions/mod.rs @@ -0,0 +1,31 @@ +pub 
mod count; +pub mod fees; +pub mod size; +pub mod versions; +pub mod volume; + +mod compute; +mod import; + +use brk_traversable::Traversable; +use vecdb::Database; + +pub use count::Vecs as CountVecs; +pub use fees::Vecs as FeesVecs; +pub use size::Vecs as SizeVecs; +pub use versions::Vecs as VersionsVecs; +pub use volume::Vecs as VolumeVecs; + +pub const DB_NAME: &str = "transactions"; + +#[derive(Clone, Traversable)] +pub struct Vecs { + #[traversable(skip)] + pub(crate) db: Database, + + pub count: CountVecs, + pub size: SizeVecs, + pub fees: FeesVecs, + pub versions: VersionsVecs, + pub volume: VolumeVecs, +} diff --git a/crates/brk_computer/src/transactions/size/compute.rs b/crates/brk_computer/src/transactions/size/compute.rs new file mode 100644 index 000000000..ec89eef25 --- /dev/null +++ b/crates/brk_computer/src/transactions/size/compute.rs @@ -0,0 +1,34 @@ +use brk_error::Result; +use brk_indexer::Indexer; +use vecdb::Exit; + +use super::Vecs; +use crate::{indexes, ComputeIndexes}; + +impl Vecs { + pub fn compute( + &mut self, + indexer: &Indexer, + indexes: &indexes::Vecs, + starting_indexes: &ComputeIndexes, + exit: &Exit, + ) -> Result<()> { + self.indexes_to_tx_weight.compute_rest( + indexer, + indexes, + starting_indexes, + exit, + Some(&self.txindex_to_weight), + )?; + + self.indexes_to_tx_vsize.compute_rest( + indexer, + indexes, + starting_indexes, + exit, + Some(&self.txindex_to_vsize), + )?; + + Ok(()) + } +} diff --git a/crates/brk_computer/src/transactions/size/import.rs b/crates/brk_computer/src/transactions/size/import.rs new file mode 100644 index 000000000..ceef37540 --- /dev/null +++ b/crates/brk_computer/src/transactions/size/import.rs @@ -0,0 +1,78 @@ +use brk_error::Result; +use brk_indexer::Indexer; +use brk_types::{TxIndex, VSize, Version, Weight}; +use vecdb::{Database, IterableCloneableVec, LazyVecFrom2, VecIndex}; + +use super::Vecs; +use crate::{ + indexes, + internal::{ComputedVecsFromTxindex, Source, VecBuilderOptions}, +}; + 
+impl Vecs { + pub fn forced_import( + db: &Database, + version: Version, + indexer: &Indexer, + indexes: &indexes::Vecs, + ) -> Result { + let v0 = Version::ZERO; + + let stats = || { + VecBuilderOptions::default() + .add_average() + .add_minmax() + .add_percentiles() + }; + + let txindex_to_weight = LazyVecFrom2::init( + "weight", + version + v0, + indexer.vecs.tx.txindex_to_base_size.boxed_clone(), + indexer.vecs.tx.txindex_to_total_size.boxed_clone(), + |index: TxIndex, txindex_to_base_size_iter, txindex_to_total_size_iter| { + let index = index.to_usize(); + txindex_to_base_size_iter.get_at(index).map(|base_size| { + let total_size = txindex_to_total_size_iter.get_at_unwrap(index); + Weight::from_sizes(*base_size, *total_size) + }) + }, + ); + + // Derive directly from eager sources to avoid Lazy <- Lazy + let txindex_to_vsize = LazyVecFrom2::init( + "vsize", + version + v0, + indexer.vecs.tx.txindex_to_base_size.boxed_clone(), + indexer.vecs.tx.txindex_to_total_size.boxed_clone(), + |index: TxIndex, txindex_to_base_size_iter, txindex_to_total_size_iter| { + let index = index.to_usize(); + txindex_to_base_size_iter.get_at(index).map(|base_size| { + let total_size = txindex_to_total_size_iter.get_at_unwrap(index); + VSize::from(Weight::from_sizes(*base_size, *total_size)) + }) + }, + ); + + Ok(Self { + indexes_to_tx_vsize: ComputedVecsFromTxindex::forced_import( + db, + "tx_vsize", + Source::Vec(txindex_to_vsize.boxed_clone()), + version + v0, + indexes, + stats(), + )?, + indexes_to_tx_weight: ComputedVecsFromTxindex::forced_import( + db, + "tx_weight", + Source::Vec(txindex_to_weight.boxed_clone()), + version + v0, + indexes, + stats(), + )?, + txindex_to_vsize, + txindex_to_weight, + }) + } +} diff --git a/crates/brk_computer/src/transactions/size/mod.rs b/crates/brk_computer/src/transactions/size/mod.rs new file mode 100644 index 000000000..1136f9ebd --- /dev/null +++ b/crates/brk_computer/src/transactions/size/mod.rs @@ -0,0 +1,5 @@ +mod compute; +mod 
import; +mod vecs; + +pub use vecs::Vecs; diff --git a/crates/brk_computer/src/transactions/size/vecs.rs b/crates/brk_computer/src/transactions/size/vecs.rs new file mode 100644 index 000000000..d4a49c568 --- /dev/null +++ b/crates/brk_computer/src/transactions/size/vecs.rs @@ -0,0 +1,14 @@ +use brk_traversable::Traversable; +use brk_types::{StoredU32, TxIndex, VSize, Weight}; +use vecdb::LazyVecFrom2; + +use crate::internal::ComputedVecsFromTxindex; + +#[derive(Clone, Traversable)] +pub struct Vecs { + pub indexes_to_tx_vsize: ComputedVecsFromTxindex, + pub indexes_to_tx_weight: ComputedVecsFromTxindex, + // Both derive directly from eager sources (base_size, total_size) to avoid Lazy <- Lazy + pub txindex_to_vsize: LazyVecFrom2, + pub txindex_to_weight: LazyVecFrom2, +} diff --git a/crates/brk_computer/src/transactions/versions/compute.rs b/crates/brk_computer/src/transactions/versions/compute.rs new file mode 100644 index 000000000..7866ae3fb --- /dev/null +++ b/crates/brk_computer/src/transactions/versions/compute.rs @@ -0,0 +1,40 @@ +use brk_error::Result; +use brk_indexer::Indexer; +use brk_types::{StoredU64, TxVersion}; +use vecdb::{Exit, TypedVecIterator}; + +use super::Vecs; +use crate::{indexes, internal::ComputedVecsFromHeight, ComputeIndexes}; + +impl Vecs { + pub fn compute( + &mut self, + indexer: &Indexer, + indexes: &indexes::Vecs, + starting_indexes: &ComputeIndexes, + exit: &Exit, + ) -> Result<()> { + let compute_indexes_to_tx_vany = + |indexes_to_tx_vany: &mut ComputedVecsFromHeight, txversion: TxVersion| { + let mut txindex_to_txversion_iter = indexer.vecs.tx.txindex_to_txversion.iter()?; + indexes_to_tx_vany.compute_all(indexes, starting_indexes, exit, |vec| { + vec.compute_filtered_count_from_indexes( + starting_indexes.height, + &indexer.vecs.tx.height_to_first_txindex, + &indexer.vecs.tx.txindex_to_txid, + |txindex| { + let v = txindex_to_txversion_iter.get_unwrap(txindex); + v == txversion + }, + exit, + )?; + Ok(()) + }) + }; + 
compute_indexes_to_tx_vany(&mut self.indexes_to_tx_v1, TxVersion::ONE)?; + compute_indexes_to_tx_vany(&mut self.indexes_to_tx_v2, TxVersion::TWO)?; + compute_indexes_to_tx_vany(&mut self.indexes_to_tx_v3, TxVersion::THREE)?; + + Ok(()) + } +} diff --git a/crates/brk_computer/src/transactions/versions/import.rs b/crates/brk_computer/src/transactions/versions/import.rs new file mode 100644 index 000000000..5cdfe4122 --- /dev/null +++ b/crates/brk_computer/src/transactions/versions/import.rs @@ -0,0 +1,47 @@ +use brk_error::Result; +use brk_types::Version; +use vecdb::Database; + +use super::Vecs; +use crate::{ + indexes, + internal::{ComputedVecsFromHeight, Source, VecBuilderOptions}, +}; + +impl Vecs { + pub fn forced_import( + db: &Database, + version: Version, + indexes: &indexes::Vecs, + ) -> Result { + let v0 = Version::ZERO; + let sum_cum = || VecBuilderOptions::default().add_sum().add_cumulative(); + + Ok(Self { + indexes_to_tx_v1: ComputedVecsFromHeight::forced_import( + db, + "tx_v1", + Source::Compute, + version + v0, + indexes, + sum_cum(), + )?, + indexes_to_tx_v2: ComputedVecsFromHeight::forced_import( + db, + "tx_v2", + Source::Compute, + version + v0, + indexes, + sum_cum(), + )?, + indexes_to_tx_v3: ComputedVecsFromHeight::forced_import( + db, + "tx_v3", + Source::Compute, + version + v0, + indexes, + sum_cum(), + )?, + }) + } +} diff --git a/crates/brk_computer/src/transactions/versions/mod.rs b/crates/brk_computer/src/transactions/versions/mod.rs new file mode 100644 index 000000000..1136f9ebd --- /dev/null +++ b/crates/brk_computer/src/transactions/versions/mod.rs @@ -0,0 +1,5 @@ +mod compute; +mod import; +mod vecs; + +pub use vecs::Vecs; diff --git a/crates/brk_computer/src/transactions/versions/vecs.rs b/crates/brk_computer/src/transactions/versions/vecs.rs new file mode 100644 index 000000000..acb9e4243 --- /dev/null +++ b/crates/brk_computer/src/transactions/versions/vecs.rs @@ -0,0 +1,11 @@ +use brk_traversable::Traversable; +use 
brk_types::StoredU64; + +use crate::internal::ComputedVecsFromHeight; + +#[derive(Clone, Traversable)] +pub struct Vecs { + pub indexes_to_tx_v1: ComputedVecsFromHeight, + pub indexes_to_tx_v2: ComputedVecsFromHeight, + pub indexes_to_tx_v3: ComputedVecsFromHeight, +} diff --git a/crates/brk_computer/src/chain/volume/compute.rs b/crates/brk_computer/src/transactions/volume/compute.rs similarity index 53% rename from crates/brk_computer/src/chain/volume/compute.rs rename to crates/brk_computer/src/transactions/volume/compute.rs index 6c55c3d15..db16c2089 100644 --- a/crates/brk_computer/src/chain/volume/compute.rs +++ b/crates/brk_computer/src/transactions/volume/compute.rs @@ -1,13 +1,11 @@ use brk_error::Result; use brk_indexer::Indexer; -use brk_types::ONE_DAY_IN_SEC_F64; +use brk_types::{StoredF32, ONE_DAY_IN_SEC_F64}; use vecdb::Exit; use super::Vecs; -use crate::{ - chain::{coinbase, transaction}, - indexes, price, ComputeIndexes, -}; +use super::super::{count, fees}; +use crate::{indexes, inputs, outputs, price, ComputeIndexes}; impl Vecs { #[allow(clippy::too_many_arguments)] @@ -15,8 +13,10 @@ impl Vecs { &mut self, indexer: &Indexer, indexes: &indexes::Vecs, - transaction_vecs: &transaction::Vecs, - coinbase_vecs: &coinbase::Vecs, + count_vecs: &count::Vecs, + fees_vecs: &fees::Vecs, + inputs_count: &inputs::CountVecs, + outputs_count: &outputs::CountVecs, starting_indexes: &ComputeIndexes, price: Option<&price::Vecs>, exit: &Exit, @@ -27,7 +27,7 @@ impl Vecs { starting_indexes.height, &indexer.vecs.tx.height_to_first_txindex, &indexes.block.height_to_txindex_count, - &transaction_vecs.txindex_to_input_value, + &fees_vecs.txindex_to_input_value, |sats| !sats.is_max(), exit, )?; @@ -56,24 +56,6 @@ impl Vecs { Ok(()) })?; - self.indexes_to_tx_btc_velocity - .compute_all(starting_indexes, exit, |v| { - v.compute_divide( - starting_indexes.dateindex, - self.indexes_to_annualized_volume_btc - .dateindex - .as_ref() - .unwrap(), - coinbase_vecs - 
.indexes_to_subsidy - .bitcoin - .dateindex - .unwrap_cumulative(), - exit, - )?; - Ok(()) - })?; - if let Some(indexes_to_sent_sum) = self.indexes_to_sent_sum.dollars.as_ref() { self.indexes_to_annualized_volume_usd .compute_all(starting_indexes, exit, |v| { @@ -85,39 +67,22 @@ impl Vecs { )?; Ok(()) })?; - - self.indexes_to_tx_usd_velocity - .compute_all(starting_indexes, exit, |v| { - v.compute_divide( - starting_indexes.dateindex, - self.indexes_to_annualized_volume_usd - .dateindex - .as_ref() - .unwrap(), - coinbase_vecs - .indexes_to_subsidy - .dollars - .as_ref() - .unwrap() - .dateindex - .unwrap_cumulative(), - exit, - )?; - Ok(()) - })?; } self.indexes_to_tx_per_sec .compute_all(starting_indexes, exit, |v| { v.compute_transform2( starting_indexes.dateindex, - transaction_vecs.indexes_to_tx_count.dateindex.unwrap_sum(), + count_vecs.indexes_to_tx_count.dateindex.unwrap_sum(), &indexes.time.dateindex_to_date, |(i, tx_count, date, ..)| { - ( - i, - (*tx_count as f64 / (date.completion() * ONE_DAY_IN_SEC_F64)).into(), - ) + let completion = date.completion(); + let per_sec = if completion == 0.0 { + StoredF32::NAN + } else { + StoredF32::from(*tx_count as f64 / (completion * ONE_DAY_IN_SEC_F64)) + }; + (i, per_sec) }, exit, )?; @@ -128,16 +93,16 @@ impl Vecs { .compute_all(starting_indexes, exit, |v| { v.compute_transform2( starting_indexes.dateindex, - transaction_vecs - .indexes_to_input_count - .dateindex - .unwrap_sum(), + inputs_count.indexes_to_count.dateindex.unwrap_sum(), &indexes.time.dateindex_to_date, - |(i, tx_count, date, ..)| { - ( - i, - (*tx_count as f64 / (date.completion() * ONE_DAY_IN_SEC_F64)).into(), - ) + |(i, input_count, date, ..)| { + let completion = date.completion(); + let per_sec = if completion == 0.0 { + StoredF32::NAN + } else { + StoredF32::from(*input_count as f64 / (completion * ONE_DAY_IN_SEC_F64)) + }; + (i, per_sec) }, exit, )?; @@ -148,16 +113,16 @@ impl Vecs { .compute_all(starting_indexes, exit, |v| { 
v.compute_transform2( starting_indexes.dateindex, - transaction_vecs - .indexes_to_output_count - .dateindex - .unwrap_sum(), + outputs_count.indexes_to_count.dateindex.unwrap_sum(), &indexes.time.dateindex_to_date, - |(i, tx_count, date, ..)| { - ( - i, - (*tx_count as f64 / (date.completion() * ONE_DAY_IN_SEC_F64)).into(), - ) + |(i, output_count, date, ..)| { + let completion = date.completion(); + let per_sec = if completion == 0.0 { + StoredF32::NAN + } else { + StoredF32::from(*output_count as f64 / (completion * ONE_DAY_IN_SEC_F64)) + }; + (i, per_sec) }, exit, )?; diff --git a/crates/brk_computer/src/chain/volume/import.rs b/crates/brk_computer/src/transactions/volume/import.rs similarity index 79% rename from crates/brk_computer/src/chain/volume/import.rs rename to crates/brk_computer/src/transactions/volume/import.rs index 536eddb9c..f8e7715a7 100644 --- a/crates/brk_computer/src/chain/volume/import.rs +++ b/crates/brk_computer/src/transactions/volume/import.rs @@ -4,7 +4,7 @@ use vecdb::Database; use super::Vecs; use crate::{ - grouped::{ComputedValueVecsFromHeight, ComputedVecsFromDateIndex, Source, VecBuilderOptions}, + internal::{ComputedValueVecsFromHeight, ComputedVecsFromDateIndex, Source, VecBuilderOptions}, indexes, }; @@ -53,22 +53,6 @@ impl Vecs { indexes, last(), )?, - indexes_to_tx_btc_velocity: ComputedVecsFromDateIndex::forced_import( - db, - "tx_btc_velocity", - Source::Compute, - version + v0, - indexes, - last(), - )?, - indexes_to_tx_usd_velocity: ComputedVecsFromDateIndex::forced_import( - db, - "tx_usd_velocity", - Source::Compute, - version + v0, - indexes, - last(), - )?, indexes_to_tx_per_sec: ComputedVecsFromDateIndex::forced_import( db, "tx_per_sec", diff --git a/crates/brk_computer/src/transactions/volume/mod.rs b/crates/brk_computer/src/transactions/volume/mod.rs new file mode 100644 index 000000000..1136f9ebd --- /dev/null +++ b/crates/brk_computer/src/transactions/volume/mod.rs @@ -0,0 +1,5 @@ +mod compute; +mod import; +mod 
vecs; + +pub use vecs::Vecs; diff --git a/crates/brk_computer/src/chain/volume/vecs.rs b/crates/brk_computer/src/transactions/volume/vecs.rs similarity index 64% rename from crates/brk_computer/src/chain/volume/vecs.rs rename to crates/brk_computer/src/transactions/volume/vecs.rs index b1b5c9034..b02d521c5 100644 --- a/crates/brk_computer/src/chain/volume/vecs.rs +++ b/crates/brk_computer/src/transactions/volume/vecs.rs @@ -1,17 +1,15 @@ use brk_traversable::Traversable; -use brk_types::{Bitcoin, Dollars, Sats, StoredF32, StoredF64}; +use brk_types::{Bitcoin, Dollars, Sats, StoredF32}; -use crate::grouped::{ComputedValueVecsFromHeight, ComputedVecsFromDateIndex}; +use crate::internal::{ComputedValueVecsFromHeight, ComputedVecsFromDateIndex}; -/// Volume and velocity metrics +/// Volume metrics #[derive(Clone, Traversable)] pub struct Vecs { pub indexes_to_sent_sum: ComputedValueVecsFromHeight, pub indexes_to_annualized_volume: ComputedVecsFromDateIndex, pub indexes_to_annualized_volume_btc: ComputedVecsFromDateIndex, pub indexes_to_annualized_volume_usd: ComputedVecsFromDateIndex, - pub indexes_to_tx_btc_velocity: ComputedVecsFromDateIndex, - pub indexes_to_tx_usd_velocity: ComputedVecsFromDateIndex, pub indexes_to_tx_per_sec: ComputedVecsFromDateIndex, pub indexes_to_outputs_per_sec: ComputedVecsFromDateIndex, pub indexes_to_inputs_per_sec: ComputedVecsFromDateIndex, diff --git a/crates/brk_fetcher/src/binance.rs b/crates/brk_fetcher/src/binance.rs index b4d19b683..6e54cdecf 100644 --- a/crates/brk_fetcher/src/binance.rs +++ b/crates/brk_fetcher/src/binance.rs @@ -73,7 +73,8 @@ impl Binance { default_retry(|_| { let url = Self::url("interval=1m&limit=1000"); info!("Fetching {url} ..."); - let json: Value = serde_json::from_slice(minreq::get(url).send()?.as_bytes())?; + let json: Value = + serde_json::from_slice(minreq::get(url).with_timeout(30).send()?.as_bytes())?; Self::parse_ohlc_array(&json) }) } @@ -95,7 +96,8 @@ impl Binance { default_retry(|_| { let url = 
Self::url("interval=1d"); info!("Fetching {url} ..."); - let json: Value = serde_json::from_slice(minreq::get(url).send()?.as_bytes())?; + let json: Value = + serde_json::from_slice(minreq::get(url).with_timeout(30).send()?.as_bytes())?; Self::parse_date_ohlc_array(&json) }) } @@ -205,6 +207,7 @@ impl Binance { pub fn ping() -> Result<()> { minreq::get("https://api.binance.com/api/v3/ping") + .with_timeout(10) .send()?; Ok(()) } diff --git a/crates/brk_fetcher/src/brk.rs b/crates/brk_fetcher/src/brk.rs index 3f56e8f17..9a2f89c7e 100644 --- a/crates/brk_fetcher/src/brk.rs +++ b/crates/brk_fetcher/src/brk.rs @@ -46,7 +46,8 @@ impl BRK { ); info!("Fetching {url} ..."); - let body: Value = serde_json::from_slice(minreq::get(url).send()?.as_bytes())?; + let body: Value = + serde_json::from_slice(minreq::get(url).with_timeout(30).send()?.as_bytes())?; body.as_array() .ok_or(Error::Parse("Expected JSON array".into()))? @@ -86,7 +87,8 @@ impl BRK { ); info!("Fetching {url}..."); - let body: Value = serde_json::from_slice(minreq::get(url).send()?.as_bytes())?; + let body: Value = + serde_json::from_slice(minreq::get(url).with_timeout(30).send()?.as_bytes())?; body.as_array() .ok_or(Error::Parse("Expected JSON array".into()))? 
@@ -120,6 +122,7 @@ impl BRK { pub fn ping() -> Result<()> { minreq::get(API_URL) + .with_timeout(10) .send()?; Ok(()) } diff --git a/crates/brk_fetcher/src/kraken.rs b/crates/brk_fetcher/src/kraken.rs index 8f764d2aa..6fa11ed30 100644 --- a/crates/brk_fetcher/src/kraken.rs +++ b/crates/brk_fetcher/src/kraken.rs @@ -39,7 +39,8 @@ impl Kraken { default_retry(|_| { let url = Self::url(1); info!("Fetching {url} ..."); - let json: Value = serde_json::from_slice(minreq::get(url).send()?.as_bytes())?; + let json: Value = + serde_json::from_slice(minreq::get(url).with_timeout(30).send()?.as_bytes())?; Self::parse_ohlc_response(&json) }) } @@ -60,7 +61,8 @@ impl Kraken { default_retry(|_| { let url = Self::url(1440); info!("Fetching {url} ..."); - let json: Value = serde_json::from_slice(minreq::get(url).send()?.as_bytes())?; + let json: Value = + serde_json::from_slice(minreq::get(url).with_timeout(30).send()?.as_bytes())?; Self::parse_date_ohlc_response(&json) }) } @@ -96,6 +98,7 @@ impl Kraken { pub fn ping() -> Result<()> { minreq::get("https://api.kraken.com/0/public/Time") + .with_timeout(10) .send()?; Ok(()) } diff --git a/crates/brk_logger/Cargo.toml b/crates/brk_logger/Cargo.toml index 4facc3dfc..eb52ce5de 100644 --- a/crates/brk_logger/Cargo.toml +++ b/crates/brk_logger/Cargo.toml @@ -9,7 +9,7 @@ repository.workspace = true build = "build.rs" [dependencies] -env_logger = "0.11.8" +env_logger = { workspace = true } jiff = { workspace = true } log = { workspace = true } owo-colors = "4.2.3" diff --git a/crates/brk_logger/src/lib.rs b/crates/brk_logger/src/lib.rs index 72a5446e5..abc5a4a04 100644 --- a/crates/brk_logger/src/lib.rs +++ b/crates/brk_logger/src/lib.rs @@ -27,50 +27,56 @@ pub fn init(path: Option<&Path>) -> io::Result<()> { LOG_FILE.set(Mutex::new(BufWriter::new(file))).ok(); } - Builder::from_env(Env::default().default_filter_or( - 
"info,bitcoin=off,bitcoincore-rpc=off,fjall=off,brk_fjall=off,lsm_tree=off,brk_rolldown=off,rolldown=off,rmcp=off,brk_rmcp=off,tracing=off,aide=off,rustls=off", - // "debug,fjall=trace,bitcoin=off,bitcoincore-rpc=off,rolldown=off,rmcp=off,brk_rmcp=off,tracing=off,aide=off,rustls=off", - )) - .format(move |buf, record| { - let date_time = Timestamp::now() - .to_zoned(tz::TimeZone::system()) - .strftime("%Y-%m-%d %H:%M:%S") - .to_string(); - let level = record.level().as_str().to_lowercase(); - let level = format!("{level:5}"); - let target = record.target(); - let dash = "-"; - let args = record.args(); + #[cfg(debug_assertions)] + let default_level = "debug"; + #[cfg(not(debug_assertions))] + let default_level = "info"; - if let Some(hook) = LOG_HOOK.get() { - hook(&args.to_string()); - } + let filter = format!( + "{default_level},bitcoin=off,bitcoincore-rpc=off,fjall=off,brk_fjall=off,lsm_tree=off,brk_rolldown=off,rolldown=off,rmcp=off,brk_rmcp=off,tracing=off,aide=off,rustls=off,notify=off,oxc_resolver=off,tower_http=off" + ); - if let Some(file) = LOG_FILE.get() { - let _ = write(&mut *file.lock(), &date_time, target, &level, dash, args); - } + Builder::from_env(Env::default().default_filter_or(filter)) + .format(move |buf, record| { + let date_time = Timestamp::now() + .to_zoned(tz::TimeZone::system()) + .strftime("%Y-%m-%d %H:%M:%S") + .to_string(); + let level = record.level().as_str().to_lowercase(); + let level = format!("{level:5}"); + let target = record.target(); + let dash = "-"; + let args = record.args(); - let colored_date_time = date_time.bright_black(); - let colored_level = match level.chars().next().unwrap() { - 'e' => level.red().to_string(), - 'w' => level.yellow().to_string(), - 'i' => level.green().to_string(), - 'd' => level.blue().to_string(), - 't' => level.cyan().to_string(), - _ => panic!(), - }; - let colored_dash = dash.bright_black(); + if let Some(hook) = LOG_HOOK.get() { + hook(&args.to_string()); + } - write( - buf, - 
colored_date_time, - target, - colored_level, - colored_dash, - args, - ) - }) - .init(); + if let Some(file) = LOG_FILE.get() { + let _ = write(&mut *file.lock(), &date_time, target, &level, dash, args); + } + + let colored_date_time = date_time.bright_black(); + let colored_level = match level.chars().next().unwrap() { + 'e' => level.red().to_string(), + 'w' => level.yellow().to_string(), + 'i' => level.green().to_string(), + 'd' => level.blue().to_string(), + 't' => level.cyan().to_string(), + _ => panic!(), + }; + let colored_dash = dash.bright_black(); + + write( + buf, + colored_date_time, + target, + colored_level, + colored_dash, + args, + ) + }) + .init(); Ok(()) } diff --git a/crates/brk_mcp/src/lib.rs b/crates/brk_mcp/src/lib.rs index 147e6df34..0d6b93891 100644 --- a/crates/brk_mcp/src/lib.rs +++ b/crates/brk_mcp/src/lib.rs @@ -61,7 +61,7 @@ impl MCP { _ => format!("{}{}", self.base_url, params.path), }; - match minreq::get(&url).send() { + match minreq::get(&url).with_timeout(30).send() { Ok(response) => { let body = response.as_str().unwrap_or("").to_string(); Ok(CallToolResult::success(vec![Content::text(body)])) diff --git a/crates/brk_mempool/src/sync.rs b/crates/brk_mempool/src/sync.rs index fb7314a5e..d89c1808e 100644 --- a/crates/brk_mempool/src/sync.rs +++ b/crates/brk_mempool/src/sync.rs @@ -4,14 +4,14 @@ use std::{ atomic::{AtomicBool, AtomicU64, Ordering}, }, thread, - time::{Duration, Instant, SystemTime, UNIX_EPOCH}, + time::{Duration, SystemTime, UNIX_EPOCH}, }; use brk_error::Result; use brk_rpc::Client; use brk_types::{MempoolEntryInfo, MempoolInfo, TxWithHex, Txid, TxidPrefix}; use derive_deref::Deref; -use log::{debug, error}; +use log::error; use parking_lot::{RwLock, RwLockReadGuard}; use rustc_hash::FxHashMap; @@ -224,9 +224,9 @@ impl MempoolInner { self.dirty.store(false, Ordering::Release); - let i = Instant::now(); + // let i = Instant::now(); self.rebuild_projected_blocks(); - debug!("mempool: rebuild_projected_blocks in {:?}", 
i.elapsed()); + // debug!("mempool: rebuild_projected_blocks in {:?}", i.elapsed()); } /// Rebuild projected blocks snapshot. diff --git a/crates/brk_query/src/impl/address.rs b/crates/brk_query/src/impl/address.rs index 51c78d00f..1366ada64 100644 --- a/crates/brk_query/src/impl/address.rs +++ b/crates/brk_query/src/impl/address.rs @@ -53,19 +53,19 @@ impl Query { }; let any_address_index = computer - .stateful + .distribution .any_address_indexes .get_once(outputtype, type_index)?; let address_data = match any_address_index.to_enum() { AnyAddressDataIndexEnum::Loaded(index) => computer - .stateful + .distribution .addresses_data .loaded .iter()? .get_unwrap(index), AnyAddressDataIndexEnum::Empty(index) => computer - .stateful + .distribution .addresses_data .empty .iter()? diff --git a/crates/brk_query/src/impl/block/raw.rs b/crates/brk_query/src/impl/block/raw.rs index 58b5a13f8..25ba3b46e 100644 --- a/crates/brk_query/src/impl/block/raw.rs +++ b/crates/brk_query/src/impl/block/raw.rs @@ -27,7 +27,7 @@ impl Query { return Err(Error::OutOfRange("Block height out of range".into())); } - let position = computer.blks.height_to_position.read_once(height)?; + let position = computer.positions.height_to_position.read_once(height)?; let size = indexer.vecs.block.height_to_total_size.read_once(height)?; reader.read_raw_bytes(position, *size as usize) diff --git a/crates/brk_query/src/impl/mining/block_fee_rates.rs b/crates/brk_query/src/impl/mining/block_fee_rates.rs index cfeff8848..a2d43537a 100644 --- a/crates/brk_query/src/impl/mining/block_fee_rates.rs +++ b/crates/brk_query/src/impl/mining/block_fee_rates.rs @@ -32,7 +32,7 @@ impl Query { // // let iter = DateIndexIter::new(computer, start, current_height.to_usize()); // - // let vecs = &computer.chain.indexes_to_fee_rate.dateindex; + // let vecs = &computer.transactions.transaction.indexes_to_fee_rate.dateindex; // let mut min = vecs.unwrap_min().iter(); // let mut pct10 = vecs.unwrap_pct10().iter(); // let mut 
pct25 = vecs.unwrap_pct25().iter(); diff --git a/crates/brk_query/src/impl/mining/block_fees.rs b/crates/brk_query/src/impl/mining/block_fees.rs index 7e27a68a4..4014f36c5 100644 --- a/crates/brk_query/src/impl/mining/block_fees.rs +++ b/crates/brk_query/src/impl/mining/block_fees.rs @@ -16,8 +16,8 @@ impl Query { let iter = DateIndexIter::new(computer, start, current_height.to_usize()); let mut fees = computer - .chain - .transaction + .transactions + .fees .indexes_to_fee .sats .dateindex diff --git a/crates/brk_query/src/impl/mining/block_rewards.rs b/crates/brk_query/src/impl/mining/block_rewards.rs index 35967937c..84db5a3fd 100644 --- a/crates/brk_query/src/impl/mining/block_rewards.rs +++ b/crates/brk_query/src/impl/mining/block_rewards.rs @@ -17,8 +17,8 @@ impl Query { // coinbase = subsidy + fees let mut rewards = computer - .chain - .coinbase + .blocks + .rewards .indexes_to_coinbase .sats .dateindex diff --git a/crates/brk_query/src/impl/mining/block_sizes.rs b/crates/brk_query/src/impl/mining/block_sizes.rs index 2dda7e28c..568c739e8 100644 --- a/crates/brk_query/src/impl/mining/block_sizes.rs +++ b/crates/brk_query/src/impl/mining/block_sizes.rs @@ -16,15 +16,15 @@ impl Query { let iter = DateIndexIter::new(computer, start, current_height.to_usize()); let mut sizes_vec = computer - .chain - .block + .blocks + .size .indexes_to_block_size .dateindex .unwrap_average() .iter(); let mut weights_vec = computer - .chain - .block + .blocks + .weight .indexes_to_block_weight .dateindex .unwrap_average() diff --git a/crates/brk_query/src/impl/mining/dateindex_iter.rs b/crates/brk_query/src/impl/mining/dateindex_iter.rs index 5147bea8c..cece78765 100644 --- a/crates/brk_query/src/impl/mining/dateindex_iter.rs +++ b/crates/brk_query/src/impl/mining/dateindex_iter.rs @@ -48,8 +48,8 @@ impl<'a> DateIndexIter<'a> { + 1; let mut timestamps = self .computer - .chain - .block + .blocks + .time .timeindexes_to_timestamp .dateindex .as_ref() diff --git 
a/crates/brk_query/src/impl/mining/difficulty.rs b/crates/brk_query/src/impl/mining/difficulty.rs index 9052765c6..6a8fc6e72 100644 --- a/crates/brk_query/src/impl/mining/difficulty.rs +++ b/crates/brk_query/src/impl/mining/difficulty.rs @@ -43,8 +43,8 @@ impl Query { // Get timestamps using difficultyepoch_to_timestamp for epoch start let epoch_start_timestamp = computer - .chain - .block + .blocks + .time .difficultyepoch_to_timestamp .read_once(current_epoch)?; let current_timestamp = indexer diff --git a/crates/brk_query/src/impl/mining/epochs.rs b/crates/brk_query/src/impl/mining/epochs.rs index f88b8f8f1..6c93dad21 100644 --- a/crates/brk_query/src/impl/mining/epochs.rs +++ b/crates/brk_query/src/impl/mining/epochs.rs @@ -22,9 +22,9 @@ pub fn iter_difficulty_epochs( .unwrap_or_default(); let mut epoch_to_height_iter = computer.indexes.block.difficultyepoch_to_first_height.iter(); - let mut epoch_to_timestamp_iter = computer.chain.block.difficultyepoch_to_timestamp.iter(); + let mut epoch_to_timestamp_iter = computer.blocks.time.difficultyepoch_to_timestamp.iter(); let mut epoch_to_difficulty_iter = computer - .chain + .blocks .mining .indexes_to_difficulty .difficultyepoch diff --git a/crates/brk_query/src/impl/mining/hashrate.rs b/crates/brk_query/src/impl/mining/hashrate.rs index 665393225..57445f7c6 100644 --- a/crates/brk_query/src/impl/mining/hashrate.rs +++ b/crates/brk_query/src/impl/mining/hashrate.rs @@ -26,7 +26,7 @@ impl Query { .read_once(current_height)?; let current_hashrate = *computer - .chain + .blocks .mining .indexes_to_hash_rate .dateindex @@ -57,7 +57,7 @@ impl Query { // Create iterators for the loop let mut hashrate_iter = computer - .chain + .blocks .mining .indexes_to_hash_rate .dateindex @@ -65,8 +65,8 @@ impl Query { .iter(); let mut timestamp_iter = computer - .chain - .block + .blocks + .time .timeindexes_to_timestamp .dateindex .as_ref() diff --git a/crates/brk_query/src/impl/mining/reward_stats.rs 
b/crates/brk_query/src/impl/mining/reward_stats.rs index 91178cbc4..e3d1d2342 100644 --- a/crates/brk_query/src/impl/mining/reward_stats.rs +++ b/crates/brk_query/src/impl/mining/reward_stats.rs @@ -13,8 +13,8 @@ impl Query { let start_block = Height::from(current_height.to_usize().saturating_sub(block_count - 1)); let mut coinbase_iter = computer - .chain - .coinbase + .blocks + .rewards .indexes_to_coinbase .sats .height @@ -22,16 +22,16 @@ impl Query { .unwrap() .iter(); let mut fee_iter = computer - .chain - .transaction + .transactions + .fees .indexes_to_fee .sats .height .unwrap_sum() .iter(); let mut tx_count_iter = computer - .chain - .transaction + .transactions + .count .indexes_to_tx_count .height .as_ref() diff --git a/crates/brk_query/src/impl/transaction.rs b/crates/brk_query/src/impl/transaction.rs index c091410b7..e30b10409 100644 --- a/crates/brk_query/src/impl/transaction.rs +++ b/crates/brk_query/src/impl/transaction.rs @@ -121,7 +121,8 @@ impl Query { // Look up spend status let computer = self.computer(); let txinindex = computer - .txouts + .outputs + .spent .txoutindex_to_txinindex .read_once(txoutindex)?; @@ -168,7 +169,7 @@ impl Query { // Get spend status for each output let computer = self.computer(); - let mut txoutindex_to_txinindex_iter = computer.txouts.txoutindex_to_txinindex.iter()?; + let mut txoutindex_to_txinindex_iter = computer.outputs.spent.txoutindex_to_txinindex.iter()?; let mut outspends = Vec::with_capacity(output_count); for i in 0..output_count { @@ -203,7 +204,7 @@ impl Query { .tx .txindex_to_first_txinindex .read_once(txindex)?; - let position = computer.blks.txindex_to_position.read_once(txindex)?; + let position = computer.positions.txindex_to_position.read_once(txindex)?; // Get block info for status let block_hash = indexer.vecs.block.height_to_blockhash.read_once(height)?; @@ -313,7 +314,7 @@ impl Query { let computer = self.computer(); let total_size = indexer.vecs.tx.txindex_to_total_size.read_once(txindex)?; 
- let position = computer.blks.txindex_to_position.read_once(txindex)?; + let position = computer.positions.txindex_to_position.read_once(txindex)?; let buffer = reader.read_raw_bytes(position, *total_size as usize)?; diff --git a/crates/brk_rpc/src/lib.rs b/crates/brk_rpc/src/lib.rs index 67294f7cb..68c331e52 100644 --- a/crates/brk_rpc/src/lib.rs +++ b/crates/brk_rpc/src/lib.rs @@ -72,7 +72,7 @@ impl Client { /// Returns the numbers of block in the longest chain. pub fn get_last_height(&self) -> Result { - debug!("Get last height..."); + // debug!("Get last height..."); self.call(|c| c.get_block_count()) .map(Height::from) .map_err(Into::into) @@ -262,10 +262,13 @@ impl Client { return Ok((block_info.height.into(), hash)); } - let mut hash = block_info - .previous_block_hash - .map(BlockHash::from) - .ok_or(Error::NotFound("Genesis block has no previous block".into()))?; + let mut hash = + block_info + .previous_block_hash + .map(BlockHash::from) + .ok_or(Error::NotFound( + "Genesis block has no previous block".into(), + ))?; loop { if self.is_in_main_chain(&hash)? 
{ @@ -277,7 +280,9 @@ impl Client { hash = info .previous_block_hash .map(BlockHash::from) - .ok_or(Error::NotFound("Reached genesis without finding main chain".into()))?; + .ok_or(Error::NotFound( + "Reached genesis without finding main chain".into(), + ))?; } } Err(_) => Err(Error::NotFound("Block hash not found in blockchain".into())), diff --git a/crates/brk_server/Cargo.toml b/crates/brk_server/Cargo.toml index 3f0c275e2..d7fa60c6d 100644 --- a/crates/brk_server/Cargo.toml +++ b/crates/brk_server/Cargo.toml @@ -11,7 +11,7 @@ build = "build.rs" [dependencies] aide = { workspace = true } axum = { workspace = true } -brk_binder = { workspace = true } +brk_bindgen = { workspace = true } brk_computer = { workspace = true } brk_error = { workspace = true } brk_fetcher = { workspace = true } diff --git a/crates/brk_server/src/lib.rs b/crates/brk_server/src/lib.rs index 52b673ce1..a997d3ad3 100644 --- a/crates/brk_server/src/lib.rs +++ b/crates/brk_server/src/lib.rs @@ -140,14 +140,14 @@ impl Server { .and_then(|p| p.parent()) .unwrap() .into(); - let output_paths = brk_binder::ClientOutputPaths::new() + let output_paths = brk_bindgen::ClientOutputPaths::new() .rust(workspace_root.join("crates/brk_client/src/lib.rs")) .javascript(workspace_root.join("modules/brk-client/index.js")) .python(workspace_root.join("packages/brk_client/brk_client/__init__.py")); let openapi_json = Arc::new(serde_json::to_string(&openapi).unwrap()); let result = panic::catch_unwind(panic::AssertUnwindSafe(|| { - brk_binder::generate_clients(vecs, &openapi_json, &output_paths) + brk_bindgen::generate_clients(vecs, &openapi_json, &output_paths) })); match result { diff --git a/crates/brk_types/src/feerate.rs b/crates/brk_types/src/feerate.rs index 865d045a3..6bf10604b 100644 --- a/crates/brk_types/src/feerate.rs +++ b/crates/brk_types/src/feerate.rs @@ -29,6 +29,9 @@ impl From<(Sats, VSize)> for FeeRate { } let sats = u64::from(sats); let vsize = u64::from(vsize); + if vsize == 0 { + return 
Self(f64::NAN); + } Self((sats * 1000).div_ceil(vsize) as f64 / 1000.0) } } @@ -62,7 +65,11 @@ impl AddAssign for FeeRate { impl Div for FeeRate { type Output = Self; fn div(self, rhs: usize) -> Self::Output { - Self(self.0 / rhs as f64) + if rhs == 0 { + Self(f64::NAN) + } else { + Self(self.0 / rhs as f64) + } } } diff --git a/crates/brk_types/src/height.rs b/crates/brk_types/src/height.rs index adbe2bca8..c77d053ff 100644 --- a/crates/brk_types/src/height.rs +++ b/crates/brk_types/src/height.rs @@ -1,5 +1,6 @@ use std::{ fmt::Debug, + fs, ops::{Add, AddAssign, Rem}, }; @@ -45,7 +46,7 @@ impl Height { } pub fn write(&self, path: &std::path::Path) -> Result<(), std::io::Error> { - std::fs::write(path, self.to_bytes()) + fs::write(path, self.to_bytes()) } pub fn increment(&mut self) { diff --git a/crates/brk_types/src/sats.rs b/crates/brk_types/src/sats.rs index 93edd064d..c81755c97 100644 --- a/crates/brk_types/src/sats.rs +++ b/crates/brk_types/src/sats.rs @@ -199,7 +199,11 @@ impl Div for Sats { impl Div for Sats { type Output = Self; fn div(self, rhs: usize) -> Self::Output { - Self(self.0 / rhs as u64) + if rhs == 0 { + Self::ZERO + } else { + Self(self.0 / rhs as u64) + } } } diff --git a/crates/brk_types/src/stored_f32.rs b/crates/brk_types/src/stored_f32.rs index dcc45c326..7002864e6 100644 --- a/crates/brk_types/src/stored_f32.rs +++ b/crates/brk_types/src/stored_f32.rs @@ -90,14 +90,23 @@ impl CheckedSub for StoredF32 { impl Div for StoredF32 { type Output = Self; fn div(self, rhs: usize) -> Self::Output { - Self(self.0 / rhs as f32) + if rhs == 0 { + Self::NAN + } else { + Self(self.0 / rhs as f32) + } } } impl Div for StoredF32 { type Output = Self; fn div(self, rhs: StoredU32) -> Self::Output { - Self(self.0 / f32::from(rhs)) + let rhs = f32::from(rhs); + if rhs == 0.0 { + Self::NAN + } else { + Self(self.0 / rhs) + } } } @@ -138,14 +147,23 @@ impl From> for StoredF32 { impl Div for StoredF32 { type Output = Self; fn div(self, rhs: Dollars) -> 
Self::Output { - Self::from(self.0 as f64 / *rhs) + let rhs = *rhs; + if rhs == 0.0 { + Self::NAN + } else { + Self::from(self.0 as f64 / rhs) + } } } impl Div for StoredF32 { type Output = Self; fn div(self, rhs: StoredF32) -> Self::Output { - Self::from(self.0 / rhs.0) + if rhs.0 == 0.0 { + Self::NAN + } else { + Self::from(self.0 / rhs.0) + } } } diff --git a/crates/brk_types/src/stored_f64.rs b/crates/brk_types/src/stored_f64.rs index 49b828455..2011c7085 100644 --- a/crates/brk_types/src/stored_f64.rs +++ b/crates/brk_types/src/stored_f64.rs @@ -85,21 +85,34 @@ impl Mul for StoredF64 { impl Div for StoredF64 { type Output = Self; fn div(self, rhs: usize) -> Self::Output { - Self(self.0 / rhs as f64) + if rhs == 0 { + Self::NAN + } else { + Self(self.0 / rhs as f64) + } } } impl Div for StoredF64 { type Output = Self; fn div(self, rhs: Self) -> Self::Output { - Self(self.0 / rhs.0) + if rhs.0 == 0.0 { + Self::NAN + } else { + Self(self.0 / rhs.0) + } } } impl Div for StoredF64 { type Output = Self; fn div(self, rhs: Dollars) -> Self::Output { - Self::from(self.0 / *rhs) + let rhs = *rhs; + if rhs == 0.0 { + Self::NAN + } else { + Self(self.0 / rhs) + } } } @@ -201,7 +214,12 @@ impl Sum for StoredF64 { impl Div for StoredF64 { type Output = Self; fn div(self, rhs: Bitcoin) -> Self::Output { - Self(self.0 / f64::from(rhs)) + let rhs = f64::from(rhs); + if rhs == 0.0 { + Self::NAN + } else { + Self(self.0 / rhs) + } } } diff --git a/crates/brk_types/src/weight.rs b/crates/brk_types/src/weight.rs index 8aaa03bff..6b1eb6ef9 100644 --- a/crates/brk_types/src/weight.rs +++ b/crates/brk_types/src/weight.rs @@ -23,9 +23,32 @@ use vecdb::{Formattable, Pco}; pub struct Weight(u64); impl Weight { + /// Maximum block weight in Bitcoin (4 million weight units). + /// Note: Pre-SegWit 1MB blocks have weight = size * 4 = 4M, so this is consistent across all blocks. + pub const MAX_BLOCK: Self = Self(4_000_000); + + /// Compute weight from base size and total size. 
+ /// Formula: weight = base_size * 3 + total_size + /// (since total_size = base_size + witness_size, this equals base_size * 4 + witness_size) + #[inline] + pub fn from_sizes(base_size: u32, total_size: u32) -> Self { + let wu = base_size as u64 * 3 + total_size as u64; + Self(wu) + } + pub fn to_vbytes_ceil(&self) -> u64 { bitcoin::Weight::from(*self).to_vbytes_ceil() } + + pub fn to_vbytes_floor(&self) -> u64 { + bitcoin::Weight::from(*self).to_vbytes_floor() + } + + /// Returns block fullness as a percentage (0-100+) relative to MAX_BLOCK. + #[inline] + pub fn fullness(&self) -> f32 { + (self.0 as f64 / Self::MAX_BLOCK.0 as f64 * 100.0) as f32 + } } impl From for Weight { diff --git a/modules/brk-client/index.js b/modules/brk-client/index.js index 2ff7b5fcf..a7f356356 100644 --- a/modules/brk-client/index.js +++ b/modules/brk-client/index.js @@ -1,898 +1,6 @@ // Auto-generated BRK JavaScript client // Do not edit manually -// Constants - -export const VERSION = "v0.1.0-alpha.1"; - -export const INDEXES = /** @type {const} */ ([ - "dateindex", - "decadeindex", - "difficultyepoch", - "emptyoutputindex", - "halvingepoch", - "height", - "txinindex", - "monthindex", - "opreturnindex", - "txoutindex", - "p2aaddressindex", - "p2msoutputindex", - "p2pk33addressindex", - "p2pk65addressindex", - "p2pkhaddressindex", - "p2shaddressindex", - "p2traddressindex", - "p2wpkhaddressindex", - "p2wshaddressindex", - "quarterindex", - "semesterindex", - "txindex", - "unknownoutputindex", - "weekindex", - "yearindex", - "loadedaddressindex", - "emptyaddressindex", -]); - -export const POOL_ID_TO_POOL_NAME = /** @type {const} */ ({ - pool175btc: "175btc", - onehash: "1Hash", - onem1x: "1M1X", - onethash: "1THash", - twentyoneinc: "21 Inc.", - pool50btc: "50BTC", - fiftyeightcoin: "58COIN", - sevenpool: "7pool", - eightbaochi: "8baochi", - axbt: "A-XBT", - aaopool: "AAO Pool", - antpool: "AntPool", - arkpool: "ArkPool", - asicminer: "ASICMiner", - batpool: "BATPOOL", - bcmonster: 
"BCMonster", - bcpoolio: "bcpool.io", - binancepool: "Binance Pool", - bitalo: "Bitalo", - bitclub: "BitClub", - bitcoinaffiliatenetwork: "Bitcoin Affiliate Network", - bitcoinindia: "Bitcoin India", - bitcoinukraine: "Bitcoin-Ukraine", - bitcoincom: "Bitcoin.com", - bitcoinrussia: "BitcoinRussia", - bitfarms: "Bitfarms", - bitfufupool: "BitFuFuPool", - bitfury: "BitFury", - bitminter: "BitMinter", - bitparking: "Bitparking", - bitsolo: "Bitsolo", - bixin: "Bixin", - blockfills: "BlockFills", - braiinspool: "Braiins Pool", - bravomining: "Bravo Mining", - btcguild: "BTC Guild", - btcnuggets: "BTC Nuggets", - btcpoolparty: "BTC Pool Party", - btccom: "BTC.com", - btctop: "BTC.TOP", - btcc: "BTCC", - btcdig: "BTCDig", - btclab: "BTCLab", - btcmp: "BTCMP", - btcserv: "BTCServ", - btpool: "BTPOOL", - bwpool: "BWPool", - bytepool: "BytePool", - canoe: "CANOE", - canoepool: "CanoePool", - carbonnegative: "Carbon Negative", - ckpool: "CKPool", - cloudhashing: "CloudHashing", - coinlab: "CoinLab", - cointerra: "Cointerra", - connectbtc: "ConnectBTC", - dcex: "DCEX", - dcexploration: "DCExploration", - digitalbtc: "digitalBTC", - digitalxmintsy: "digitalX Mintsy", - dpool: "DPOOL", - eclipsemc: "EclipseMC", - ekanembtc: "EkanemBTC", - eligius: "Eligius", - emcdpool: "EMCDPool", - entrustcharitypool: "Entrust Charity Pool", - eobot: "Eobot", - exxbw: "EXX&BW", - f2pool: "F2Pool", - foundryusa: "Foundry USA", - futurebitapollosolo: "FutureBit Apollo Solo", - gbminers: "GBMiners", - ghashio: "GHash.IO", - givemecoins: "Give Me Coins", - gogreenlight: "GoGreenLight", - haominer: "haominer", - haozhuzhu: "HAOZHUZHU", - hashbx: "HashBX", - hashpool: "HASHPOOL", - helix: "Helix", - hhtt: "HHTT", - hotpool: "HotPool", - hummerpool: "Hummerpool", - huobipool: "Huobi.pool", - innopolistech: "Innopolis Tech", - kanopool: "KanoPool", - kncminer: "KnCMiner", - kucoinpool: "KuCoinPool", - lubiancom: "Lubian.com", - luckypool: "luckyPool", - luxor: "Luxor", - marapool: "MARA Pool", - 
maxbtc: "MaxBTC", - maxipool: "MaxiPool", - megabigpower: "MegaBigPower", - minerium: "Minerium", - miningsquared: "Mining Squared", - miningdutch: "Mining-Dutch", - miningcity: "MiningCity", - miningkings: "MiningKings", - mmpool: "mmpool", - mtred: "Mt Red", - multicoinco: "MultiCoin.co", - multipool: "Multipool", - mybtccoinpool: "myBTCcoin Pool", - neopool: "Neopool", - nexious: "Nexious", - nicehash: "NiceHash", - nmcbit: "NMCbit", - novablock: "NovaBlock", - ocean: "OCEAN", - okexpool: "OKExPool", - okkong: "OKKONG", - okminer: "OKMINER", - okpooltop: "okpool.top", - ozcoin: "OzCoin", - parasite: "Parasite", - patels: "Patels", - pegapool: "PEGA Pool", - phashio: "PHash.IO", - phoenix: "Phoenix", - polmine: "Polmine", - poolin: "Poolin", - portlandhodl: "Portland.HODL", - publicpool: "Public Pool", - purebtccom: "PureBTC.COM", - rawpool: "Rawpool", - rigpool: "RigPool", - sbicrypto: "SBI Crypto", - secpool: "SECPOOL", - secretsuperstar: "SecretSuperstar", - shawnp0wers: "shawnp0wers", - sigmapoolcom: "Sigmapool.com", - simplecoinus: "simplecoin.us", - solock: "Solo CK", - spiderpool: "SpiderPool", - stminingcorp: "ST Mining Corp", - tangpool: "Tangpool", - tatmaspool: "TATMAS Pool", - tbdice: "TBDice", - telco214: "Telco 214", - terrapool: "Terra Pool", - tiger: "tiger", - tigerpoolnet: "tigerpool.net", - titan: "Titan", - transactioncoinmining: "transactioncoinmining", - trickysbtcpool: "Tricky's BTC Pool", - triplemining: "TripleMining", - ultimuspool: "ULTIMUSPOOL", - unknown: "Unknown", - unomp: "UNOMP", - viabtc: "ViaBTC", - waterhole: "Waterhole", - wayicn: "WAYI.CN", - whitepool: "WhitePool", - wk057: "wk057", - yourbtcnet: "Yourbtc.net", - zulupool: "Zulupool", -}); - -// Cohort names - -export const TERM_NAMES = /** @type {const} */ ({ - "short": { - "id": "sth", - "short": "STH", - "long": "Short Term Holders" - }, - "long": { - "id": "lth", - "short": "LTH", - "long": "Long Term Holders" - } -}); - -export const EPOCH_NAMES = /** @type {const} */ 
({ - "_0": { - "id": "epoch_0", - "short": "Epoch 0", - "long": "Epoch 0" - }, - "_1": { - "id": "epoch_1", - "short": "Epoch 1", - "long": "Epoch 1" - }, - "_2": { - "id": "epoch_2", - "short": "Epoch 2", - "long": "Epoch 2" - }, - "_3": { - "id": "epoch_3", - "short": "Epoch 3", - "long": "Epoch 3" - }, - "_4": { - "id": "epoch_4", - "short": "Epoch 4", - "long": "Epoch 4" - } -}); - -export const YEAR_NAMES = /** @type {const} */ ({ - "_2009": { - "id": "year_2009", - "short": "2009", - "long": "Year 2009" - }, - "_2010": { - "id": "year_2010", - "short": "2010", - "long": "Year 2010" - }, - "_2011": { - "id": "year_2011", - "short": "2011", - "long": "Year 2011" - }, - "_2012": { - "id": "year_2012", - "short": "2012", - "long": "Year 2012" - }, - "_2013": { - "id": "year_2013", - "short": "2013", - "long": "Year 2013" - }, - "_2014": { - "id": "year_2014", - "short": "2014", - "long": "Year 2014" - }, - "_2015": { - "id": "year_2015", - "short": "2015", - "long": "Year 2015" - }, - "_2016": { - "id": "year_2016", - "short": "2016", - "long": "Year 2016" - }, - "_2017": { - "id": "year_2017", - "short": "2017", - "long": "Year 2017" - }, - "_2018": { - "id": "year_2018", - "short": "2018", - "long": "Year 2018" - }, - "_2019": { - "id": "year_2019", - "short": "2019", - "long": "Year 2019" - }, - "_2020": { - "id": "year_2020", - "short": "2020", - "long": "Year 2020" - }, - "_2021": { - "id": "year_2021", - "short": "2021", - "long": "Year 2021" - }, - "_2022": { - "id": "year_2022", - "short": "2022", - "long": "Year 2022" - }, - "_2023": { - "id": "year_2023", - "short": "2023", - "long": "Year 2023" - }, - "_2024": { - "id": "year_2024", - "short": "2024", - "long": "Year 2024" - }, - "_2025": { - "id": "year_2025", - "short": "2025", - "long": "Year 2025" - }, - "_2026": { - "id": "year_2026", - "short": "2026", - "long": "Year 2026" - } -}); - -export const SPENDABLE_TYPE_NAMES = /** @type {const} */ ({ - "p2pk65": { - "id": "p2pk65", - "short": "P2PK65", 
- "long": "Pay to Public Key (65 bytes)" - }, - "p2pk33": { - "id": "p2pk33", - "short": "P2PK33", - "long": "Pay to Public Key (33 bytes)" - }, - "p2pkh": { - "id": "p2pkh", - "short": "P2PKH", - "long": "Pay to Public Key Hash" - }, - "p2ms": { - "id": "p2ms", - "short": "P2MS", - "long": "Pay to Multisig" - }, - "p2sh": { - "id": "p2sh", - "short": "P2SH", - "long": "Pay to Script Hash" - }, - "p2wpkh": { - "id": "p2wpkh", - "short": "P2WPKH", - "long": "Pay to Witness Public Key Hash" - }, - "p2wsh": { - "id": "p2wsh", - "short": "P2WSH", - "long": "Pay to Witness Script Hash" - }, - "p2tr": { - "id": "p2tr", - "short": "P2TR", - "long": "Pay to Taproot" - }, - "p2a": { - "id": "p2a", - "short": "P2A", - "long": "Pay to Anchor" - }, - "unknown": { - "id": "unknown_outputs", - "short": "Unknown", - "long": "Unknown Output Type" - }, - "empty": { - "id": "empty_outputs", - "short": "Empty", - "long": "Empty Output" - } -}); - -export const AGE_RANGE_NAMES = /** @type {const} */ ({ - "up_to_1d": { - "id": "up_to_1d_old", - "short": "<1d", - "long": "Up to 1 Day Old" - }, - "_1d_to_1w": { - "id": "at_least_1d_up_to_1w_old", - "short": "1d-1w", - "long": "1 Day to 1 Week Old" - }, - "_1w_to_1m": { - "id": "at_least_1w_up_to_1m_old", - "short": "1w-1m", - "long": "1 Week to 1 Month Old" - }, - "_1m_to_2m": { - "id": "at_least_1m_up_to_2m_old", - "short": "1m-2m", - "long": "1 to 2 Months Old" - }, - "_2m_to_3m": { - "id": "at_least_2m_up_to_3m_old", - "short": "2m-3m", - "long": "2 to 3 Months Old" - }, - "_3m_to_4m": { - "id": "at_least_3m_up_to_4m_old", - "short": "3m-4m", - "long": "3 to 4 Months Old" - }, - "_4m_to_5m": { - "id": "at_least_4m_up_to_5m_old", - "short": "4m-5m", - "long": "4 to 5 Months Old" - }, - "_5m_to_6m": { - "id": "at_least_5m_up_to_6m_old", - "short": "5m-6m", - "long": "5 to 6 Months Old" - }, - "_6m_to_1y": { - "id": "at_least_6m_up_to_1y_old", - "short": "6m-1y", - "long": "6 Months to 1 Year Old" - }, - "_1y_to_2y": { - "id": 
"at_least_1y_up_to_2y_old", - "short": "1y-2y", - "long": "1 to 2 Years Old" - }, - "_2y_to_3y": { - "id": "at_least_2y_up_to_3y_old", - "short": "2y-3y", - "long": "2 to 3 Years Old" - }, - "_3y_to_4y": { - "id": "at_least_3y_up_to_4y_old", - "short": "3y-4y", - "long": "3 to 4 Years Old" - }, - "_4y_to_5y": { - "id": "at_least_4y_up_to_5y_old", - "short": "4y-5y", - "long": "4 to 5 Years Old" - }, - "_5y_to_6y": { - "id": "at_least_5y_up_to_6y_old", - "short": "5y-6y", - "long": "5 to 6 Years Old" - }, - "_6y_to_7y": { - "id": "at_least_6y_up_to_7y_old", - "short": "6y-7y", - "long": "6 to 7 Years Old" - }, - "_7y_to_8y": { - "id": "at_least_7y_up_to_8y_old", - "short": "7y-8y", - "long": "7 to 8 Years Old" - }, - "_8y_to_10y": { - "id": "at_least_8y_up_to_10y_old", - "short": "8y-10y", - "long": "8 to 10 Years Old" - }, - "_10y_to_12y": { - "id": "at_least_10y_up_to_12y_old", - "short": "10y-12y", - "long": "10 to 12 Years Old" - }, - "_12y_to_15y": { - "id": "at_least_12y_up_to_15y_old", - "short": "12y-15y", - "long": "12 to 15 Years Old" - }, - "from_15y": { - "id": "at_least_15y_old", - "short": "15y+", - "long": "15+ Years Old" - } -}); - -export const MAX_AGE_NAMES = /** @type {const} */ ({ - "_1w": { - "id": "up_to_1w_old", - "short": "<1w", - "long": "Up to 1 Week Old" - }, - "_1m": { - "id": "up_to_1m_old", - "short": "<1m", - "long": "Up to 1 Month Old" - }, - "_2m": { - "id": "up_to_2m_old", - "short": "<2m", - "long": "Up to 2 Months Old" - }, - "_3m": { - "id": "up_to_3m_old", - "short": "<3m", - "long": "Up to 3 Months Old" - }, - "_4m": { - "id": "up_to_4m_old", - "short": "<4m", - "long": "Up to 4 Months Old" - }, - "_5m": { - "id": "up_to_5m_old", - "short": "<5m", - "long": "Up to 5 Months Old" - }, - "_6m": { - "id": "up_to_6m_old", - "short": "<6m", - "long": "Up to 6 Months Old" - }, - "_1y": { - "id": "up_to_1y_old", - "short": "<1y", - "long": "Up to 1 Year Old" - }, - "_2y": { - "id": "up_to_2y_old", - "short": "<2y", - "long": "Up to 2 
Years Old" - }, - "_3y": { - "id": "up_to_3y_old", - "short": "<3y", - "long": "Up to 3 Years Old" - }, - "_4y": { - "id": "up_to_4y_old", - "short": "<4y", - "long": "Up to 4 Years Old" - }, - "_5y": { - "id": "up_to_5y_old", - "short": "<5y", - "long": "Up to 5 Years Old" - }, - "_6y": { - "id": "up_to_6y_old", - "short": "<6y", - "long": "Up to 6 Years Old" - }, - "_7y": { - "id": "up_to_7y_old", - "short": "<7y", - "long": "Up to 7 Years Old" - }, - "_8y": { - "id": "up_to_8y_old", - "short": "<8y", - "long": "Up to 8 Years Old" - }, - "_10y": { - "id": "up_to_10y_old", - "short": "<10y", - "long": "Up to 10 Years Old" - }, - "_12y": { - "id": "up_to_12y_old", - "short": "<12y", - "long": "Up to 12 Years Old" - }, - "_15y": { - "id": "up_to_15y_old", - "short": "<15y", - "long": "Up to 15 Years Old" - } -}); - -export const MIN_AGE_NAMES = /** @type {const} */ ({ - "_1d": { - "id": "at_least_1d_old", - "short": "1d+", - "long": "At Least 1 Day Old" - }, - "_1w": { - "id": "at_least_1w_old", - "short": "1w+", - "long": "At Least 1 Week Old" - }, - "_1m": { - "id": "at_least_1m_old", - "short": "1m+", - "long": "At Least 1 Month Old" - }, - "_2m": { - "id": "at_least_2m_old", - "short": "2m+", - "long": "At Least 2 Months Old" - }, - "_3m": { - "id": "at_least_3m_old", - "short": "3m+", - "long": "At Least 3 Months Old" - }, - "_4m": { - "id": "at_least_4m_old", - "short": "4m+", - "long": "At Least 4 Months Old" - }, - "_5m": { - "id": "at_least_5m_old", - "short": "5m+", - "long": "At Least 5 Months Old" - }, - "_6m": { - "id": "at_least_6m_old", - "short": "6m+", - "long": "At Least 6 Months Old" - }, - "_1y": { - "id": "at_least_1y_old", - "short": "1y+", - "long": "At Least 1 Year Old" - }, - "_2y": { - "id": "at_least_2y_old", - "short": "2y+", - "long": "At Least 2 Years Old" - }, - "_3y": { - "id": "at_least_3y_old", - "short": "3y+", - "long": "At Least 3 Years Old" - }, - "_4y": { - "id": "at_least_4y_old", - "short": "4y+", - "long": "At Least 4 Years 
Old" - }, - "_5y": { - "id": "at_least_5y_old", - "short": "5y+", - "long": "At Least 5 Years Old" - }, - "_6y": { - "id": "at_least_6y_old", - "short": "6y+", - "long": "At Least 6 Years Old" - }, - "_7y": { - "id": "at_least_7y_old", - "short": "7y+", - "long": "At Least 7 Years Old" - }, - "_8y": { - "id": "at_least_8y_old", - "short": "8y+", - "long": "At Least 8 Years Old" - }, - "_10y": { - "id": "at_least_10y_old", - "short": "10y+", - "long": "At Least 10 Years Old" - }, - "_12y": { - "id": "at_least_12y_old", - "short": "12y+", - "long": "At Least 12 Years Old" - } -}); - -export const AMOUNT_RANGE_NAMES = /** @type {const} */ ({ - "_0sats": { - "id": "with_0sats", - "short": "0 sats", - "long": "0 Sats" - }, - "_1sat_to_10sats": { - "id": "above_1sat_under_10sats", - "short": "1-10 sats", - "long": "1 to 10 Sats" - }, - "_10sats_to_100sats": { - "id": "above_10sats_under_100sats", - "short": "10-100 sats", - "long": "10 to 100 Sats" - }, - "_100sats_to_1k_sats": { - "id": "above_100sats_under_1k_sats", - "short": "100-1k sats", - "long": "100 to 1K Sats" - }, - "_1k_sats_to_10k_sats": { - "id": "above_1k_sats_under_10k_sats", - "short": "1k-10k sats", - "long": "1K to 10K Sats" - }, - "_10k_sats_to_100k_sats": { - "id": "above_10k_sats_under_100k_sats", - "short": "10k-100k sats", - "long": "10K to 100K Sats" - }, - "_100k_sats_to_1m_sats": { - "id": "above_100k_sats_under_1m_sats", - "short": "100k-1M sats", - "long": "100K to 1M Sats" - }, - "_1m_sats_to_10m_sats": { - "id": "above_1m_sats_under_10m_sats", - "short": "1M-10M sats", - "long": "1M to 10M Sats" - }, - "_10m_sats_to_1btc": { - "id": "above_10m_sats_under_1btc", - "short": "0.1-1 BTC", - "long": "0.1 to 1 BTC" - }, - "_1btc_to_10btc": { - "id": "above_1btc_under_10btc", - "short": "1-10 BTC", - "long": "1 to 10 BTC" - }, - "_10btc_to_100btc": { - "id": "above_10btc_under_100btc", - "short": "10-100 BTC", - "long": "10 to 100 BTC" - }, - "_100btc_to_1k_btc": { - "id": 
"above_100btc_under_1k_btc", - "short": "100-1k BTC", - "long": "100 to 1K BTC" - }, - "_1k_btc_to_10k_btc": { - "id": "above_1k_btc_under_10k_btc", - "short": "1k-10k BTC", - "long": "1K to 10K BTC" - }, - "_10k_btc_to_100k_btc": { - "id": "above_10k_btc_under_100k_btc", - "short": "10k-100k BTC", - "long": "10K to 100K BTC" - }, - "_100k_btc_or_more": { - "id": "above_100k_btc", - "short": "100k+ BTC", - "long": "100K+ BTC" - } -}); - -export const GE_AMOUNT_NAMES = /** @type {const} */ ({ - "_1sat": { - "id": "above_1sat", - "short": "1+ sats", - "long": "Above 1 Sat" - }, - "_10sats": { - "id": "above_10sats", - "short": "10+ sats", - "long": "Above 10 Sats" - }, - "_100sats": { - "id": "above_100sats", - "short": "100+ sats", - "long": "Above 100 Sats" - }, - "_1k_sats": { - "id": "above_1k_sats", - "short": "1k+ sats", - "long": "Above 1K Sats" - }, - "_10k_sats": { - "id": "above_10k_sats", - "short": "10k+ sats", - "long": "Above 10K Sats" - }, - "_100k_sats": { - "id": "above_100k_sats", - "short": "100k+ sats", - "long": "Above 100K Sats" - }, - "_1m_sats": { - "id": "above_1m_sats", - "short": "1M+ sats", - "long": "Above 1M Sats" - }, - "_10m_sats": { - "id": "above_10m_sats", - "short": "0.1+ BTC", - "long": "Above 0.1 BTC" - }, - "_1btc": { - "id": "above_1btc", - "short": "1+ BTC", - "long": "Above 1 BTC" - }, - "_10btc": { - "id": "above_10btc", - "short": "10+ BTC", - "long": "Above 10 BTC" - }, - "_100btc": { - "id": "above_100btc", - "short": "100+ BTC", - "long": "Above 100 BTC" - }, - "_1k_btc": { - "id": "above_1k_btc", - "short": "1k+ BTC", - "long": "Above 1K BTC" - }, - "_10k_btc": { - "id": "above_10k_btc", - "short": "10k+ BTC", - "long": "Above 10K BTC" - } -}); - -export const LT_AMOUNT_NAMES = /** @type {const} */ ({ - "_10sats": { - "id": "under_10sats", - "short": "<10 sats", - "long": "Under 10 Sats" - }, - "_100sats": { - "id": "under_100sats", - "short": "<100 sats", - "long": "Under 100 Sats" - }, - "_1k_sats": { - "id": 
"under_1k_sats", - "short": "<1k sats", - "long": "Under 1K Sats" - }, - "_10k_sats": { - "id": "under_10k_sats", - "short": "<10k sats", - "long": "Under 10K Sats" - }, - "_100k_sats": { - "id": "under_100k_sats", - "short": "<100k sats", - "long": "Under 100K Sats" - }, - "_1m_sats": { - "id": "under_1m_sats", - "short": "<1M sats", - "long": "Under 1M Sats" - }, - "_10m_sats": { - "id": "under_10m_sats", - "short": "<0.1 BTC", - "long": "Under 0.1 BTC" - }, - "_1btc": { - "id": "under_1btc", - "short": "<1 BTC", - "long": "Under 1 BTC" - }, - "_10btc": { - "id": "under_10btc", - "short": "<10 BTC", - "long": "Under 10 BTC" - }, - "_100btc": { - "id": "under_100btc", - "short": "<100 BTC", - "long": "Under 100 BTC" - }, - "_1k_btc": { - "id": "under_1k_btc", - "short": "<1k BTC", - "long": "Under 1K BTC" - }, - "_10k_btc": { - "id": "under_10k_btc", - "short": "<10k BTC", - "long": "Under 10K BTC" - }, - "_100k_btc": { - "id": "under_100k_btc", - "short": "<100k BTC", - "long": "Under 100K BTC" - } -}); - // Type definitions /** @typedef {string} Address */ @@ -1416,7 +524,7 @@ export const LT_AMOUNT_NAMES = /** @type {const} */ ({ */ const _isBrowser = typeof window !== 'undefined' && 'caches' in window; -const _runIdle = (fn) => (globalThis.requestIdleCallback ?? setTimeout)(fn); +const _runIdle = (/** @type {VoidFunction} */ fn) => (globalThis.requestIdleCallback ?? setTimeout)(fn); /** @type {Promise} */ const _cachePromise = _isBrowser @@ -1439,42 +547,43 @@ class BrkError extends Error { } /** - * A metric node that can fetch data for different indexes. 
* @template T + * @typedef {Object} Endpoint + * @property {(onUpdate?: (value: T[]) => void) => Promise} get - Fetch all data points + * @property {(from?: number, to?: number, onUpdate?: (value: T[]) => void) => Promise} range - Fetch data in range + * @property {string} path - The endpoint path */ -class MetricNode { - /** - * @param {BrkClientBase} client - * @param {string} path - */ - constructor(client, path) { - this._client = client; - this._path = path; - } - /** - * Fetch all data points for this metric. - * @param {(value: T[]) => void} [onUpdate] - Called when data is available (may be called twice: cache then fresh) - * @returns {Promise} - */ - get(onUpdate) { - return this._client.get(this._path, onUpdate); - } +/** + * @template T + * @typedef {Object} MetricPattern + * @property {string} name - The metric name + * @property {Partial>>} by - Index endpoints (lazy getters) + * @property {() => Index[]} indexes - Get the list of available indexes + * @property {(index: Index) => Endpoint|undefined} get - Get an endpoint for a specific index + */ - /** - * Fetch data points within a range. - * @param {string | number} [from] - * @param {string | number} [to] - * @param {(value: T[]) => void} [onUpdate] - Called when data is available (may be called twice: cache then fresh) - * @returns {Promise} - */ - getRange(from, to, onUpdate) { - const params = new URLSearchParams(); - if (from !== undefined) params.set('from', String(from)); - if (to !== undefined) params.set('to', String(to)); - const query = params.toString(); - return this._client.get(query ? `${this._path}?${query}` : this._path, onUpdate); - } +/** + * Create an endpoint for a metric index. 
+ * @template T + * @param {BrkClientBase} client + * @param {string} name - The metric vec name + * @param {Index} index - The index name + * @returns {Endpoint} + */ +function _endpoint(client, name, index) { + const p = `/api/metric/${name}/${index}`; + return { + get: (onUpdate) => client.get(p, onUpdate), + range: (from, to, onUpdate) => { + const params = new URLSearchParams(); + if (from !== undefined) params.set('from', String(from)); + if (to !== undefined) params.set('to', String(to)); + const query = params.toString(); + return client.get(query ? `${p}?${query}` : p, onUpdate); + }, + get path() { return p; }, + }; } /** @@ -1498,7 +607,8 @@ class BrkClientBase { * @returns {Promise} */ async get(path, onUpdate) { - const url = `${this.baseUrl}${path}`; + const base = this.baseUrl.endsWith('/') ? this.baseUrl.slice(0, -1) : this.baseUrl; + const url = `${base}${path}`; const cache = await _cachePromise; const cachedRes = await cache?.match(url); const cachedJson = cachedRes ? await cachedRes.json() : null; @@ -1526,901 +636,1403 @@ class BrkClientBase { } } +/** + * Build metric name with optional prefix. + * @param {string} acc - Accumulated prefix + * @param {string} s - Metric suffix + * @returns {string} + */ +const _m = (acc, s) => acc ? 
`${acc}_${s}` : s; + // Index accessor factory functions /** * @template T - * @typedef {Object} Indexes3By - * @property {MetricNode} dateindex - * @property {MetricNode} decadeindex - * @property {MetricNode} difficultyepoch - * @property {MetricNode} height - * @property {MetricNode} monthindex - * @property {MetricNode} quarterindex - * @property {MetricNode} semesterindex - * @property {MetricNode} weekindex - * @property {MetricNode} yearindex + * @typedef {{ name: string, by: { dateindex: Endpoint, decadeindex: Endpoint, difficultyepoch: Endpoint, height: Endpoint, monthindex: Endpoint, quarterindex: Endpoint, semesterindex: Endpoint, weekindex: Endpoint, yearindex: Endpoint }, indexes: () => Index[], get: (index: Index) => Endpoint|undefined }} MetricPattern1 */ /** - * @template T - * @typedef {Object} Indexes3 - * @property {Indexes3By} by - */ - -/** - * Create a Indexes3 accessor + * Create a MetricPattern1 accessor * @template T * @param {BrkClientBase} client - * @param {string} basePath - * @returns {Indexes3} + * @param {string} name - The metric vec name + * @returns {MetricPattern1} */ -function createIndexes3(client, basePath) { +function createMetricPattern1(client, name) { return { + name, by: { - dateindex: new MetricNode(client, `${basePath}/dateindex`), - decadeindex: new MetricNode(client, `${basePath}/decadeindex`), - difficultyepoch: new MetricNode(client, `${basePath}/difficultyepoch`), - height: new MetricNode(client, `${basePath}/height`), - monthindex: new MetricNode(client, `${basePath}/monthindex`), - quarterindex: new MetricNode(client, `${basePath}/quarterindex`), - semesterindex: new MetricNode(client, `${basePath}/semesterindex`), - weekindex: new MetricNode(client, `${basePath}/weekindex`), - yearindex: new MetricNode(client, `${basePath}/yearindex`) + get dateindex() { return _endpoint(client, name, 'dateindex'); }, + get decadeindex() { return _endpoint(client, name, 'decadeindex'); }, + get difficultyepoch() { return 
_endpoint(client, name, 'difficultyepoch'); }, + get height() { return _endpoint(client, name, 'height'); }, + get monthindex() { return _endpoint(client, name, 'monthindex'); }, + get quarterindex() { return _endpoint(client, name, 'quarterindex'); }, + get semesterindex() { return _endpoint(client, name, 'semesterindex'); }, + get weekindex() { return _endpoint(client, name, 'weekindex'); }, + get yearindex() { return _endpoint(client, name, 'yearindex'); } + }, + indexes() { + return ['dateindex', 'decadeindex', 'difficultyepoch', 'height', 'monthindex', 'quarterindex', 'semesterindex', 'weekindex', 'yearindex']; + }, + get(index) { + if (this.indexes().includes(index)) { + return _endpoint(client, name, index); + } } }; } /** * @template T - * @typedef {Object} Indexes4By - * @property {MetricNode} dateindex - * @property {MetricNode} decadeindex - * @property {MetricNode} difficultyepoch - * @property {MetricNode} monthindex - * @property {MetricNode} quarterindex - * @property {MetricNode} semesterindex - * @property {MetricNode} weekindex - * @property {MetricNode} yearindex + * @typedef {{ name: string, by: { dateindex: Endpoint, decadeindex: Endpoint, difficultyepoch: Endpoint, monthindex: Endpoint, quarterindex: Endpoint, semesterindex: Endpoint, weekindex: Endpoint, yearindex: Endpoint }, indexes: () => Index[], get: (index: Index) => Endpoint|undefined }} MetricPattern2 */ /** - * @template T - * @typedef {Object} Indexes4 - * @property {Indexes4By} by - */ - -/** - * Create a Indexes4 accessor + * Create a MetricPattern2 accessor * @template T * @param {BrkClientBase} client - * @param {string} basePath - * @returns {Indexes4} + * @param {string} name - The metric vec name + * @returns {MetricPattern2} */ -function createIndexes4(client, basePath) { +function createMetricPattern2(client, name) { return { + name, by: { - dateindex: new MetricNode(client, `${basePath}/dateindex`), - decadeindex: new MetricNode(client, `${basePath}/decadeindex`), - 
difficultyepoch: new MetricNode(client, `${basePath}/difficultyepoch`), - monthindex: new MetricNode(client, `${basePath}/monthindex`), - quarterindex: new MetricNode(client, `${basePath}/quarterindex`), - semesterindex: new MetricNode(client, `${basePath}/semesterindex`), - weekindex: new MetricNode(client, `${basePath}/weekindex`), - yearindex: new MetricNode(client, `${basePath}/yearindex`) + get dateindex() { return _endpoint(client, name, 'dateindex'); }, + get decadeindex() { return _endpoint(client, name, 'decadeindex'); }, + get difficultyepoch() { return _endpoint(client, name, 'difficultyepoch'); }, + get monthindex() { return _endpoint(client, name, 'monthindex'); }, + get quarterindex() { return _endpoint(client, name, 'quarterindex'); }, + get semesterindex() { return _endpoint(client, name, 'semesterindex'); }, + get weekindex() { return _endpoint(client, name, 'weekindex'); }, + get yearindex() { return _endpoint(client, name, 'yearindex'); } + }, + indexes() { + return ['dateindex', 'decadeindex', 'difficultyepoch', 'monthindex', 'quarterindex', 'semesterindex', 'weekindex', 'yearindex']; + }, + get(index) { + if (this.indexes().includes(index)) { + return _endpoint(client, name, index); + } } }; } /** * @template T - * @typedef {Object} Indexes26By - * @property {MetricNode} dateindex - * @property {MetricNode} decadeindex - * @property {MetricNode} height - * @property {MetricNode} monthindex - * @property {MetricNode} quarterindex - * @property {MetricNode} semesterindex - * @property {MetricNode} weekindex - * @property {MetricNode} yearindex + * @typedef {{ name: string, by: { dateindex: Endpoint, decadeindex: Endpoint, height: Endpoint, monthindex: Endpoint, quarterindex: Endpoint, semesterindex: Endpoint, weekindex: Endpoint, yearindex: Endpoint }, indexes: () => Index[], get: (index: Index) => Endpoint|undefined }} MetricPattern3 */ /** - * @template T - * @typedef {Object} Indexes26 - * @property {Indexes26By} by - */ - -/** - * Create a 
Indexes26 accessor + * Create a MetricPattern3 accessor * @template T * @param {BrkClientBase} client - * @param {string} basePath - * @returns {Indexes26} + * @param {string} name - The metric vec name + * @returns {MetricPattern3} */ -function createIndexes26(client, basePath) { +function createMetricPattern3(client, name) { return { + name, by: { - dateindex: new MetricNode(client, `${basePath}/dateindex`), - decadeindex: new MetricNode(client, `${basePath}/decadeindex`), - height: new MetricNode(client, `${basePath}/height`), - monthindex: new MetricNode(client, `${basePath}/monthindex`), - quarterindex: new MetricNode(client, `${basePath}/quarterindex`), - semesterindex: new MetricNode(client, `${basePath}/semesterindex`), - weekindex: new MetricNode(client, `${basePath}/weekindex`), - yearindex: new MetricNode(client, `${basePath}/yearindex`) + get dateindex() { return _endpoint(client, name, 'dateindex'); }, + get decadeindex() { return _endpoint(client, name, 'decadeindex'); }, + get height() { return _endpoint(client, name, 'height'); }, + get monthindex() { return _endpoint(client, name, 'monthindex'); }, + get quarterindex() { return _endpoint(client, name, 'quarterindex'); }, + get semesterindex() { return _endpoint(client, name, 'semesterindex'); }, + get weekindex() { return _endpoint(client, name, 'weekindex'); }, + get yearindex() { return _endpoint(client, name, 'yearindex'); } + }, + indexes() { + return ['dateindex', 'decadeindex', 'height', 'monthindex', 'quarterindex', 'semesterindex', 'weekindex', 'yearindex']; + }, + get(index) { + if (this.indexes().includes(index)) { + return _endpoint(client, name, index); + } } }; } /** * @template T - * @typedef {Object} IndexesBy - * @property {MetricNode} dateindex - * @property {MetricNode} decadeindex - * @property {MetricNode} monthindex - * @property {MetricNode} quarterindex - * @property {MetricNode} semesterindex - * @property {MetricNode} weekindex - * @property {MetricNode} yearindex + * 
@typedef {{ name: string, by: { dateindex: Endpoint, decadeindex: Endpoint, monthindex: Endpoint, quarterindex: Endpoint, semesterindex: Endpoint, weekindex: Endpoint, yearindex: Endpoint }, indexes: () => Index[], get: (index: Index) => Endpoint|undefined }} MetricPattern4 */ /** - * @template T - * @typedef {Object} Indexes - * @property {IndexesBy} by - */ - -/** - * Create a Indexes accessor + * Create a MetricPattern4 accessor * @template T * @param {BrkClientBase} client - * @param {string} basePath - * @returns {Indexes} + * @param {string} name - The metric vec name + * @returns {MetricPattern4} */ -function createIndexes(client, basePath) { +function createMetricPattern4(client, name) { return { + name, by: { - dateindex: new MetricNode(client, `${basePath}/dateindex`), - decadeindex: new MetricNode(client, `${basePath}/decadeindex`), - monthindex: new MetricNode(client, `${basePath}/monthindex`), - quarterindex: new MetricNode(client, `${basePath}/quarterindex`), - semesterindex: new MetricNode(client, `${basePath}/semesterindex`), - weekindex: new MetricNode(client, `${basePath}/weekindex`), - yearindex: new MetricNode(client, `${basePath}/yearindex`) + get dateindex() { return _endpoint(client, name, 'dateindex'); }, + get decadeindex() { return _endpoint(client, name, 'decadeindex'); }, + get monthindex() { return _endpoint(client, name, 'monthindex'); }, + get quarterindex() { return _endpoint(client, name, 'quarterindex'); }, + get semesterindex() { return _endpoint(client, name, 'semesterindex'); }, + get weekindex() { return _endpoint(client, name, 'weekindex'); }, + get yearindex() { return _endpoint(client, name, 'yearindex'); } + }, + indexes() { + return ['dateindex', 'decadeindex', 'monthindex', 'quarterindex', 'semesterindex', 'weekindex', 'yearindex']; + }, + get(index) { + if (this.indexes().includes(index)) { + return _endpoint(client, name, index); + } } }; } /** * @template T - * @typedef {Object} Indexes27By - * @property {MetricNode} 
decadeindex - * @property {MetricNode} height - * @property {MetricNode} monthindex - * @property {MetricNode} quarterindex - * @property {MetricNode} semesterindex - * @property {MetricNode} weekindex - * @property {MetricNode} yearindex + * @typedef {{ name: string, by: { decadeindex: Endpoint, height: Endpoint, monthindex: Endpoint, quarterindex: Endpoint, semesterindex: Endpoint, weekindex: Endpoint, yearindex: Endpoint }, indexes: () => Index[], get: (index: Index) => Endpoint|undefined }} MetricPattern5 */ /** - * @template T - * @typedef {Object} Indexes27 - * @property {Indexes27By} by - */ - -/** - * Create a Indexes27 accessor + * Create a MetricPattern5 accessor * @template T * @param {BrkClientBase} client - * @param {string} basePath - * @returns {Indexes27} + * @param {string} name - The metric vec name + * @returns {MetricPattern5} */ -function createIndexes27(client, basePath) { +function createMetricPattern5(client, name) { return { + name, by: { - decadeindex: new MetricNode(client, `${basePath}/decadeindex`), - height: new MetricNode(client, `${basePath}/height`), - monthindex: new MetricNode(client, `${basePath}/monthindex`), - quarterindex: new MetricNode(client, `${basePath}/quarterindex`), - semesterindex: new MetricNode(client, `${basePath}/semesterindex`), - weekindex: new MetricNode(client, `${basePath}/weekindex`), - yearindex: new MetricNode(client, `${basePath}/yearindex`) + get decadeindex() { return _endpoint(client, name, 'decadeindex'); }, + get height() { return _endpoint(client, name, 'height'); }, + get monthindex() { return _endpoint(client, name, 'monthindex'); }, + get quarterindex() { return _endpoint(client, name, 'quarterindex'); }, + get semesterindex() { return _endpoint(client, name, 'semesterindex'); }, + get weekindex() { return _endpoint(client, name, 'weekindex'); }, + get yearindex() { return _endpoint(client, name, 'yearindex'); } + }, + indexes() { + return ['decadeindex', 'height', 'monthindex', 'quarterindex', 
'semesterindex', 'weekindex', 'yearindex']; + }, + get(index) { + if (this.indexes().includes(index)) { + return _endpoint(client, name, index); + } } }; } /** * @template T - * @typedef {Object} Indexes28By - * @property {MetricNode} decadeindex - * @property {MetricNode} monthindex - * @property {MetricNode} quarterindex - * @property {MetricNode} semesterindex - * @property {MetricNode} weekindex - * @property {MetricNode} yearindex + * @typedef {{ name: string, by: { decadeindex: Endpoint, monthindex: Endpoint, quarterindex: Endpoint, semesterindex: Endpoint, weekindex: Endpoint, yearindex: Endpoint }, indexes: () => Index[], get: (index: Index) => Endpoint|undefined }} MetricPattern6 */ /** - * @template T - * @typedef {Object} Indexes28 - * @property {Indexes28By} by - */ - -/** - * Create a Indexes28 accessor + * Create a MetricPattern6 accessor * @template T * @param {BrkClientBase} client - * @param {string} basePath - * @returns {Indexes28} + * @param {string} name - The metric vec name + * @returns {MetricPattern6} */ -function createIndexes28(client, basePath) { +function createMetricPattern6(client, name) { return { + name, by: { - decadeindex: new MetricNode(client, `${basePath}/decadeindex`), - monthindex: new MetricNode(client, `${basePath}/monthindex`), - quarterindex: new MetricNode(client, `${basePath}/quarterindex`), - semesterindex: new MetricNode(client, `${basePath}/semesterindex`), - weekindex: new MetricNode(client, `${basePath}/weekindex`), - yearindex: new MetricNode(client, `${basePath}/yearindex`) + get decadeindex() { return _endpoint(client, name, 'decadeindex'); }, + get monthindex() { return _endpoint(client, name, 'monthindex'); }, + get quarterindex() { return _endpoint(client, name, 'quarterindex'); }, + get semesterindex() { return _endpoint(client, name, 'semesterindex'); }, + get weekindex() { return _endpoint(client, name, 'weekindex'); }, + get yearindex() { return _endpoint(client, name, 'yearindex'); } + }, + indexes() { + 
return ['decadeindex', 'monthindex', 'quarterindex', 'semesterindex', 'weekindex', 'yearindex']; + }, + get(index) { + if (this.indexes().includes(index)) { + return _endpoint(client, name, index); + } } }; } /** * @template T - * @typedef {Object} Indexes15By - * @property {MetricNode} quarterindex - * @property {MetricNode} semesterindex - * @property {MetricNode} yearindex + * @typedef {{ name: string, by: { emptyoutputindex: Endpoint, opreturnindex: Endpoint, p2msoutputindex: Endpoint, unknownoutputindex: Endpoint }, indexes: () => Index[], get: (index: Index) => Endpoint|undefined }} MetricPattern7 */ /** - * @template T - * @typedef {Object} Indexes15 - * @property {Indexes15By} by - */ - -/** - * Create a Indexes15 accessor + * Create a MetricPattern7 accessor * @template T * @param {BrkClientBase} client - * @param {string} basePath - * @returns {Indexes15} + * @param {string} name - The metric vec name + * @returns {MetricPattern7} */ -function createIndexes15(client, basePath) { +function createMetricPattern7(client, name) { return { + name, by: { - quarterindex: new MetricNode(client, `${basePath}/quarterindex`), - semesterindex: new MetricNode(client, `${basePath}/semesterindex`), - yearindex: new MetricNode(client, `${basePath}/yearindex`) + get emptyoutputindex() { return _endpoint(client, name, 'emptyoutputindex'); }, + get opreturnindex() { return _endpoint(client, name, 'opreturnindex'); }, + get p2msoutputindex() { return _endpoint(client, name, 'p2msoutputindex'); }, + get unknownoutputindex() { return _endpoint(client, name, 'unknownoutputindex'); } + }, + indexes() { + return ['emptyoutputindex', 'opreturnindex', 'p2msoutputindex', 'unknownoutputindex']; + }, + get(index) { + if (this.indexes().includes(index)) { + return _endpoint(client, name, index); + } } }; } /** * @template T - * @typedef {Object} Indexes13By - * @property {MetricNode} dateindex - * @property {MetricNode} height + * @typedef {{ name: string, by: { quarterindex: Endpoint, 
semesterindex: Endpoint, yearindex: Endpoint }, indexes: () => Index[], get: (index: Index) => Endpoint|undefined }} MetricPattern8 */ /** - * @template T - * @typedef {Object} Indexes13 - * @property {Indexes13By} by - */ - -/** - * Create a Indexes13 accessor + * Create a MetricPattern8 accessor * @template T * @param {BrkClientBase} client - * @param {string} basePath - * @returns {Indexes13} + * @param {string} name - The metric vec name + * @returns {MetricPattern8} */ -function createIndexes13(client, basePath) { +function createMetricPattern8(client, name) { return { + name, by: { - dateindex: new MetricNode(client, `${basePath}/dateindex`), - height: new MetricNode(client, `${basePath}/height`) + get quarterindex() { return _endpoint(client, name, 'quarterindex'); }, + get semesterindex() { return _endpoint(client, name, 'semesterindex'); }, + get yearindex() { return _endpoint(client, name, 'yearindex'); } + }, + indexes() { + return ['quarterindex', 'semesterindex', 'yearindex']; + }, + get(index) { + if (this.indexes().includes(index)) { + return _endpoint(client, name, index); + } } }; } /** * @template T - * @typedef {Object} Indexes14By - * @property {MetricNode} monthindex - * @property {MetricNode} weekindex + * @typedef {{ name: string, by: { dateindex: Endpoint, height: Endpoint }, indexes: () => Index[], get: (index: Index) => Endpoint|undefined }} MetricPattern9 */ /** - * @template T - * @typedef {Object} Indexes14 - * @property {Indexes14By} by - */ - -/** - * Create a Indexes14 accessor + * Create a MetricPattern9 accessor * @template T * @param {BrkClientBase} client - * @param {string} basePath - * @returns {Indexes14} + * @param {string} name - The metric vec name + * @returns {MetricPattern9} */ -function createIndexes14(client, basePath) { +function createMetricPattern9(client, name) { return { + name, by: { - monthindex: new MetricNode(client, `${basePath}/monthindex`), - weekindex: new MetricNode(client, `${basePath}/weekindex`) + get 
dateindex() { return _endpoint(client, name, 'dateindex'); }, + get height() { return _endpoint(client, name, 'height'); } + }, + indexes() { + return ['dateindex', 'height']; + }, + get(index) { + if (this.indexes().includes(index)) { + return _endpoint(client, name, index); + } } }; } /** * @template T - * @typedef {Object} Indexes2By - * @property {MetricNode} height + * @typedef {{ name: string, by: { dateindex: Endpoint, monthindex: Endpoint }, indexes: () => Index[], get: (index: Index) => Endpoint|undefined }} MetricPattern10 */ /** - * @template T - * @typedef {Object} Indexes2 - * @property {Indexes2By} by - */ - -/** - * Create a Indexes2 accessor + * Create a MetricPattern10 accessor * @template T * @param {BrkClientBase} client - * @param {string} basePath - * @returns {Indexes2} + * @param {string} name - The metric vec name + * @returns {MetricPattern10} */ -function createIndexes2(client, basePath) { +function createMetricPattern10(client, name) { return { + name, by: { - height: new MetricNode(client, `${basePath}/height`) + get dateindex() { return _endpoint(client, name, 'dateindex'); }, + get monthindex() { return _endpoint(client, name, 'monthindex'); } + }, + indexes() { + return ['dateindex', 'monthindex']; + }, + get(index) { + if (this.indexes().includes(index)) { + return _endpoint(client, name, index); + } } }; } /** * @template T - * @typedef {Object} Indexes5By - * @property {MetricNode} dateindex + * @typedef {{ name: string, by: { dateindex: Endpoint, weekindex: Endpoint }, indexes: () => Index[], get: (index: Index) => Endpoint|undefined }} MetricPattern11 */ /** - * @template T - * @typedef {Object} Indexes5 - * @property {Indexes5By} by - */ - -/** - * Create a Indexes5 accessor + * Create a MetricPattern11 accessor * @template T * @param {BrkClientBase} client - * @param {string} basePath - * @returns {Indexes5} + * @param {string} name - The metric vec name + * @returns {MetricPattern11} */ -function createIndexes5(client, 
basePath) { +function createMetricPattern11(client, name) { return { + name, by: { - dateindex: new MetricNode(client, `${basePath}/dateindex`) + get dateindex() { return _endpoint(client, name, 'dateindex'); }, + get weekindex() { return _endpoint(client, name, 'weekindex'); } + }, + indexes() { + return ['dateindex', 'weekindex']; + }, + get(index) { + if (this.indexes().includes(index)) { + return _endpoint(client, name, index); + } } }; } /** * @template T - * @typedef {Object} Indexes6By - * @property {MetricNode} txindex + * @typedef {{ name: string, by: { decadeindex: Endpoint, yearindex: Endpoint }, indexes: () => Index[], get: (index: Index) => Endpoint|undefined }} MetricPattern12 */ /** - * @template T - * @typedef {Object} Indexes6 - * @property {Indexes6By} by - */ - -/** - * Create a Indexes6 accessor + * Create a MetricPattern12 accessor * @template T * @param {BrkClientBase} client - * @param {string} basePath - * @returns {Indexes6} + * @param {string} name - The metric vec name + * @returns {MetricPattern12} */ -function createIndexes6(client, basePath) { +function createMetricPattern12(client, name) { return { + name, by: { - txindex: new MetricNode(client, `${basePath}/txindex`) + get decadeindex() { return _endpoint(client, name, 'decadeindex'); }, + get yearindex() { return _endpoint(client, name, 'yearindex'); } + }, + indexes() { + return ['decadeindex', 'yearindex']; + }, + get(index) { + if (this.indexes().includes(index)) { + return _endpoint(client, name, index); + } } }; } /** * @template T - * @typedef {Object} Indexes7By - * @property {MetricNode} decadeindex + * @typedef {{ name: string, by: { difficultyepoch: Endpoint, halvingepoch: Endpoint }, indexes: () => Index[], get: (index: Index) => Endpoint|undefined }} MetricPattern13 */ /** - * @template T - * @typedef {Object} Indexes7 - * @property {Indexes7By} by - */ - -/** - * Create a Indexes7 accessor + * Create a MetricPattern13 accessor * @template T * @param {BrkClientBase} 
client - * @param {string} basePath - * @returns {Indexes7} + * @param {string} name - The metric vec name + * @returns {MetricPattern13} */ -function createIndexes7(client, basePath) { +function createMetricPattern13(client, name) { return { + name, by: { - decadeindex: new MetricNode(client, `${basePath}/decadeindex`) + get difficultyepoch() { return _endpoint(client, name, 'difficultyepoch'); }, + get halvingepoch() { return _endpoint(client, name, 'halvingepoch'); } + }, + indexes() { + return ['difficultyepoch', 'halvingepoch']; + }, + get(index) { + if (this.indexes().includes(index)) { + return _endpoint(client, name, index); + } } }; } /** * @template T - * @typedef {Object} Indexes8By - * @property {MetricNode} monthindex + * @typedef {{ name: string, by: { difficultyepoch: Endpoint, height: Endpoint }, indexes: () => Index[], get: (index: Index) => Endpoint|undefined }} MetricPattern14 */ /** - * @template T - * @typedef {Object} Indexes8 - * @property {Indexes8By} by - */ - -/** - * Create a Indexes8 accessor + * Create a MetricPattern14 accessor * @template T * @param {BrkClientBase} client - * @param {string} basePath - * @returns {Indexes8} + * @param {string} name - The metric vec name + * @returns {MetricPattern14} */ -function createIndexes8(client, basePath) { +function createMetricPattern14(client, name) { return { + name, by: { - monthindex: new MetricNode(client, `${basePath}/monthindex`) + get difficultyepoch() { return _endpoint(client, name, 'difficultyepoch'); }, + get height() { return _endpoint(client, name, 'height'); } + }, + indexes() { + return ['difficultyepoch', 'height']; + }, + get(index) { + if (this.indexes().includes(index)) { + return _endpoint(client, name, index); + } } }; } /** * @template T - * @typedef {Object} Indexes9By - * @property {MetricNode} quarterindex + * @typedef {{ name: string, by: { halvingepoch: Endpoint, height: Endpoint }, indexes: () => Index[], get: (index: Index) => Endpoint|undefined }} 
MetricPattern15 */ /** - * @template T - * @typedef {Object} Indexes9 - * @property {Indexes9By} by - */ - -/** - * Create a Indexes9 accessor + * Create a MetricPattern15 accessor * @template T * @param {BrkClientBase} client - * @param {string} basePath - * @returns {Indexes9} + * @param {string} name - The metric vec name + * @returns {MetricPattern15} */ -function createIndexes9(client, basePath) { +function createMetricPattern15(client, name) { return { + name, by: { - quarterindex: new MetricNode(client, `${basePath}/quarterindex`) + get halvingepoch() { return _endpoint(client, name, 'halvingepoch'); }, + get height() { return _endpoint(client, name, 'height'); } + }, + indexes() { + return ['halvingepoch', 'height']; + }, + get(index) { + if (this.indexes().includes(index)) { + return _endpoint(client, name, index); + } } }; } /** * @template T - * @typedef {Object} Indexes10By - * @property {MetricNode} semesterindex + * @typedef {{ name: string, by: { height: Endpoint, txindex: Endpoint }, indexes: () => Index[], get: (index: Index) => Endpoint|undefined }} MetricPattern16 */ /** - * @template T - * @typedef {Object} Indexes10 - * @property {Indexes10By} by - */ - -/** - * Create a Indexes10 accessor + * Create a MetricPattern16 accessor * @template T * @param {BrkClientBase} client - * @param {string} basePath - * @returns {Indexes10} + * @param {string} name - The metric vec name + * @returns {MetricPattern16} */ -function createIndexes10(client, basePath) { +function createMetricPattern16(client, name) { return { + name, by: { - semesterindex: new MetricNode(client, `${basePath}/semesterindex`) + get height() { return _endpoint(client, name, 'height'); }, + get txindex() { return _endpoint(client, name, 'txindex'); } + }, + indexes() { + return ['height', 'txindex']; + }, + get(index) { + if (this.indexes().includes(index)) { + return _endpoint(client, name, index); + } } }; } /** * @template T - * @typedef {Object} Indexes11By - * @property 
{MetricNode} weekindex + * @typedef {{ name: string, by: { monthindex: Endpoint, quarterindex: Endpoint }, indexes: () => Index[], get: (index: Index) => Endpoint|undefined }} MetricPattern17 */ /** - * @template T - * @typedef {Object} Indexes11 - * @property {Indexes11By} by - */ - -/** - * Create a Indexes11 accessor + * Create a MetricPattern17 accessor * @template T * @param {BrkClientBase} client - * @param {string} basePath - * @returns {Indexes11} + * @param {string} name - The metric vec name + * @returns {MetricPattern17} */ -function createIndexes11(client, basePath) { +function createMetricPattern17(client, name) { return { + name, by: { - weekindex: new MetricNode(client, `${basePath}/weekindex`) + get monthindex() { return _endpoint(client, name, 'monthindex'); }, + get quarterindex() { return _endpoint(client, name, 'quarterindex'); } + }, + indexes() { + return ['monthindex', 'quarterindex']; + }, + get(index) { + if (this.indexes().includes(index)) { + return _endpoint(client, name, index); + } } }; } /** * @template T - * @typedef {Object} Indexes12By - * @property {MetricNode} yearindex + * @typedef {{ name: string, by: { monthindex: Endpoint, semesterindex: Endpoint }, indexes: () => Index[], get: (index: Index) => Endpoint|undefined }} MetricPattern18 */ /** - * @template T - * @typedef {Object} Indexes12 - * @property {Indexes12By} by - */ - -/** - * Create a Indexes12 accessor + * Create a MetricPattern18 accessor * @template T * @param {BrkClientBase} client - * @param {string} basePath - * @returns {Indexes12} + * @param {string} name - The metric vec name + * @returns {MetricPattern18} */ -function createIndexes12(client, basePath) { +function createMetricPattern18(client, name) { return { + name, by: { - yearindex: new MetricNode(client, `${basePath}/yearindex`) + get monthindex() { return _endpoint(client, name, 'monthindex'); }, + get semesterindex() { return _endpoint(client, name, 'semesterindex'); } + }, + indexes() { + return 
['monthindex', 'semesterindex']; + }, + get(index) { + if (this.indexes().includes(index)) { + return _endpoint(client, name, index); + } } }; } /** * @template T - * @typedef {Object} Indexes16By - * @property {MetricNode} p2aaddressindex + * @typedef {{ name: string, by: { monthindex: Endpoint, weekindex: Endpoint }, indexes: () => Index[], get: (index: Index) => Endpoint|undefined }} MetricPattern19 */ /** - * @template T - * @typedef {Object} Indexes16 - * @property {Indexes16By} by - */ - -/** - * Create a Indexes16 accessor + * Create a MetricPattern19 accessor * @template T * @param {BrkClientBase} client - * @param {string} basePath - * @returns {Indexes16} + * @param {string} name - The metric vec name + * @returns {MetricPattern19} */ -function createIndexes16(client, basePath) { +function createMetricPattern19(client, name) { return { + name, by: { - p2aaddressindex: new MetricNode(client, `${basePath}/p2aaddressindex`) + get monthindex() { return _endpoint(client, name, 'monthindex'); }, + get weekindex() { return _endpoint(client, name, 'weekindex'); } + }, + indexes() { + return ['monthindex', 'weekindex']; + }, + get(index) { + if (this.indexes().includes(index)) { + return _endpoint(client, name, index); + } } }; } /** * @template T - * @typedef {Object} Indexes17By - * @property {MetricNode} p2pk33addressindex + * @typedef {{ name: string, by: { monthindex: Endpoint, yearindex: Endpoint }, indexes: () => Index[], get: (index: Index) => Endpoint|undefined }} MetricPattern20 */ /** - * @template T - * @typedef {Object} Indexes17 - * @property {Indexes17By} by - */ - -/** - * Create a Indexes17 accessor + * Create a MetricPattern20 accessor * @template T * @param {BrkClientBase} client - * @param {string} basePath - * @returns {Indexes17} + * @param {string} name - The metric vec name + * @returns {MetricPattern20} */ -function createIndexes17(client, basePath) { +function createMetricPattern20(client, name) { return { + name, by: { - 
p2pk33addressindex: new MetricNode(client, `${basePath}/p2pk33addressindex`) + get monthindex() { return _endpoint(client, name, 'monthindex'); }, + get yearindex() { return _endpoint(client, name, 'yearindex'); } + }, + indexes() { + return ['monthindex', 'yearindex']; + }, + get(index) { + if (this.indexes().includes(index)) { + return _endpoint(client, name, index); + } } }; } /** * @template T - * @typedef {Object} Indexes18By - * @property {MetricNode} p2pk65addressindex + * @typedef {{ name: string, by: { dateindex: Endpoint }, indexes: () => Index[], get: (index: Index) => Endpoint|undefined }} MetricPattern21 */ /** - * @template T - * @typedef {Object} Indexes18 - * @property {Indexes18By} by - */ - -/** - * Create a Indexes18 accessor + * Create a MetricPattern21 accessor * @template T * @param {BrkClientBase} client - * @param {string} basePath - * @returns {Indexes18} + * @param {string} name - The metric vec name + * @returns {MetricPattern21} */ -function createIndexes18(client, basePath) { +function createMetricPattern21(client, name) { return { + name, by: { - p2pk65addressindex: new MetricNode(client, `${basePath}/p2pk65addressindex`) + get dateindex() { return _endpoint(client, name, 'dateindex'); } + }, + indexes() { + return ['dateindex']; + }, + get(index) { + if (this.indexes().includes(index)) { + return _endpoint(client, name, index); + } } }; } /** * @template T - * @typedef {Object} Indexes19By - * @property {MetricNode} p2pkhaddressindex + * @typedef {{ name: string, by: { decadeindex: Endpoint }, indexes: () => Index[], get: (index: Index) => Endpoint|undefined }} MetricPattern22 */ /** - * @template T - * @typedef {Object} Indexes19 - * @property {Indexes19By} by - */ - -/** - * Create a Indexes19 accessor + * Create a MetricPattern22 accessor * @template T * @param {BrkClientBase} client - * @param {string} basePath - * @returns {Indexes19} + * @param {string} name - The metric vec name + * @returns {MetricPattern22} */ -function 
createIndexes19(client, basePath) { +function createMetricPattern22(client, name) { return { + name, by: { - p2pkhaddressindex: new MetricNode(client, `${basePath}/p2pkhaddressindex`) + get decadeindex() { return _endpoint(client, name, 'decadeindex'); } + }, + indexes() { + return ['decadeindex']; + }, + get(index) { + if (this.indexes().includes(index)) { + return _endpoint(client, name, index); + } } }; } /** * @template T - * @typedef {Object} Indexes20By - * @property {MetricNode} p2shaddressindex + * @typedef {{ name: string, by: { difficultyepoch: Endpoint }, indexes: () => Index[], get: (index: Index) => Endpoint|undefined }} MetricPattern23 */ /** - * @template T - * @typedef {Object} Indexes20 - * @property {Indexes20By} by - */ - -/** - * Create a Indexes20 accessor + * Create a MetricPattern23 accessor * @template T * @param {BrkClientBase} client - * @param {string} basePath - * @returns {Indexes20} + * @param {string} name - The metric vec name + * @returns {MetricPattern23} */ -function createIndexes20(client, basePath) { +function createMetricPattern23(client, name) { return { + name, by: { - p2shaddressindex: new MetricNode(client, `${basePath}/p2shaddressindex`) + get difficultyepoch() { return _endpoint(client, name, 'difficultyepoch'); } + }, + indexes() { + return ['difficultyepoch']; + }, + get(index) { + if (this.indexes().includes(index)) { + return _endpoint(client, name, index); + } } }; } /** * @template T - * @typedef {Object} Indexes21By - * @property {MetricNode} p2traddressindex + * @typedef {{ name: string, by: { emptyoutputindex: Endpoint }, indexes: () => Index[], get: (index: Index) => Endpoint|undefined }} MetricPattern24 */ /** - * @template T - * @typedef {Object} Indexes21 - * @property {Indexes21By} by - */ - -/** - * Create a Indexes21 accessor + * Create a MetricPattern24 accessor * @template T * @param {BrkClientBase} client - * @param {string} basePath - * @returns {Indexes21} + * @param {string} name - The metric vec 
name + * @returns {MetricPattern24} */ -function createIndexes21(client, basePath) { +function createMetricPattern24(client, name) { return { + name, by: { - p2traddressindex: new MetricNode(client, `${basePath}/p2traddressindex`) + get emptyoutputindex() { return _endpoint(client, name, 'emptyoutputindex'); } + }, + indexes() { + return ['emptyoutputindex']; + }, + get(index) { + if (this.indexes().includes(index)) { + return _endpoint(client, name, index); + } } }; } /** * @template T - * @typedef {Object} Indexes22By - * @property {MetricNode} p2wpkhaddressindex + * @typedef {{ name: string, by: { height: Endpoint }, indexes: () => Index[], get: (index: Index) => Endpoint|undefined }} MetricPattern25 */ /** - * @template T - * @typedef {Object} Indexes22 - * @property {Indexes22By} by - */ - -/** - * Create a Indexes22 accessor + * Create a MetricPattern25 accessor * @template T * @param {BrkClientBase} client - * @param {string} basePath - * @returns {Indexes22} + * @param {string} name - The metric vec name + * @returns {MetricPattern25} */ -function createIndexes22(client, basePath) { +function createMetricPattern25(client, name) { return { + name, by: { - p2wpkhaddressindex: new MetricNode(client, `${basePath}/p2wpkhaddressindex`) + get height() { return _endpoint(client, name, 'height'); } + }, + indexes() { + return ['height']; + }, + get(index) { + if (this.indexes().includes(index)) { + return _endpoint(client, name, index); + } } }; } /** * @template T - * @typedef {Object} Indexes23By - * @property {MetricNode} p2wshaddressindex + * @typedef {{ name: string, by: { txinindex: Endpoint }, indexes: () => Index[], get: (index: Index) => Endpoint|undefined }} MetricPattern26 */ /** - * @template T - * @typedef {Object} Indexes23 - * @property {Indexes23By} by - */ - -/** - * Create a Indexes23 accessor + * Create a MetricPattern26 accessor * @template T * @param {BrkClientBase} client - * @param {string} basePath - * @returns {Indexes23} + * @param {string} 
name - The metric vec name + * @returns {MetricPattern26} */ -function createIndexes23(client, basePath) { +function createMetricPattern26(client, name) { return { + name, by: { - p2wshaddressindex: new MetricNode(client, `${basePath}/p2wshaddressindex`) + get txinindex() { return _endpoint(client, name, 'txinindex'); } + }, + indexes() { + return ['txinindex']; + }, + get(index) { + if (this.indexes().includes(index)) { + return _endpoint(client, name, index); + } } }; } /** * @template T - * @typedef {Object} Indexes24By - * @property {MetricNode} txinindex + * @typedef {{ name: string, by: { monthindex: Endpoint }, indexes: () => Index[], get: (index: Index) => Endpoint|undefined }} MetricPattern27 */ /** - * @template T - * @typedef {Object} Indexes24 - * @property {Indexes24By} by - */ - -/** - * Create a Indexes24 accessor + * Create a MetricPattern27 accessor * @template T * @param {BrkClientBase} client - * @param {string} basePath - * @returns {Indexes24} + * @param {string} name - The metric vec name + * @returns {MetricPattern27} */ -function createIndexes24(client, basePath) { +function createMetricPattern27(client, name) { return { + name, by: { - txinindex: new MetricNode(client, `${basePath}/txinindex`) + get monthindex() { return _endpoint(client, name, 'monthindex'); } + }, + indexes() { + return ['monthindex']; + }, + get(index) { + if (this.indexes().includes(index)) { + return _endpoint(client, name, index); + } } }; } /** * @template T - * @typedef {Object} Indexes25By - * @property {MetricNode} txoutindex + * @typedef {{ name: string, by: { opreturnindex: Endpoint }, indexes: () => Index[], get: (index: Index) => Endpoint|undefined }} MetricPattern28 */ /** - * @template T - * @typedef {Object} Indexes25 - * @property {Indexes25By} by - */ - -/** - * Create a Indexes25 accessor + * Create a MetricPattern28 accessor * @template T * @param {BrkClientBase} client - * @param {string} basePath - * @returns {Indexes25} + * @param {string} name - The 
metric vec name + * @returns {MetricPattern28} */ -function createIndexes25(client, basePath) { +function createMetricPattern28(client, name) { return { + name, by: { - txoutindex: new MetricNode(client, `${basePath}/txoutindex`) + get opreturnindex() { return _endpoint(client, name, 'opreturnindex'); } + }, + indexes() { + return ['opreturnindex']; + }, + get(index) { + if (this.indexes().includes(index)) { + return _endpoint(client, name, index); + } } }; } /** * @template T - * @typedef {Object} Indexes29By - * @property {MetricNode} emptyaddressindex + * @typedef {{ name: string, by: { txoutindex: Endpoint }, indexes: () => Index[], get: (index: Index) => Endpoint|undefined }} MetricPattern29 */ /** - * @template T - * @typedef {Object} Indexes29 - * @property {Indexes29By} by - */ - -/** - * Create a Indexes29 accessor + * Create a MetricPattern29 accessor * @template T * @param {BrkClientBase} client - * @param {string} basePath - * @returns {Indexes29} + * @param {string} name - The metric vec name + * @returns {MetricPattern29} */ -function createIndexes29(client, basePath) { +function createMetricPattern29(client, name) { return { + name, by: { - emptyaddressindex: new MetricNode(client, `${basePath}/emptyaddressindex`) + get txoutindex() { return _endpoint(client, name, 'txoutindex'); } + }, + indexes() { + return ['txoutindex']; + }, + get(index) { + if (this.indexes().includes(index)) { + return _endpoint(client, name, index); + } } }; } /** * @template T - * @typedef {Object} Indexes30By - * @property {MetricNode} loadedaddressindex + * @typedef {{ name: string, by: { p2aaddressindex: Endpoint }, indexes: () => Index[], get: (index: Index) => Endpoint|undefined }} MetricPattern30 */ /** - * @template T - * @typedef {Object} Indexes30 - * @property {Indexes30By} by - */ - -/** - * Create a Indexes30 accessor + * Create a MetricPattern30 accessor * @template T * @param {BrkClientBase} client - * @param {string} basePath - * @returns {Indexes30} + * 
@param {string} name - The metric vec name + * @returns {MetricPattern30} */ -function createIndexes30(client, basePath) { +function createMetricPattern30(client, name) { return { + name, by: { - loadedaddressindex: new MetricNode(client, `${basePath}/loadedaddressindex`) + get p2aaddressindex() { return _endpoint(client, name, 'p2aaddressindex'); } + }, + indexes() { + return ['p2aaddressindex']; + }, + get(index) { + if (this.indexes().includes(index)) { + return _endpoint(client, name, index); + } + } + }; +} + +/** + * @template T + * @typedef {{ name: string, by: { p2msoutputindex: Endpoint }, indexes: () => Index[], get: (index: Index) => Endpoint|undefined }} MetricPattern31 + */ + +/** + * Create a MetricPattern31 accessor + * @template T + * @param {BrkClientBase} client + * @param {string} name - The metric vec name + * @returns {MetricPattern31} + */ +function createMetricPattern31(client, name) { + return { + name, + by: { + get p2msoutputindex() { return _endpoint(client, name, 'p2msoutputindex'); } + }, + indexes() { + return ['p2msoutputindex']; + }, + get(index) { + if (this.indexes().includes(index)) { + return _endpoint(client, name, index); + } + } + }; +} + +/** + * @template T + * @typedef {{ name: string, by: { p2pk33addressindex: Endpoint }, indexes: () => Index[], get: (index: Index) => Endpoint|undefined }} MetricPattern32 + */ + +/** + * Create a MetricPattern32 accessor + * @template T + * @param {BrkClientBase} client + * @param {string} name - The metric vec name + * @returns {MetricPattern32} + */ +function createMetricPattern32(client, name) { + return { + name, + by: { + get p2pk33addressindex() { return _endpoint(client, name, 'p2pk33addressindex'); } + }, + indexes() { + return ['p2pk33addressindex']; + }, + get(index) { + if (this.indexes().includes(index)) { + return _endpoint(client, name, index); + } + } + }; +} + +/** + * @template T + * @typedef {{ name: string, by: { p2pk65addressindex: Endpoint }, indexes: () => Index[], 
get: (index: Index) => Endpoint|undefined }} MetricPattern33 + */ + +/** + * Create a MetricPattern33 accessor + * @template T + * @param {BrkClientBase} client + * @param {string} name - The metric vec name + * @returns {MetricPattern33} + */ +function createMetricPattern33(client, name) { + return { + name, + by: { + get p2pk65addressindex() { return _endpoint(client, name, 'p2pk65addressindex'); } + }, + indexes() { + return ['p2pk65addressindex']; + }, + get(index) { + if (this.indexes().includes(index)) { + return _endpoint(client, name, index); + } + } + }; +} + +/** + * @template T + * @typedef {{ name: string, by: { p2pkhaddressindex: Endpoint }, indexes: () => Index[], get: (index: Index) => Endpoint|undefined }} MetricPattern34 + */ + +/** + * Create a MetricPattern34 accessor + * @template T + * @param {BrkClientBase} client + * @param {string} name - The metric vec name + * @returns {MetricPattern34} + */ +function createMetricPattern34(client, name) { + return { + name, + by: { + get p2pkhaddressindex() { return _endpoint(client, name, 'p2pkhaddressindex'); } + }, + indexes() { + return ['p2pkhaddressindex']; + }, + get(index) { + if (this.indexes().includes(index)) { + return _endpoint(client, name, index); + } + } + }; +} + +/** + * @template T + * @typedef {{ name: string, by: { p2shaddressindex: Endpoint }, indexes: () => Index[], get: (index: Index) => Endpoint|undefined }} MetricPattern35 + */ + +/** + * Create a MetricPattern35 accessor + * @template T + * @param {BrkClientBase} client + * @param {string} name - The metric vec name + * @returns {MetricPattern35} + */ +function createMetricPattern35(client, name) { + return { + name, + by: { + get p2shaddressindex() { return _endpoint(client, name, 'p2shaddressindex'); } + }, + indexes() { + return ['p2shaddressindex']; + }, + get(index) { + if (this.indexes().includes(index)) { + return _endpoint(client, name, index); + } + } + }; +} + +/** + * @template T + * @typedef {{ name: string, by: { 
p2traddressindex: Endpoint }, indexes: () => Index[], get: (index: Index) => Endpoint|undefined }} MetricPattern36 + */ + +/** + * Create a MetricPattern36 accessor + * @template T + * @param {BrkClientBase} client + * @param {string} name - The metric vec name + * @returns {MetricPattern36} + */ +function createMetricPattern36(client, name) { + return { + name, + by: { + get p2traddressindex() { return _endpoint(client, name, 'p2traddressindex'); } + }, + indexes() { + return ['p2traddressindex']; + }, + get(index) { + if (this.indexes().includes(index)) { + return _endpoint(client, name, index); + } + } + }; +} + +/** + * @template T + * @typedef {{ name: string, by: { p2wpkhaddressindex: Endpoint }, indexes: () => Index[], get: (index: Index) => Endpoint|undefined }} MetricPattern37 + */ + +/** + * Create a MetricPattern37 accessor + * @template T + * @param {BrkClientBase} client + * @param {string} name - The metric vec name + * @returns {MetricPattern37} + */ +function createMetricPattern37(client, name) { + return { + name, + by: { + get p2wpkhaddressindex() { return _endpoint(client, name, 'p2wpkhaddressindex'); } + }, + indexes() { + return ['p2wpkhaddressindex']; + }, + get(index) { + if (this.indexes().includes(index)) { + return _endpoint(client, name, index); + } + } + }; +} + +/** + * @template T + * @typedef {{ name: string, by: { p2wshaddressindex: Endpoint }, indexes: () => Index[], get: (index: Index) => Endpoint|undefined }} MetricPattern38 + */ + +/** + * Create a MetricPattern38 accessor + * @template T + * @param {BrkClientBase} client + * @param {string} name - The metric vec name + * @returns {MetricPattern38} + */ +function createMetricPattern38(client, name) { + return { + name, + by: { + get p2wshaddressindex() { return _endpoint(client, name, 'p2wshaddressindex'); } + }, + indexes() { + return ['p2wshaddressindex']; + }, + get(index) { + if (this.indexes().includes(index)) { + return _endpoint(client, name, index); + } + } + }; +} + +/** 
+ * @template T + * @typedef {{ name: string, by: { quarterindex: Endpoint }, indexes: () => Index[], get: (index: Index) => Endpoint|undefined }} MetricPattern39 + */ + +/** + * Create a MetricPattern39 accessor + * @template T + * @param {BrkClientBase} client + * @param {string} name - The metric vec name + * @returns {MetricPattern39} + */ +function createMetricPattern39(client, name) { + return { + name, + by: { + get quarterindex() { return _endpoint(client, name, 'quarterindex'); } + }, + indexes() { + return ['quarterindex']; + }, + get(index) { + if (this.indexes().includes(index)) { + return _endpoint(client, name, index); + } + } + }; +} + +/** + * @template T + * @typedef {{ name: string, by: { semesterindex: Endpoint }, indexes: () => Index[], get: (index: Index) => Endpoint|undefined }} MetricPattern40 + */ + +/** + * Create a MetricPattern40 accessor + * @template T + * @param {BrkClientBase} client + * @param {string} name - The metric vec name + * @returns {MetricPattern40} + */ +function createMetricPattern40(client, name) { + return { + name, + by: { + get semesterindex() { return _endpoint(client, name, 'semesterindex'); } + }, + indexes() { + return ['semesterindex']; + }, + get(index) { + if (this.indexes().includes(index)) { + return _endpoint(client, name, index); + } + } + }; +} + +/** + * @template T + * @typedef {{ name: string, by: { txindex: Endpoint }, indexes: () => Index[], get: (index: Index) => Endpoint|undefined }} MetricPattern41 + */ + +/** + * Create a MetricPattern41 accessor + * @template T + * @param {BrkClientBase} client + * @param {string} name - The metric vec name + * @returns {MetricPattern41} + */ +function createMetricPattern41(client, name) { + return { + name, + by: { + get txindex() { return _endpoint(client, name, 'txindex'); } + }, + indexes() { + return ['txindex']; + }, + get(index) { + if (this.indexes().includes(index)) { + return _endpoint(client, name, index); + } + } + }; +} + +/** + * @template T + * 
@typedef {{ name: string, by: { unknownoutputindex: Endpoint }, indexes: () => Index[], get: (index: Index) => Endpoint|undefined }} MetricPattern42 + */ + +/** + * Create a MetricPattern42 accessor + * @template T + * @param {BrkClientBase} client + * @param {string} name - The metric vec name + * @returns {MetricPattern42} + */ +function createMetricPattern42(client, name) { + return { + name, + by: { + get unknownoutputindex() { return _endpoint(client, name, 'unknownoutputindex'); } + }, + indexes() { + return ['unknownoutputindex']; + }, + get(index) { + if (this.indexes().includes(index)) { + return _endpoint(client, name, index); + } + } + }; +} + +/** + * @template T + * @typedef {{ name: string, by: { weekindex: Endpoint }, indexes: () => Index[], get: (index: Index) => Endpoint|undefined }} MetricPattern43 + */ + +/** + * Create a MetricPattern43 accessor + * @template T + * @param {BrkClientBase} client + * @param {string} name - The metric vec name + * @returns {MetricPattern43} + */ +function createMetricPattern43(client, name) { + return { + name, + by: { + get weekindex() { return _endpoint(client, name, 'weekindex'); } + }, + indexes() { + return ['weekindex']; + }, + get(index) { + if (this.indexes().includes(index)) { + return _endpoint(client, name, index); + } + } + }; +} + +/** + * @template T + * @typedef {{ name: string, by: { yearindex: Endpoint }, indexes: () => Index[], get: (index: Index) => Endpoint|undefined }} MetricPattern44 + */ + +/** + * Create a MetricPattern44 accessor + * @template T + * @param {BrkClientBase} client + * @param {string} name - The metric vec name + * @returns {MetricPattern44} + */ +function createMetricPattern44(client, name) { + return { + name, + by: { + get yearindex() { return _endpoint(client, name, 'yearindex'); } + }, + indexes() { + return ['yearindex']; + }, + get(index) { + if (this.indexes().includes(index)) { + return _endpoint(client, name, index); + } + } + }; +} + +/** + * @template T + * 
@typedef {{ name: string, by: { loadedaddressindex: Endpoint }, indexes: () => Index[], get: (index: Index) => Endpoint|undefined }} MetricPattern45 + */ + +/** + * Create a MetricPattern45 accessor + * @template T + * @param {BrkClientBase} client + * @param {string} name - The metric vec name + * @returns {MetricPattern45} + */ +function createMetricPattern45(client, name) { + return { + name, + by: { + get loadedaddressindex() { return _endpoint(client, name, 'loadedaddressindex'); } + }, + indexes() { + return ['loadedaddressindex']; + }, + get(index) { + if (this.indexes().includes(index)) { + return _endpoint(client, name, index); + } + } + }; +} + +/** + * @template T + * @typedef {{ name: string, by: { emptyaddressindex: Endpoint }, indexes: () => Index[], get: (index: Index) => Endpoint|undefined }} MetricPattern46 + */ + +/** + * Create a MetricPattern46 accessor + * @template T + * @param {BrkClientBase} client + * @param {string} name - The metric vec name + * @returns {MetricPattern46} + */ +function createMetricPattern46(client, name) { + return { + name, + by: { + get emptyaddressindex() { return _endpoint(client, name, 'emptyaddressindex'); } + }, + indexes() { + return ['emptyaddressindex']; + }, + get(index) { + if (this.indexes().includes(index)) { + return _endpoint(client, name, index); + } } }; } @@ -2429,719 +2041,696 @@ function createIndexes30(client, basePath) { /** * @typedef {Object} RealizedPattern3 - * @property {Indexes5} adjustedSopr - * @property {Indexes5} adjustedSopr30dEma - * @property {Indexes5} adjustedSopr7dEma - * @property {Indexes3} adjustedValueCreated - * @property {Indexes3} adjustedValueDestroyed + * @property {MetricPattern21} adjustedSopr + * @property {MetricPattern21} adjustedSopr30dEma + * @property {MetricPattern21} adjustedSopr7dEma + * @property {MetricPattern1} adjustedValueCreated + * @property {MetricPattern1} adjustedValueDestroyed + * @property {MetricPattern4} mvrv * @property {BlockCountPattern} 
negRealizedLoss * @property {BlockCountPattern} netRealizedPnl - * @property {Indexes} netRealizedPnlCumulative30dDelta - * @property {Indexes} netRealizedPnlCumulative30dDeltaRelToMarketCap - * @property {Indexes} netRealizedPnlCumulative30dDeltaRelToRealizedCap - * @property {Indexes2} netRealizedPnlRelToRealizedCap - * @property {Indexes3} realizedCap - * @property {Indexes} realizedCap30dDelta - * @property {Indexes3} realizedCapRelToOwnMarketCap + * @property {MetricPattern4} netRealizedPnlCumulative30dDelta + * @property {MetricPattern4} netRealizedPnlCumulative30dDeltaRelToMarketCap + * @property {MetricPattern4} netRealizedPnlCumulative30dDeltaRelToRealizedCap + * @property {MetricPattern25} netRealizedPnlRelToRealizedCap + * @property {MetricPattern1} realizedCap + * @property {MetricPattern4} realizedCap30dDelta + * @property {MetricPattern1} realizedCapRelToOwnMarketCap * @property {BlockCountPattern} realizedLoss - * @property {Indexes2} realizedLossRelToRealizedCap - * @property {Indexes3} realizedPrice + * @property {MetricPattern25} realizedLossRelToRealizedCap + * @property {MetricPattern1} realizedPrice * @property {ActivePriceRatioPattern} realizedPriceExtra * @property {BlockCountPattern} realizedProfit - * @property {Indexes2} realizedProfitRelToRealizedCap - * @property {Indexes5} realizedProfitToLossRatio - * @property {Indexes3} realizedValue - * @property {Indexes5} sellSideRiskRatio - * @property {Indexes5} sellSideRiskRatio30dEma - * @property {Indexes5} sellSideRiskRatio7dEma - * @property {Indexes5} sopr - * @property {Indexes5} sopr30dEma - * @property {Indexes5} sopr7dEma - * @property {BitcoinPattern2} totalRealizedPnl - * @property {Indexes3} valueCreated - * @property {Indexes3} valueDestroyed + * @property {MetricPattern25} realizedProfitRelToRealizedCap + * @property {MetricPattern21} realizedProfitToLossRatio + * @property {MetricPattern1} realizedValue + * @property {MetricPattern21} sellSideRiskRatio + * @property 
{MetricPattern21} sellSideRiskRatio30dEma + * @property {MetricPattern21} sellSideRiskRatio7dEma + * @property {MetricPattern21} sopr + * @property {MetricPattern21} sopr30dEma + * @property {MetricPattern21} sopr7dEma + * @property {TotalRealizedPnlPattern} totalRealizedPnl + * @property {MetricPattern1} valueCreated + * @property {MetricPattern1} valueDestroyed */ /** * Create a RealizedPattern3 pattern node * @param {BrkClientBase} client - * @param {string} basePath + * @param {string} acc - Accumulated metric name * @returns {RealizedPattern3} */ -function createRealizedPattern3(client, basePath) { +function createRealizedPattern3(client, acc) { return { - adjustedSopr: createIndexes5(client, `${basePath}/adjusted_sopr`), - adjustedSopr30dEma: createIndexes5(client, `${basePath}/adjusted_sopr_30d_ema`), - adjustedSopr7dEma: createIndexes5(client, `${basePath}/adjusted_sopr_7d_ema`), - adjustedValueCreated: createIndexes3(client, `${basePath}/adjusted_value_created`), - adjustedValueDestroyed: createIndexes3(client, `${basePath}/adjusted_value_destroyed`), - negRealizedLoss: createBlockCountPattern(client, `${basePath}/neg_realized_loss`), - netRealizedPnl: createBlockCountPattern(client, `${basePath}/net_realized_pnl`), - netRealizedPnlCumulative30dDelta: createIndexes(client, `${basePath}/net_realized_pnl_cumulative_30d_delta`), - netRealizedPnlCumulative30dDeltaRelToMarketCap: createIndexes(client, `${basePath}/net_realized_pnl_cumulative_30d_delta_rel_to_market_cap`), - netRealizedPnlCumulative30dDeltaRelToRealizedCap: createIndexes(client, `${basePath}/net_realized_pnl_cumulative_30d_delta_rel_to_realized_cap`), - netRealizedPnlRelToRealizedCap: createIndexes2(client, `${basePath}/net_realized_pnl_rel_to_realized_cap`), - realizedCap: createIndexes3(client, `${basePath}/realized_cap`), - realizedCap30dDelta: createIndexes(client, `${basePath}/realized_cap_30d_delta`), - realizedCapRelToOwnMarketCap: createIndexes3(client, 
`${basePath}/realized_cap_rel_to_own_market_cap`), - realizedLoss: createBlockCountPattern(client, `${basePath}/realized_loss`), - realizedLossRelToRealizedCap: createIndexes2(client, `${basePath}/realized_loss_rel_to_realized_cap`), - realizedPrice: createIndexes3(client, `${basePath}/realized_price`), - realizedPriceExtra: createActivePriceRatioPattern(client, `${basePath}/realized_price_extra`), - realizedProfit: createBlockCountPattern(client, `${basePath}/realized_profit`), - realizedProfitRelToRealizedCap: createIndexes2(client, `${basePath}/realized_profit_rel_to_realized_cap`), - realizedProfitToLossRatio: createIndexes5(client, `${basePath}/realized_profit_to_loss_ratio`), - realizedValue: createIndexes3(client, `${basePath}/realized_value`), - sellSideRiskRatio: createIndexes5(client, `${basePath}/sell_side_risk_ratio`), - sellSideRiskRatio30dEma: createIndexes5(client, `${basePath}/sell_side_risk_ratio_30d_ema`), - sellSideRiskRatio7dEma: createIndexes5(client, `${basePath}/sell_side_risk_ratio_7d_ema`), - sopr: createIndexes5(client, `${basePath}/sopr`), - sopr30dEma: createIndexes5(client, `${basePath}/sopr_30d_ema`), - sopr7dEma: createIndexes5(client, `${basePath}/sopr_7d_ema`), - totalRealizedPnl: createBitcoinPattern2(client, `${basePath}/total_realized_pnl`), - valueCreated: createIndexes3(client, `${basePath}/value_created`), - valueDestroyed: createIndexes3(client, `${basePath}/value_destroyed`) - }; -} - -/** - * @typedef {Object} Ratio1ySdPattern2 - * @property {Indexes} _0sdUsd - * @property {Indexes} m05sd - * @property {Indexes} m05sdUsd - * @property {Indexes} m15sd - * @property {Indexes} m15sdUsd - * @property {Indexes} m1sd - * @property {Indexes} m1sdUsd - * @property {Indexes} m25sd - * @property {Indexes} m25sdUsd - * @property {Indexes} m2sd - * @property {Indexes} m2sdUsd - * @property {Indexes} m3sd - * @property {Indexes} m3sdUsd - * @property {Indexes} p05sd - * @property {Indexes} p05sdUsd - * @property {Indexes} p15sd - * 
@property {Indexes} p15sdUsd - * @property {Indexes} p1sd - * @property {Indexes} p1sdUsd - * @property {Indexes} p25sd - * @property {Indexes} p25sdUsd - * @property {Indexes} p2sd - * @property {Indexes} p2sdUsd - * @property {Indexes} p3sd - * @property {Indexes} p3sdUsd - * @property {Indexes} sd - * @property {Indexes} sma - * @property {Indexes} zscore - */ - -/** - * Create a Ratio1ySdPattern2 pattern node - * @param {BrkClientBase} client - * @param {string} basePath - * @returns {Ratio1ySdPattern2} - */ -function createRatio1ySdPattern2(client, basePath) { - return { - _0sdUsd: createIndexes(client, `${basePath}/_0sd_usd`), - m05sd: createIndexes(client, `${basePath}/m0_5sd`), - m05sdUsd: createIndexes(client, `${basePath}/m0_5sd_usd`), - m15sd: createIndexes(client, `${basePath}/m1_5sd`), - m15sdUsd: createIndexes(client, `${basePath}/m1_5sd_usd`), - m1sd: createIndexes(client, `${basePath}/m1sd`), - m1sdUsd: createIndexes(client, `${basePath}/m1sd_usd`), - m25sd: createIndexes(client, `${basePath}/m2_5sd`), - m25sdUsd: createIndexes(client, `${basePath}/m2_5sd_usd`), - m2sd: createIndexes(client, `${basePath}/m2sd`), - m2sdUsd: createIndexes(client, `${basePath}/m2sd_usd`), - m3sd: createIndexes(client, `${basePath}/m3sd`), - m3sdUsd: createIndexes(client, `${basePath}/m3sd_usd`), - p05sd: createIndexes(client, `${basePath}/p0_5sd`), - p05sdUsd: createIndexes(client, `${basePath}/p0_5sd_usd`), - p15sd: createIndexes(client, `${basePath}/p1_5sd`), - p15sdUsd: createIndexes(client, `${basePath}/p1_5sd_usd`), - p1sd: createIndexes(client, `${basePath}/p1sd`), - p1sdUsd: createIndexes(client, `${basePath}/p1sd_usd`), - p25sd: createIndexes(client, `${basePath}/p2_5sd`), - p25sdUsd: createIndexes(client, `${basePath}/p2_5sd_usd`), - p2sd: createIndexes(client, `${basePath}/p2sd`), - p2sdUsd: createIndexes(client, `${basePath}/p2sd_usd`), - p3sd: createIndexes(client, `${basePath}/p3sd`), - p3sdUsd: createIndexes(client, `${basePath}/p3sd_usd`), - sd: 
createIndexes(client, `${basePath}/sd`), - sma: createIndexes(client, `${basePath}/sma`), - zscore: createIndexes(client, `${basePath}/zscore`) - }; -} - -/** - * @typedef {Object} RealizedPattern2 - * @property {BlockCountPattern} negRealizedLoss - * @property {BlockCountPattern} netRealizedPnl - * @property {Indexes} netRealizedPnlCumulative30dDelta - * @property {Indexes} netRealizedPnlCumulative30dDeltaRelToMarketCap - * @property {Indexes} netRealizedPnlCumulative30dDeltaRelToRealizedCap - * @property {Indexes2} netRealizedPnlRelToRealizedCap - * @property {Indexes3} realizedCap - * @property {Indexes} realizedCap30dDelta - * @property {Indexes3} realizedCapRelToOwnMarketCap - * @property {BlockCountPattern} realizedLoss - * @property {Indexes2} realizedLossRelToRealizedCap - * @property {Indexes3} realizedPrice - * @property {ActivePriceRatioPattern} realizedPriceExtra - * @property {BlockCountPattern} realizedProfit - * @property {Indexes2} realizedProfitRelToRealizedCap - * @property {Indexes5} realizedProfitToLossRatio - * @property {Indexes3} realizedValue - * @property {Indexes5} sellSideRiskRatio - * @property {Indexes5} sellSideRiskRatio30dEma - * @property {Indexes5} sellSideRiskRatio7dEma - * @property {Indexes5} sopr - * @property {Indexes5} sopr30dEma - * @property {Indexes5} sopr7dEma - * @property {BitcoinPattern2} totalRealizedPnl - * @property {Indexes3} valueCreated - * @property {Indexes3} valueDestroyed - */ - -/** - * Create a RealizedPattern2 pattern node - * @param {BrkClientBase} client - * @param {string} basePath - * @returns {RealizedPattern2} - */ -function createRealizedPattern2(client, basePath) { - return { - negRealizedLoss: createBlockCountPattern(client, `${basePath}/neg_realized_loss`), - netRealizedPnl: createBlockCountPattern(client, `${basePath}/net_realized_pnl`), - netRealizedPnlCumulative30dDelta: createIndexes(client, `${basePath}/net_realized_pnl_cumulative_30d_delta`), - netRealizedPnlCumulative30dDeltaRelToMarketCap: 
createIndexes(client, `${basePath}/net_realized_pnl_cumulative_30d_delta_rel_to_market_cap`), - netRealizedPnlCumulative30dDeltaRelToRealizedCap: createIndexes(client, `${basePath}/net_realized_pnl_cumulative_30d_delta_rel_to_realized_cap`), - netRealizedPnlRelToRealizedCap: createIndexes2(client, `${basePath}/net_realized_pnl_rel_to_realized_cap`), - realizedCap: createIndexes3(client, `${basePath}/realized_cap`), - realizedCap30dDelta: createIndexes(client, `${basePath}/realized_cap_30d_delta`), - realizedCapRelToOwnMarketCap: createIndexes3(client, `${basePath}/realized_cap_rel_to_own_market_cap`), - realizedLoss: createBlockCountPattern(client, `${basePath}/realized_loss`), - realizedLossRelToRealizedCap: createIndexes2(client, `${basePath}/realized_loss_rel_to_realized_cap`), - realizedPrice: createIndexes3(client, `${basePath}/realized_price`), - realizedPriceExtra: createActivePriceRatioPattern(client, `${basePath}/realized_price_extra`), - realizedProfit: createBlockCountPattern(client, `${basePath}/realized_profit`), - realizedProfitRelToRealizedCap: createIndexes2(client, `${basePath}/realized_profit_rel_to_realized_cap`), - realizedProfitToLossRatio: createIndexes5(client, `${basePath}/realized_profit_to_loss_ratio`), - realizedValue: createIndexes3(client, `${basePath}/realized_value`), - sellSideRiskRatio: createIndexes5(client, `${basePath}/sell_side_risk_ratio`), - sellSideRiskRatio30dEma: createIndexes5(client, `${basePath}/sell_side_risk_ratio_30d_ema`), - sellSideRiskRatio7dEma: createIndexes5(client, `${basePath}/sell_side_risk_ratio_7d_ema`), - sopr: createIndexes5(client, `${basePath}/sopr`), - sopr30dEma: createIndexes5(client, `${basePath}/sopr_30d_ema`), - sopr7dEma: createIndexes5(client, `${basePath}/sopr_7d_ema`), - totalRealizedPnl: createBitcoinPattern2(client, `${basePath}/total_realized_pnl`), - valueCreated: createIndexes3(client, `${basePath}/value_created`), - valueDestroyed: createIndexes3(client, `${basePath}/value_destroyed`) - 
}; -} - -/** - * @typedef {Object} RealizedPattern - * @property {BlockCountPattern} negRealizedLoss - * @property {BlockCountPattern} netRealizedPnl - * @property {Indexes} netRealizedPnlCumulative30dDelta - * @property {Indexes} netRealizedPnlCumulative30dDeltaRelToMarketCap - * @property {Indexes} netRealizedPnlCumulative30dDeltaRelToRealizedCap - * @property {Indexes2} netRealizedPnlRelToRealizedCap - * @property {Indexes3} realizedCap - * @property {Indexes} realizedCap30dDelta - * @property {BlockCountPattern} realizedLoss - * @property {Indexes2} realizedLossRelToRealizedCap - * @property {Indexes3} realizedPrice - * @property {RealizedPriceExtraPattern} realizedPriceExtra - * @property {BlockCountPattern} realizedProfit - * @property {Indexes2} realizedProfitRelToRealizedCap - * @property {Indexes3} realizedValue - * @property {Indexes5} sellSideRiskRatio - * @property {Indexes5} sellSideRiskRatio30dEma - * @property {Indexes5} sellSideRiskRatio7dEma - * @property {Indexes5} sopr - * @property {Indexes5} sopr30dEma - * @property {Indexes5} sopr7dEma - * @property {BitcoinPattern2} totalRealizedPnl - * @property {Indexes3} valueCreated - * @property {Indexes3} valueDestroyed - */ - -/** - * Create a RealizedPattern pattern node - * @param {BrkClientBase} client - * @param {string} basePath - * @returns {RealizedPattern} - */ -function createRealizedPattern(client, basePath) { - return { - negRealizedLoss: createBlockCountPattern(client, `${basePath}/neg_realized_loss`), - netRealizedPnl: createBlockCountPattern(client, `${basePath}/net_realized_pnl`), - netRealizedPnlCumulative30dDelta: createIndexes(client, `${basePath}/net_realized_pnl_cumulative_30d_delta`), - netRealizedPnlCumulative30dDeltaRelToMarketCap: createIndexes(client, `${basePath}/net_realized_pnl_cumulative_30d_delta_rel_to_market_cap`), - netRealizedPnlCumulative30dDeltaRelToRealizedCap: createIndexes(client, `${basePath}/net_realized_pnl_cumulative_30d_delta_rel_to_realized_cap`), - 
netRealizedPnlRelToRealizedCap: createIndexes2(client, `${basePath}/net_realized_pnl_rel_to_realized_cap`), - realizedCap: createIndexes3(client, `${basePath}/realized_cap`), - realizedCap30dDelta: createIndexes(client, `${basePath}/realized_cap_30d_delta`), - realizedLoss: createBlockCountPattern(client, `${basePath}/realized_loss`), - realizedLossRelToRealizedCap: createIndexes2(client, `${basePath}/realized_loss_rel_to_realized_cap`), - realizedPrice: createIndexes3(client, `${basePath}/realized_price`), - realizedPriceExtra: createRealizedPriceExtraPattern(client, `${basePath}/realized_price_extra`), - realizedProfit: createBlockCountPattern(client, `${basePath}/realized_profit`), - realizedProfitRelToRealizedCap: createIndexes2(client, `${basePath}/realized_profit_rel_to_realized_cap`), - realizedValue: createIndexes3(client, `${basePath}/realized_value`), - sellSideRiskRatio: createIndexes5(client, `${basePath}/sell_side_risk_ratio`), - sellSideRiskRatio30dEma: createIndexes5(client, `${basePath}/sell_side_risk_ratio_30d_ema`), - sellSideRiskRatio7dEma: createIndexes5(client, `${basePath}/sell_side_risk_ratio_7d_ema`), - sopr: createIndexes5(client, `${basePath}/sopr`), - sopr30dEma: createIndexes5(client, `${basePath}/sopr_30d_ema`), - sopr7dEma: createIndexes5(client, `${basePath}/sopr_7d_ema`), - totalRealizedPnl: createBitcoinPattern2(client, `${basePath}/total_realized_pnl`), - valueCreated: createIndexes3(client, `${basePath}/value_created`), - valueDestroyed: createIndexes3(client, `${basePath}/value_destroyed`) - }; -} - -/** - * @typedef {Object} Price13dEmaPattern - * @property {Indexes} price - * @property {Indexes} ratio - * @property {Indexes} ratio1mSma - * @property {Indexes} ratio1wSma - * @property {Ratio1ySdPattern2} ratio1ySd - * @property {Ratio1ySdPattern2} ratio2ySd - * @property {Ratio1ySdPattern2} ratio4ySd - * @property {Indexes} ratioPct1 - * @property {Indexes} ratioPct1Usd - * @property {Indexes} ratioPct2 - * @property {Indexes} 
ratioPct2Usd - * @property {Indexes} ratioPct5 - * @property {Indexes} ratioPct5Usd - * @property {Indexes} ratioPct95 - * @property {Indexes} ratioPct95Usd - * @property {Indexes} ratioPct98 - * @property {Indexes} ratioPct98Usd - * @property {Indexes} ratioPct99 - * @property {Indexes} ratioPct99Usd - * @property {Ratio1ySdPattern2} ratioSd - */ - -/** - * Create a Price13dEmaPattern pattern node - * @param {BrkClientBase} client - * @param {string} acc - Accumulated metric name - * @returns {Price13dEmaPattern} - */ -function createPrice13dEmaPattern(client, acc) { - return { - price: createIndexes(client, `/${acc}`), - ratio: createIndexes(client, `/${acc}_ratio`), - ratio1mSma: createIndexes(client, `/${acc}_ratio_1m_sma`), - ratio1wSma: createIndexes(client, `/${acc}_ratio_1w_sma`), - ratio1ySd: createRatio1ySdPattern2(client, `${acc}_ratio_1y_sd`), - ratio2ySd: createRatio1ySdPattern2(client, `${acc}_ratio_2y_sd`), - ratio4ySd: createRatio1ySdPattern2(client, `${acc}_ratio_4y_sd`), - ratioPct1: createIndexes(client, `/${acc}_ratio_pct1`), - ratioPct1Usd: createIndexes(client, `/${acc}_ratio_pct1_usd`), - ratioPct2: createIndexes(client, `/${acc}_ratio_pct2`), - ratioPct2Usd: createIndexes(client, `/${acc}_ratio_pct2_usd`), - ratioPct5: createIndexes(client, `/${acc}_ratio_pct5`), - ratioPct5Usd: createIndexes(client, `/${acc}_ratio_pct5_usd`), - ratioPct95: createIndexes(client, `/${acc}_ratio_pct95`), - ratioPct95Usd: createIndexes(client, `/${acc}_ratio_pct95_usd`), - ratioPct98: createIndexes(client, `/${acc}_ratio_pct98`), - ratioPct98Usd: createIndexes(client, `/${acc}_ratio_pct98_usd`), - ratioPct99: createIndexes(client, `/${acc}_ratio_pct99`), - ratioPct99Usd: createIndexes(client, `/${acc}_ratio_pct99_usd`), - ratioSd: createRatio1ySdPattern2(client, `${acc}_ratio_sd`) - }; -} - -/** - * @typedef {Object} PricePercentilesPattern - * @property {Indexes} pct05 - * @property {Indexes} pct10 - * @property {Indexes} pct15 - * @property {Indexes} pct20 - 
* @property {Indexes} pct25 - * @property {Indexes} pct30 - * @property {Indexes} pct35 - * @property {Indexes} pct40 - * @property {Indexes} pct45 - * @property {Indexes} pct50 - * @property {Indexes} pct55 - * @property {Indexes} pct60 - * @property {Indexes} pct65 - * @property {Indexes} pct70 - * @property {Indexes} pct75 - * @property {Indexes} pct80 - * @property {Indexes} pct85 - * @property {Indexes} pct90 - * @property {Indexes} pct95 - */ - -/** - * Create a PricePercentilesPattern pattern node - * @param {BrkClientBase} client - * @param {string} basePath - * @returns {PricePercentilesPattern} - */ -function createPricePercentilesPattern(client, basePath) { - return { - pct05: createIndexes(client, `${basePath}/pct05`), - pct10: createIndexes(client, `${basePath}/pct10`), - pct15: createIndexes(client, `${basePath}/pct15`), - pct20: createIndexes(client, `${basePath}/pct20`), - pct25: createIndexes(client, `${basePath}/pct25`), - pct30: createIndexes(client, `${basePath}/pct30`), - pct35: createIndexes(client, `${basePath}/pct35`), - pct40: createIndexes(client, `${basePath}/pct40`), - pct45: createIndexes(client, `${basePath}/pct45`), - pct50: createIndexes(client, `${basePath}/pct50`), - pct55: createIndexes(client, `${basePath}/pct55`), - pct60: createIndexes(client, `${basePath}/pct60`), - pct65: createIndexes(client, `${basePath}/pct65`), - pct70: createIndexes(client, `${basePath}/pct70`), - pct75: createIndexes(client, `${basePath}/pct75`), - pct80: createIndexes(client, `${basePath}/pct80`), - pct85: createIndexes(client, `${basePath}/pct85`), - pct90: createIndexes(client, `${basePath}/pct90`), - pct95: createIndexes(client, `${basePath}/pct95`) - }; -} - -/** - * @typedef {Object} RelativePattern2 - * @property {Indexes27} negUnrealizedLossRelToMarketCap - * @property {Indexes27} negUnrealizedLossRelToOwnMarketCap - * @property {Indexes27} negUnrealizedLossRelToOwnTotalUnrealizedPnl - * @property {Indexes26} netUnrealizedPnlRelToMarketCap - * 
@property {Indexes26} netUnrealizedPnlRelToOwnMarketCap - * @property {Indexes26} netUnrealizedPnlRelToOwnTotalUnrealizedPnl - * @property {Indexes27} supplyInLossRelToCirculatingSupply - * @property {Indexes27} supplyInLossRelToOwnSupply - * @property {Indexes27} supplyInProfitRelToCirculatingSupply - * @property {Indexes27} supplyInProfitRelToOwnSupply - * @property {Indexes} supplyRelToCirculatingSupply - * @property {Indexes27} unrealizedLossRelToMarketCap - * @property {Indexes27} unrealizedLossRelToOwnMarketCap - * @property {Indexes27} unrealizedLossRelToOwnTotalUnrealizedPnl - * @property {Indexes27} unrealizedProfitRelToMarketCap - * @property {Indexes27} unrealizedProfitRelToOwnMarketCap - * @property {Indexes27} unrealizedProfitRelToOwnTotalUnrealizedPnl - */ - -/** - * Create a RelativePattern2 pattern node - * @param {BrkClientBase} client - * @param {string} basePath - * @returns {RelativePattern2} - */ -function createRelativePattern2(client, basePath) { - return { - negUnrealizedLossRelToMarketCap: createIndexes27(client, `${basePath}/neg_unrealized_loss_rel_to_market_cap`), - negUnrealizedLossRelToOwnMarketCap: createIndexes27(client, `${basePath}/neg_unrealized_loss_rel_to_own_market_cap`), - negUnrealizedLossRelToOwnTotalUnrealizedPnl: createIndexes27(client, `${basePath}/neg_unrealized_loss_rel_to_own_total_unrealized_pnl`), - netUnrealizedPnlRelToMarketCap: createIndexes26(client, `${basePath}/net_unrealized_pnl_rel_to_market_cap`), - netUnrealizedPnlRelToOwnMarketCap: createIndexes26(client, `${basePath}/net_unrealized_pnl_rel_to_own_market_cap`), - netUnrealizedPnlRelToOwnTotalUnrealizedPnl: createIndexes26(client, `${basePath}/net_unrealized_pnl_rel_to_own_total_unrealized_pnl`), - supplyInLossRelToCirculatingSupply: createIndexes27(client, `${basePath}/supply_in_loss_rel_to_circulating_supply`), - supplyInLossRelToOwnSupply: createIndexes27(client, `${basePath}/supply_in_loss_rel_to_own_supply`), - supplyInProfitRelToCirculatingSupply: 
createIndexes27(client, `${basePath}/supply_in_profit_rel_to_circulating_supply`), - supplyInProfitRelToOwnSupply: createIndexes27(client, `${basePath}/supply_in_profit_rel_to_own_supply`), - supplyRelToCirculatingSupply: createIndexes(client, `${basePath}/supply_rel_to_circulating_supply`), - unrealizedLossRelToMarketCap: createIndexes27(client, `${basePath}/unrealized_loss_rel_to_market_cap`), - unrealizedLossRelToOwnMarketCap: createIndexes27(client, `${basePath}/unrealized_loss_rel_to_own_market_cap`), - unrealizedLossRelToOwnTotalUnrealizedPnl: createIndexes27(client, `${basePath}/unrealized_loss_rel_to_own_total_unrealized_pnl`), - unrealizedProfitRelToMarketCap: createIndexes27(client, `${basePath}/unrealized_profit_rel_to_market_cap`), - unrealizedProfitRelToOwnMarketCap: createIndexes27(client, `${basePath}/unrealized_profit_rel_to_own_market_cap`), - unrealizedProfitRelToOwnTotalUnrealizedPnl: createIndexes27(client, `${basePath}/unrealized_profit_rel_to_own_total_unrealized_pnl`) + adjustedSopr: createMetricPattern21(client, _m(acc, 'adjusted_sopr')), + adjustedSopr30dEma: createMetricPattern21(client, _m(acc, 'adjusted_sopr_30d_ema')), + adjustedSopr7dEma: createMetricPattern21(client, _m(acc, 'adjusted_sopr_7d_ema')), + adjustedValueCreated: createMetricPattern1(client, _m(acc, 'adjusted_value_created')), + adjustedValueDestroyed: createMetricPattern1(client, _m(acc, 'adjusted_value_destroyed')), + mvrv: createMetricPattern4(client, _m(acc, 'mvrv')), + negRealizedLoss: createBlockCountPattern(client, _m(acc, 'neg_realized_loss')), + netRealizedPnl: createBlockCountPattern(client, _m(acc, 'net_realized_pnl')), + netRealizedPnlCumulative30dDelta: createMetricPattern4(client, _m(acc, 'net_realized_pnl_cumulative_30d_delta')), + netRealizedPnlCumulative30dDeltaRelToMarketCap: createMetricPattern4(client, _m(acc, 'net_realized_pnl_cumulative_30d_delta_rel_to_market_cap')), + netRealizedPnlCumulative30dDeltaRelToRealizedCap: createMetricPattern4(client, 
_m(acc, 'net_realized_pnl_cumulative_30d_delta_rel_to_realized_cap')), + netRealizedPnlRelToRealizedCap: createMetricPattern25(client, _m(acc, 'net_realized_pnl_rel_to_realized_cap')), + realizedCap: createMetricPattern1(client, _m(acc, 'realized_cap')), + realizedCap30dDelta: createMetricPattern4(client, _m(acc, 'realized_cap_30d_delta')), + realizedCapRelToOwnMarketCap: createMetricPattern1(client, _m(acc, 'realized_cap_rel_to_own_market_cap')), + realizedLoss: createBlockCountPattern(client, _m(acc, 'realized_loss')), + realizedLossRelToRealizedCap: createMetricPattern25(client, _m(acc, 'realized_loss_rel_to_realized_cap')), + realizedPrice: createMetricPattern1(client, _m(acc, 'realized_price')), + realizedPriceExtra: createActivePriceRatioPattern(client, _m(acc, 'realized_price_ratio')), + realizedProfit: createBlockCountPattern(client, _m(acc, 'realized_profit')), + realizedProfitRelToRealizedCap: createMetricPattern25(client, _m(acc, 'realized_profit_rel_to_realized_cap')), + realizedProfitToLossRatio: createMetricPattern21(client, _m(acc, 'realized_profit_to_loss_ratio')), + realizedValue: createMetricPattern1(client, _m(acc, 'realized_value')), + sellSideRiskRatio: createMetricPattern21(client, _m(acc, 'sell_side_risk_ratio')), + sellSideRiskRatio30dEma: createMetricPattern21(client, _m(acc, 'sell_side_risk_ratio_30d_ema')), + sellSideRiskRatio7dEma: createMetricPattern21(client, _m(acc, 'sell_side_risk_ratio_7d_ema')), + sopr: createMetricPattern21(client, _m(acc, 'sopr')), + sopr30dEma: createMetricPattern21(client, _m(acc, 'sopr_30d_ema')), + sopr7dEma: createMetricPattern21(client, _m(acc, 'sopr_7d_ema')), + totalRealizedPnl: createTotalRealizedPnlPattern(client, _m(acc, 'total_realized_pnl')), + valueCreated: createMetricPattern1(client, _m(acc, 'value_created')), + valueDestroyed: createMetricPattern1(client, _m(acc, 'value_destroyed')), }; } /** * @typedef {Object} Ratio1ySdPattern - * @property {Indexes} m05sd - * @property {Indexes} m15sd - * 
@property {Indexes} m1sd - * @property {Indexes} m25sd - * @property {Indexes} m2sd - * @property {Indexes} m3sd - * @property {Indexes} p05sd - * @property {Indexes} p15sd - * @property {Indexes} p1sd - * @property {Indexes} p25sd - * @property {Indexes} p2sd - * @property {Indexes} p3sd - * @property {Indexes} sd - * @property {Indexes} sma - * @property {Indexes} zscore + * @property {MetricPattern4} _0sdUsd + * @property {MetricPattern4} m05sd + * @property {MetricPattern4} m05sdUsd + * @property {MetricPattern4} m15sd + * @property {MetricPattern4} m15sdUsd + * @property {MetricPattern4} m1sd + * @property {MetricPattern4} m1sdUsd + * @property {MetricPattern4} m25sd + * @property {MetricPattern4} m25sdUsd + * @property {MetricPattern4} m2sd + * @property {MetricPattern4} m2sdUsd + * @property {MetricPattern4} m3sd + * @property {MetricPattern4} m3sdUsd + * @property {MetricPattern4} p05sd + * @property {MetricPattern4} p05sdUsd + * @property {MetricPattern4} p15sd + * @property {MetricPattern4} p15sdUsd + * @property {MetricPattern4} p1sd + * @property {MetricPattern4} p1sdUsd + * @property {MetricPattern4} p25sd + * @property {MetricPattern4} p25sdUsd + * @property {MetricPattern4} p2sd + * @property {MetricPattern4} p2sdUsd + * @property {MetricPattern4} p3sd + * @property {MetricPattern4} p3sdUsd + * @property {MetricPattern4} sd + * @property {MetricPattern4} sma + * @property {MetricPattern4} zscore */ /** * Create a Ratio1ySdPattern pattern node * @param {BrkClientBase} client - * @param {string} basePath + * @param {string} acc - Accumulated metric name * @returns {Ratio1ySdPattern} */ -function createRatio1ySdPattern(client, basePath) { +function createRatio1ySdPattern(client, acc) { return { - m05sd: createIndexes(client, `${basePath}/m0_5sd`), - m15sd: createIndexes(client, `${basePath}/m1_5sd`), - m1sd: createIndexes(client, `${basePath}/m1sd`), - m25sd: createIndexes(client, `${basePath}/m2_5sd`), - m2sd: createIndexes(client, `${basePath}/m2sd`), 
- m3sd: createIndexes(client, `${basePath}/m3sd`), - p05sd: createIndexes(client, `${basePath}/p0_5sd`), - p15sd: createIndexes(client, `${basePath}/p1_5sd`), - p1sd: createIndexes(client, `${basePath}/p1sd`), - p25sd: createIndexes(client, `${basePath}/p2_5sd`), - p2sd: createIndexes(client, `${basePath}/p2sd`), - p3sd: createIndexes(client, `${basePath}/p3sd`), - sd: createIndexes(client, `${basePath}/sd`), - sma: createIndexes(client, `${basePath}/sma`), - zscore: createIndexes(client, `${basePath}/zscore`) + _0sdUsd: createMetricPattern4(client, _m(acc, '0sd_usd')), + m05sd: createMetricPattern4(client, _m(acc, 'm0_5sd')), + m05sdUsd: createMetricPattern4(client, _m(acc, 'm0_5sd_usd')), + m15sd: createMetricPattern4(client, _m(acc, 'm1_5sd')), + m15sdUsd: createMetricPattern4(client, _m(acc, 'm1_5sd_usd')), + m1sd: createMetricPattern4(client, _m(acc, 'm1sd')), + m1sdUsd: createMetricPattern4(client, _m(acc, 'm1sd_usd')), + m25sd: createMetricPattern4(client, _m(acc, 'm2_5sd')), + m25sdUsd: createMetricPattern4(client, _m(acc, 'm2_5sd_usd')), + m2sd: createMetricPattern4(client, _m(acc, 'm2sd')), + m2sdUsd: createMetricPattern4(client, _m(acc, 'm2sd_usd')), + m3sd: createMetricPattern4(client, _m(acc, 'm3sd')), + m3sdUsd: createMetricPattern4(client, _m(acc, 'm3sd_usd')), + p05sd: createMetricPattern4(client, _m(acc, 'p0_5sd')), + p05sdUsd: createMetricPattern4(client, _m(acc, 'p0_5sd_usd')), + p15sd: createMetricPattern4(client, _m(acc, 'p1_5sd')), + p15sdUsd: createMetricPattern4(client, _m(acc, 'p1_5sd_usd')), + p1sd: createMetricPattern4(client, _m(acc, 'p1sd')), + p1sdUsd: createMetricPattern4(client, _m(acc, 'p1sd_usd')), + p25sd: createMetricPattern4(client, _m(acc, 'p2_5sd')), + p25sdUsd: createMetricPattern4(client, _m(acc, 'p2_5sd_usd')), + p2sd: createMetricPattern4(client, _m(acc, 'p2sd')), + p2sdUsd: createMetricPattern4(client, _m(acc, 'p2sd_usd')), + p3sd: createMetricPattern4(client, _m(acc, 'p3sd')), + p3sdUsd: createMetricPattern4(client, 
_m(acc, 'p3sd_usd')), + sd: createMetricPattern4(client, _m(acc, 'sd')), + sma: createMetricPattern4(client, _m(acc, 'sma')), + zscore: createMetricPattern4(client, _m(acc, 'zscore')), }; } /** - * @typedef {Object} AXbtPattern - * @property {BlockCountPattern} _1dDominance - * @property {Indexes} _1mBlocksMined - * @property {Indexes} _1mDominance - * @property {Indexes} _1wBlocksMined - * @property {Indexes} _1wDominance - * @property {Indexes} _1yBlocksMined - * @property {Indexes} _1yDominance - * @property {BlockCountPattern} blocksMined - * @property {UnclaimedRewardsPattern} coinbase - * @property {Indexes} daysSinceBlock - * @property {BlockCountPattern} dominance - * @property {FeePattern2} fee - * @property {FeePattern2} subsidy + * @typedef {Object} RealizedPattern2 + * @property {MetricPattern4} mvrv + * @property {BlockCountPattern} negRealizedLoss + * @property {BlockCountPattern} netRealizedPnl + * @property {MetricPattern4} netRealizedPnlCumulative30dDelta + * @property {MetricPattern4} netRealizedPnlCumulative30dDeltaRelToMarketCap + * @property {MetricPattern4} netRealizedPnlCumulative30dDeltaRelToRealizedCap + * @property {MetricPattern25} netRealizedPnlRelToRealizedCap + * @property {MetricPattern1} realizedCap + * @property {MetricPattern4} realizedCap30dDelta + * @property {MetricPattern1} realizedCapRelToOwnMarketCap + * @property {BlockCountPattern} realizedLoss + * @property {MetricPattern25} realizedLossRelToRealizedCap + * @property {MetricPattern1} realizedPrice + * @property {ActivePriceRatioPattern} realizedPriceExtra + * @property {BlockCountPattern} realizedProfit + * @property {MetricPattern25} realizedProfitRelToRealizedCap + * @property {MetricPattern21} realizedProfitToLossRatio + * @property {MetricPattern1} realizedValue + * @property {MetricPattern21} sellSideRiskRatio + * @property {MetricPattern21} sellSideRiskRatio30dEma + * @property {MetricPattern21} sellSideRiskRatio7dEma + * @property {MetricPattern21} sopr + * 
@property {MetricPattern21} sopr30dEma + * @property {MetricPattern21} sopr7dEma + * @property {TotalRealizedPnlPattern} totalRealizedPnl + * @property {MetricPattern1} valueCreated + * @property {MetricPattern1} valueDestroyed */ /** - * Create a AXbtPattern pattern node + * Create a RealizedPattern2 pattern node * @param {BrkClientBase} client - * @param {string} basePath - * @returns {AXbtPattern} + * @param {string} acc - Accumulated metric name + * @returns {RealizedPattern2} */ -function createAXbtPattern(client, basePath) { +function createRealizedPattern2(client, acc) { return { - _1dDominance: createBlockCountPattern(client, `${basePath}/1d_dominance`), - _1mBlocksMined: createIndexes(client, `${basePath}/1m_blocks_mined`), - _1mDominance: createIndexes(client, `${basePath}/1m_dominance`), - _1wBlocksMined: createIndexes(client, `${basePath}/1w_blocks_mined`), - _1wDominance: createIndexes(client, `${basePath}/1w_dominance`), - _1yBlocksMined: createIndexes(client, `${basePath}/1y_blocks_mined`), - _1yDominance: createIndexes(client, `${basePath}/1y_dominance`), - blocksMined: createBlockCountPattern(client, `${basePath}/blocks_mined`), - coinbase: createUnclaimedRewardsPattern(client, `${basePath}/coinbase`), - daysSinceBlock: createIndexes(client, `${basePath}/days_since_block`), - dominance: createBlockCountPattern(client, `${basePath}/dominance`), - fee: createFeePattern2(client, `${basePath}/fee`), - subsidy: createFeePattern2(client, `${basePath}/subsidy`) + mvrv: createMetricPattern4(client, _m(acc, 'mvrv')), + negRealizedLoss: createBlockCountPattern(client, _m(acc, 'neg_realized_loss')), + netRealizedPnl: createBlockCountPattern(client, _m(acc, 'net_realized_pnl')), + netRealizedPnlCumulative30dDelta: createMetricPattern4(client, _m(acc, 'net_realized_pnl_cumulative_30d_delta')), + netRealizedPnlCumulative30dDeltaRelToMarketCap: createMetricPattern4(client, _m(acc, 'net_realized_pnl_cumulative_30d_delta_rel_to_market_cap')), + 
netRealizedPnlCumulative30dDeltaRelToRealizedCap: createMetricPattern4(client, _m(acc, 'net_realized_pnl_cumulative_30d_delta_rel_to_realized_cap')), + netRealizedPnlRelToRealizedCap: createMetricPattern25(client, _m(acc, 'net_realized_pnl_rel_to_realized_cap')), + realizedCap: createMetricPattern1(client, _m(acc, 'realized_cap')), + realizedCap30dDelta: createMetricPattern4(client, _m(acc, 'realized_cap_30d_delta')), + realizedCapRelToOwnMarketCap: createMetricPattern1(client, _m(acc, 'realized_cap_rel_to_own_market_cap')), + realizedLoss: createBlockCountPattern(client, _m(acc, 'realized_loss')), + realizedLossRelToRealizedCap: createMetricPattern25(client, _m(acc, 'realized_loss_rel_to_realized_cap')), + realizedPrice: createMetricPattern1(client, _m(acc, 'realized_price')), + realizedPriceExtra: createActivePriceRatioPattern(client, _m(acc, 'realized_price_ratio')), + realizedProfit: createBlockCountPattern(client, _m(acc, 'realized_profit')), + realizedProfitRelToRealizedCap: createMetricPattern25(client, _m(acc, 'realized_profit_rel_to_realized_cap')), + realizedProfitToLossRatio: createMetricPattern21(client, _m(acc, 'realized_profit_to_loss_ratio')), + realizedValue: createMetricPattern1(client, _m(acc, 'realized_value')), + sellSideRiskRatio: createMetricPattern21(client, _m(acc, 'sell_side_risk_ratio')), + sellSideRiskRatio30dEma: createMetricPattern21(client, _m(acc, 'sell_side_risk_ratio_30d_ema')), + sellSideRiskRatio7dEma: createMetricPattern21(client, _m(acc, 'sell_side_risk_ratio_7d_ema')), + sopr: createMetricPattern21(client, _m(acc, 'sopr')), + sopr30dEma: createMetricPattern21(client, _m(acc, 'sopr_30d_ema')), + sopr7dEma: createMetricPattern21(client, _m(acc, 'sopr_7d_ema')), + totalRealizedPnl: createTotalRealizedPnlPattern(client, _m(acc, 'total_realized_pnl')), + valueCreated: createMetricPattern1(client, _m(acc, 'value_created')), + valueDestroyed: createMetricPattern1(client, _m(acc, 'value_destroyed')), + }; +} + +/** + * @typedef 
{Object} RealizedPattern + * @property {MetricPattern4} mvrv + * @property {BlockCountPattern} negRealizedLoss + * @property {BlockCountPattern} netRealizedPnl + * @property {MetricPattern4} netRealizedPnlCumulative30dDelta + * @property {MetricPattern4} netRealizedPnlCumulative30dDeltaRelToMarketCap + * @property {MetricPattern4} netRealizedPnlCumulative30dDeltaRelToRealizedCap + * @property {MetricPattern25} netRealizedPnlRelToRealizedCap + * @property {MetricPattern1} realizedCap + * @property {MetricPattern4} realizedCap30dDelta + * @property {BlockCountPattern} realizedLoss + * @property {MetricPattern25} realizedLossRelToRealizedCap + * @property {MetricPattern1} realizedPrice + * @property {RealizedPriceExtraPattern} realizedPriceExtra + * @property {BlockCountPattern} realizedProfit + * @property {MetricPattern25} realizedProfitRelToRealizedCap + * @property {MetricPattern1} realizedValue + * @property {MetricPattern21} sellSideRiskRatio + * @property {MetricPattern21} sellSideRiskRatio30dEma + * @property {MetricPattern21} sellSideRiskRatio7dEma + * @property {MetricPattern21} sopr + * @property {MetricPattern21} sopr30dEma + * @property {MetricPattern21} sopr7dEma + * @property {TotalRealizedPnlPattern} totalRealizedPnl + * @property {MetricPattern1} valueCreated + * @property {MetricPattern1} valueDestroyed + */ + +/** + * Create a RealizedPattern pattern node + * @param {BrkClientBase} client + * @param {string} acc - Accumulated metric name + * @returns {RealizedPattern} + */ +function createRealizedPattern(client, acc) { + return { + mvrv: createMetricPattern4(client, _m(acc, 'mvrv')), + negRealizedLoss: createBlockCountPattern(client, _m(acc, 'neg_realized_loss')), + netRealizedPnl: createBlockCountPattern(client, _m(acc, 'net_realized_pnl')), + netRealizedPnlCumulative30dDelta: createMetricPattern4(client, _m(acc, 'net_realized_pnl_cumulative_30d_delta')), + netRealizedPnlCumulative30dDeltaRelToMarketCap: createMetricPattern4(client, _m(acc, 
'net_realized_pnl_cumulative_30d_delta_rel_to_market_cap')), + netRealizedPnlCumulative30dDeltaRelToRealizedCap: createMetricPattern4(client, _m(acc, 'net_realized_pnl_cumulative_30d_delta_rel_to_realized_cap')), + netRealizedPnlRelToRealizedCap: createMetricPattern25(client, _m(acc, 'net_realized_pnl_rel_to_realized_cap')), + realizedCap: createMetricPattern1(client, _m(acc, 'realized_cap')), + realizedCap30dDelta: createMetricPattern4(client, _m(acc, 'realized_cap_30d_delta')), + realizedLoss: createBlockCountPattern(client, _m(acc, 'realized_loss')), + realizedLossRelToRealizedCap: createMetricPattern25(client, _m(acc, 'realized_loss_rel_to_realized_cap')), + realizedPrice: createMetricPattern1(client, _m(acc, 'realized_price')), + realizedPriceExtra: createRealizedPriceExtraPattern(client, _m(acc, 'realized_price')), + realizedProfit: createBlockCountPattern(client, _m(acc, 'realized_profit')), + realizedProfitRelToRealizedCap: createMetricPattern25(client, _m(acc, 'realized_profit_rel_to_realized_cap')), + realizedValue: createMetricPattern1(client, _m(acc, 'realized_value')), + sellSideRiskRatio: createMetricPattern21(client, _m(acc, 'sell_side_risk_ratio')), + sellSideRiskRatio30dEma: createMetricPattern21(client, _m(acc, 'sell_side_risk_ratio_30d_ema')), + sellSideRiskRatio7dEma: createMetricPattern21(client, _m(acc, 'sell_side_risk_ratio_7d_ema')), + sopr: createMetricPattern21(client, _m(acc, 'sopr')), + sopr30dEma: createMetricPattern21(client, _m(acc, 'sopr_30d_ema')), + sopr7dEma: createMetricPattern21(client, _m(acc, 'sopr_7d_ema')), + totalRealizedPnl: createTotalRealizedPnlPattern(client, _m(acc, 'total_realized_pnl')), + valueCreated: createMetricPattern1(client, _m(acc, 'value_created')), + valueDestroyed: createMetricPattern1(client, _m(acc, 'value_destroyed')), + }; +} + +/** + * @typedef {Object} Price111dSmaPattern + * @property {MetricPattern4} price + * @property {MetricPattern4} ratio + * @property {MetricPattern4} ratio1mSma + * @property 
{MetricPattern4} ratio1wSma + * @property {Ratio1ySdPattern} ratio1ySd + * @property {Ratio1ySdPattern} ratio2ySd + * @property {Ratio1ySdPattern} ratio4ySd + * @property {MetricPattern4} ratioPct1 + * @property {MetricPattern4} ratioPct1Usd + * @property {MetricPattern4} ratioPct2 + * @property {MetricPattern4} ratioPct2Usd + * @property {MetricPattern4} ratioPct5 + * @property {MetricPattern4} ratioPct5Usd + * @property {MetricPattern4} ratioPct95 + * @property {MetricPattern4} ratioPct95Usd + * @property {MetricPattern4} ratioPct98 + * @property {MetricPattern4} ratioPct98Usd + * @property {MetricPattern4} ratioPct99 + * @property {MetricPattern4} ratioPct99Usd + * @property {Ratio1ySdPattern} ratioSd + */ + +/** + * Create a Price111dSmaPattern pattern node + * @param {BrkClientBase} client + * @param {string} acc - Accumulated metric name + * @returns {Price111dSmaPattern} + */ +function createPrice111dSmaPattern(client, acc) { + return { + price: createMetricPattern4(client, acc), + ratio: createMetricPattern4(client, _m(acc, 'ratio')), + ratio1mSma: createMetricPattern4(client, _m(acc, 'ratio_1m_sma')), + ratio1wSma: createMetricPattern4(client, _m(acc, 'ratio_1w_sma')), + ratio1ySd: createRatio1ySdPattern(client, _m(acc, 'ratio_1y')), + ratio2ySd: createRatio1ySdPattern(client, _m(acc, 'ratio_2y')), + ratio4ySd: createRatio1ySdPattern(client, _m(acc, 'ratio_4y')), + ratioPct1: createMetricPattern4(client, _m(acc, 'ratio_pct1')), + ratioPct1Usd: createMetricPattern4(client, _m(acc, 'ratio_pct1_usd')), + ratioPct2: createMetricPattern4(client, _m(acc, 'ratio_pct2')), + ratioPct2Usd: createMetricPattern4(client, _m(acc, 'ratio_pct2_usd')), + ratioPct5: createMetricPattern4(client, _m(acc, 'ratio_pct5')), + ratioPct5Usd: createMetricPattern4(client, _m(acc, 'ratio_pct5_usd')), + ratioPct95: createMetricPattern4(client, _m(acc, 'ratio_pct95')), + ratioPct95Usd: createMetricPattern4(client, _m(acc, 'ratio_pct95_usd')), + ratioPct98: createMetricPattern4(client, 
_m(acc, 'ratio_pct98')), + ratioPct98Usd: createMetricPattern4(client, _m(acc, 'ratio_pct98_usd')), + ratioPct99: createMetricPattern4(client, _m(acc, 'ratio_pct99')), + ratioPct99Usd: createMetricPattern4(client, _m(acc, 'ratio_pct99_usd')), + ratioSd: createRatio1ySdPattern(client, _m(acc, 'ratio')), + }; +} + +/** + * @typedef {Object} PercentilesPattern + * @property {MetricPattern4} costBasisPct05 + * @property {MetricPattern4} costBasisPct10 + * @property {MetricPattern4} costBasisPct15 + * @property {MetricPattern4} costBasisPct20 + * @property {MetricPattern4} costBasisPct25 + * @property {MetricPattern4} costBasisPct30 + * @property {MetricPattern4} costBasisPct35 + * @property {MetricPattern4} costBasisPct40 + * @property {MetricPattern4} costBasisPct45 + * @property {MetricPattern4} costBasisPct50 + * @property {MetricPattern4} costBasisPct55 + * @property {MetricPattern4} costBasisPct60 + * @property {MetricPattern4} costBasisPct65 + * @property {MetricPattern4} costBasisPct70 + * @property {MetricPattern4} costBasisPct75 + * @property {MetricPattern4} costBasisPct80 + * @property {MetricPattern4} costBasisPct85 + * @property {MetricPattern4} costBasisPct90 + * @property {MetricPattern4} costBasisPct95 + */ + +/** + * Create a PercentilesPattern pattern node + * @param {BrkClientBase} client + * @param {string} acc - Accumulated metric name + * @returns {PercentilesPattern} + */ +function createPercentilesPattern(client, acc) { + return { + costBasisPct05: createMetricPattern4(client, _m(acc, 'pct05')), + costBasisPct10: createMetricPattern4(client, _m(acc, 'pct10')), + costBasisPct15: createMetricPattern4(client, _m(acc, 'pct15')), + costBasisPct20: createMetricPattern4(client, _m(acc, 'pct20')), + costBasisPct25: createMetricPattern4(client, _m(acc, 'pct25')), + costBasisPct30: createMetricPattern4(client, _m(acc, 'pct30')), + costBasisPct35: createMetricPattern4(client, _m(acc, 'pct35')), + costBasisPct40: createMetricPattern4(client, _m(acc, 
'pct40')), + costBasisPct45: createMetricPattern4(client, _m(acc, 'pct45')), + costBasisPct50: createMetricPattern4(client, _m(acc, 'pct50')), + costBasisPct55: createMetricPattern4(client, _m(acc, 'pct55')), + costBasisPct60: createMetricPattern4(client, _m(acc, 'pct60')), + costBasisPct65: createMetricPattern4(client, _m(acc, 'pct65')), + costBasisPct70: createMetricPattern4(client, _m(acc, 'pct70')), + costBasisPct75: createMetricPattern4(client, _m(acc, 'pct75')), + costBasisPct80: createMetricPattern4(client, _m(acc, 'pct80')), + costBasisPct85: createMetricPattern4(client, _m(acc, 'pct85')), + costBasisPct90: createMetricPattern4(client, _m(acc, 'pct90')), + costBasisPct95: createMetricPattern4(client, _m(acc, 'pct95')), }; } /** * @typedef {Object} ActivePriceRatioPattern - * @property {Indexes} ratio - * @property {Indexes} ratio1mSma - * @property {Indexes} ratio1wSma + * @property {MetricPattern4} ratio + * @property {MetricPattern4} ratio1mSma + * @property {MetricPattern4} ratio1wSma * @property {Ratio1ySdPattern} ratio1ySd * @property {Ratio1ySdPattern} ratio2ySd * @property {Ratio1ySdPattern} ratio4ySd - * @property {Indexes} ratioPct1 - * @property {Indexes} ratioPct2 - * @property {Indexes} ratioPct5 - * @property {Indexes} ratioPct95 - * @property {Indexes} ratioPct98 - * @property {Indexes} ratioPct99 + * @property {MetricPattern4} ratioPct1 + * @property {MetricPattern4} ratioPct1Usd + * @property {MetricPattern4} ratioPct2 + * @property {MetricPattern4} ratioPct2Usd + * @property {MetricPattern4} ratioPct5 + * @property {MetricPattern4} ratioPct5Usd + * @property {MetricPattern4} ratioPct95 + * @property {MetricPattern4} ratioPct95Usd + * @property {MetricPattern4} ratioPct98 + * @property {MetricPattern4} ratioPct98Usd + * @property {MetricPattern4} ratioPct99 + * @property {MetricPattern4} ratioPct99Usd * @property {Ratio1ySdPattern} ratioSd */ /** * Create a ActivePriceRatioPattern pattern node * @param {BrkClientBase} client - * @param 
{string} basePath + * @param {string} acc - Accumulated metric name * @returns {ActivePriceRatioPattern} */ -function createActivePriceRatioPattern(client, basePath) { +function createActivePriceRatioPattern(client, acc) { return { - ratio: createIndexes(client, `${basePath}/ratio`), - ratio1mSma: createIndexes(client, `${basePath}/ratio_1m_sma`), - ratio1wSma: createIndexes(client, `${basePath}/ratio_1w_sma`), - ratio1ySd: createRatio1ySdPattern(client, `${basePath}/ratio_1y_sd`), - ratio2ySd: createRatio1ySdPattern(client, `${basePath}/ratio_2y_sd`), - ratio4ySd: createRatio1ySdPattern(client, `${basePath}/ratio_4y_sd`), - ratioPct1: createIndexes(client, `${basePath}/ratio_pct1`), - ratioPct2: createIndexes(client, `${basePath}/ratio_pct2`), - ratioPct5: createIndexes(client, `${basePath}/ratio_pct5`), - ratioPct95: createIndexes(client, `${basePath}/ratio_pct95`), - ratioPct98: createIndexes(client, `${basePath}/ratio_pct98`), - ratioPct99: createIndexes(client, `${basePath}/ratio_pct99`), - ratioSd: createRatio1ySdPattern(client, `${basePath}/ratio_sd`) + ratio: createMetricPattern4(client, acc), + ratio1mSma: createMetricPattern4(client, _m(acc, '1m_sma')), + ratio1wSma: createMetricPattern4(client, _m(acc, '1w_sma')), + ratio1ySd: createRatio1ySdPattern(client, _m(acc, '1y')), + ratio2ySd: createRatio1ySdPattern(client, _m(acc, '2y')), + ratio4ySd: createRatio1ySdPattern(client, _m(acc, '4y')), + ratioPct1: createMetricPattern4(client, _m(acc, 'pct1')), + ratioPct1Usd: createMetricPattern4(client, _m(acc, 'pct1_usd')), + ratioPct2: createMetricPattern4(client, _m(acc, 'pct2')), + ratioPct2Usd: createMetricPattern4(client, _m(acc, 'pct2_usd')), + ratioPct5: createMetricPattern4(client, _m(acc, 'pct5')), + ratioPct5Usd: createMetricPattern4(client, _m(acc, 'pct5_usd')), + ratioPct95: createMetricPattern4(client, _m(acc, 'pct95')), + ratioPct95Usd: createMetricPattern4(client, _m(acc, 'pct95_usd')), + ratioPct98: createMetricPattern4(client, _m(acc, 'pct98')), 
+ ratioPct98Usd: createMetricPattern4(client, _m(acc, 'pct98_usd')), + ratioPct99: createMetricPattern4(client, _m(acc, 'pct99')), + ratioPct99Usd: createMetricPattern4(client, _m(acc, 'pct99_usd')), + ratioSd: createRatio1ySdPattern(client, acc), + }; +} + +/** + * @typedef {Object} RelativePattern2 + * @property {MetricPattern5} negUnrealizedLossRelToMarketCap + * @property {MetricPattern5} negUnrealizedLossRelToOwnMarketCap + * @property {MetricPattern5} negUnrealizedLossRelToOwnTotalUnrealizedPnl + * @property {MetricPattern3} netUnrealizedPnlRelToMarketCap + * @property {MetricPattern3} netUnrealizedPnlRelToOwnMarketCap + * @property {MetricPattern3} netUnrealizedPnlRelToOwnTotalUnrealizedPnl + * @property {MetricPattern4} nupl + * @property {MetricPattern5} supplyInLossRelToCirculatingSupply + * @property {MetricPattern5} supplyInLossRelToOwnSupply + * @property {MetricPattern5} supplyInProfitRelToCirculatingSupply + * @property {MetricPattern5} supplyInProfitRelToOwnSupply + * @property {MetricPattern4} supplyRelToCirculatingSupply + * @property {MetricPattern5} unrealizedLossRelToMarketCap + * @property {MetricPattern5} unrealizedLossRelToOwnMarketCap + * @property {MetricPattern5} unrealizedLossRelToOwnTotalUnrealizedPnl + * @property {MetricPattern5} unrealizedProfitRelToMarketCap + * @property {MetricPattern5} unrealizedProfitRelToOwnMarketCap + * @property {MetricPattern5} unrealizedProfitRelToOwnTotalUnrealizedPnl + */ + +/** + * Create a RelativePattern2 pattern node + * @param {BrkClientBase} client + * @param {string} acc - Accumulated metric name + * @returns {RelativePattern2} + */ +function createRelativePattern2(client, acc) { + return { + negUnrealizedLossRelToMarketCap: createMetricPattern5(client, _m(acc, 'neg_unrealized_loss_rel_to_market_cap')), + negUnrealizedLossRelToOwnMarketCap: createMetricPattern5(client, _m(acc, 'neg_unrealized_loss_rel_to_own_market_cap')), + negUnrealizedLossRelToOwnTotalUnrealizedPnl: createMetricPattern5(client, 
_m(acc, 'neg_unrealized_loss_rel_to_own_total_unrealized_pnl')), + netUnrealizedPnlRelToMarketCap: createMetricPattern3(client, _m(acc, 'net_unrealized_pnl_rel_to_market_cap')), + netUnrealizedPnlRelToOwnMarketCap: createMetricPattern3(client, _m(acc, 'net_unrealized_pnl_rel_to_own_market_cap')), + netUnrealizedPnlRelToOwnTotalUnrealizedPnl: createMetricPattern3(client, _m(acc, 'net_unrealized_pnl_rel_to_own_total_unrealized_pnl')), + nupl: createMetricPattern4(client, _m(acc, 'nupl')), + supplyInLossRelToCirculatingSupply: createMetricPattern5(client, _m(acc, 'supply_in_loss_rel_to_circulating_supply')), + supplyInLossRelToOwnSupply: createMetricPattern5(client, _m(acc, 'supply_in_loss_rel_to_own_supply')), + supplyInProfitRelToCirculatingSupply: createMetricPattern5(client, _m(acc, 'supply_in_profit_rel_to_circulating_supply')), + supplyInProfitRelToOwnSupply: createMetricPattern5(client, _m(acc, 'supply_in_profit_rel_to_own_supply')), + supplyRelToCirculatingSupply: createMetricPattern4(client, _m(acc, 'supply_rel_to_circulating_supply')), + unrealizedLossRelToMarketCap: createMetricPattern5(client, _m(acc, 'unrealized_loss_rel_to_market_cap')), + unrealizedLossRelToOwnMarketCap: createMetricPattern5(client, _m(acc, 'unrealized_loss_rel_to_own_market_cap')), + unrealizedLossRelToOwnTotalUnrealizedPnl: createMetricPattern5(client, _m(acc, 'unrealized_loss_rel_to_own_total_unrealized_pnl')), + unrealizedProfitRelToMarketCap: createMetricPattern5(client, _m(acc, 'unrealized_profit_rel_to_market_cap')), + unrealizedProfitRelToOwnMarketCap: createMetricPattern5(client, _m(acc, 'unrealized_profit_rel_to_own_market_cap')), + unrealizedProfitRelToOwnTotalUnrealizedPnl: createMetricPattern5(client, _m(acc, 'unrealized_profit_rel_to_own_total_unrealized_pnl')), + }; +} + +/** + * @typedef {Object} AXbtPattern + * @property {BlockCountPattern} _1dDominance + * @property {MetricPattern4} _1mBlocksMined + * @property {MetricPattern4} _1mDominance + * @property 
{MetricPattern4} _1wBlocksMined + * @property {MetricPattern4} _1wDominance + * @property {MetricPattern4} _1yBlocksMined + * @property {MetricPattern4} _1yDominance + * @property {BlockCountPattern} blocksMined + * @property {UnclaimedRewardsPattern} coinbase + * @property {MetricPattern4} daysSinceBlock + * @property {BlockCountPattern} dominance + * @property {SentPattern} fee + * @property {SentPattern} subsidy + */ + +/** + * Create a AXbtPattern pattern node + * @param {BrkClientBase} client + * @param {string} acc - Accumulated metric name + * @returns {AXbtPattern} + */ +function createAXbtPattern(client, acc) { + return { + _1dDominance: createBlockCountPattern(client, _m(acc, '1d_dominance')), + _1mBlocksMined: createMetricPattern4(client, _m(acc, '1m_blocks_mined')), + _1mDominance: createMetricPattern4(client, _m(acc, '1m_dominance')), + _1wBlocksMined: createMetricPattern4(client, _m(acc, '1w_blocks_mined')), + _1wDominance: createMetricPattern4(client, _m(acc, '1w_dominance')), + _1yBlocksMined: createMetricPattern4(client, _m(acc, '1y_blocks_mined')), + _1yDominance: createMetricPattern4(client, _m(acc, '1y_dominance')), + blocksMined: createBlockCountPattern(client, _m(acc, 'blocks_mined')), + coinbase: createUnclaimedRewardsPattern(client, _m(acc, 'coinbase')), + daysSinceBlock: createMetricPattern4(client, _m(acc, 'days_since_block')), + dominance: createBlockCountPattern(client, _m(acc, 'dominance')), + fee: createSentPattern(client, _m(acc, 'fee')), + subsidy: createSentPattern(client, _m(acc, 'subsidy')), }; } /** * @template T * @typedef {Object} BitcoinPattern - * @property {Indexes4} average - * @property {Indexes2} base - * @property {Indexes3} cumulative - * @property {Indexes4} max - * @property {Indexes5} median - * @property {Indexes4} min - * @property {Indexes5} pct10 - * @property {Indexes5} pct25 - * @property {Indexes5} pct75 - * @property {Indexes5} pct90 - * @property {Indexes4} sum + * @property {MetricPattern2} average + * 
@property {MetricPattern25} base + * @property {MetricPattern1} cumulative + * @property {MetricPattern2} max + * @property {MetricPattern21} median + * @property {MetricPattern2} min + * @property {MetricPattern21} pct10 + * @property {MetricPattern21} pct25 + * @property {MetricPattern21} pct75 + * @property {MetricPattern21} pct90 + * @property {MetricPattern2} sum */ /** * Create a BitcoinPattern pattern node * @template T * @param {BrkClientBase} client - * @param {string} basePath + * @param {string} acc - Accumulated metric name * @returns {BitcoinPattern} */ -function createBitcoinPattern(client, basePath) { +function createBitcoinPattern(client, acc) { return { - average: createIndexes4(client, `${basePath}/average`), - base: createIndexes2(client, `${basePath}/base`), - cumulative: createIndexes3(client, `${basePath}/cumulative`), - max: createIndexes4(client, `${basePath}/max`), - median: createIndexes5(client, `${basePath}/median`), - min: createIndexes4(client, `${basePath}/min`), - pct10: createIndexes5(client, `${basePath}/pct10`), - pct25: createIndexes5(client, `${basePath}/pct25`), - pct75: createIndexes5(client, `${basePath}/pct75`), - pct90: createIndexes5(client, `${basePath}/pct90`), - sum: createIndexes4(client, `${basePath}/sum`) + average: createMetricPattern2(client, _m(acc, 'avg')), + base: createMetricPattern25(client, acc), + cumulative: createMetricPattern1(client, _m(acc, 'cumulative')), + max: createMetricPattern2(client, _m(acc, 'max')), + median: createMetricPattern21(client, _m(acc, 'median')), + min: createMetricPattern2(client, _m(acc, 'min')), + pct10: createMetricPattern21(client, _m(acc, 'pct10')), + pct25: createMetricPattern21(client, _m(acc, 'pct25')), + pct75: createMetricPattern21(client, _m(acc, 'pct75')), + pct90: createMetricPattern21(client, _m(acc, 'pct90')), + sum: createMetricPattern2(client, _m(acc, 'sum')), + }; +} + +/** + * @typedef {Object} RelativePattern + * @property {MetricPattern5} 
negUnrealizedLossRelToMarketCap + * @property {MetricPattern3} netUnrealizedPnlRelToMarketCap + * @property {MetricPattern4} nupl + * @property {MetricPattern5} supplyInLossRelToCirculatingSupply + * @property {MetricPattern5} supplyInLossRelToOwnSupply + * @property {MetricPattern5} supplyInProfitRelToCirculatingSupply + * @property {MetricPattern5} supplyInProfitRelToOwnSupply + * @property {MetricPattern4} supplyRelToCirculatingSupply + * @property {MetricPattern5} unrealizedLossRelToMarketCap + * @property {MetricPattern5} unrealizedProfitRelToMarketCap + */ + +/** + * Create a RelativePattern pattern node + * @param {BrkClientBase} client + * @param {string} acc - Accumulated metric name + * @returns {RelativePattern} + */ +function createRelativePattern(client, acc) { + return { + negUnrealizedLossRelToMarketCap: createMetricPattern5(client, _m(acc, 'neg_unrealized_loss_rel_to_market_cap')), + netUnrealizedPnlRelToMarketCap: createMetricPattern3(client, _m(acc, 'net_unrealized_pnl_rel_to_market_cap')), + nupl: createMetricPattern4(client, _m(acc, 'nupl')), + supplyInLossRelToCirculatingSupply: createMetricPattern5(client, _m(acc, 'supply_in_loss_rel_to_circulating_supply')), + supplyInLossRelToOwnSupply: createMetricPattern5(client, _m(acc, 'supply_in_loss_rel_to_own_supply')), + supplyInProfitRelToCirculatingSupply: createMetricPattern5(client, _m(acc, 'supply_in_profit_rel_to_circulating_supply')), + supplyInProfitRelToOwnSupply: createMetricPattern5(client, _m(acc, 'supply_in_profit_rel_to_own_supply')), + supplyRelToCirculatingSupply: createMetricPattern4(client, _m(acc, 'supply_rel_to_circulating_supply')), + unrealizedLossRelToMarketCap: createMetricPattern5(client, _m(acc, 'unrealized_loss_rel_to_market_cap')), + unrealizedProfitRelToMarketCap: createMetricPattern5(client, _m(acc, 'unrealized_profit_rel_to_market_cap')), }; } /** * @template T * @typedef {Object} BlockSizePattern - * @property {Indexes4} average - * @property {Indexes3} cumulative - * 
@property {Indexes4} max - * @property {Indexes5} median - * @property {Indexes4} min - * @property {Indexes5} pct10 - * @property {Indexes5} pct25 - * @property {Indexes5} pct75 - * @property {Indexes5} pct90 - * @property {Indexes4} sum + * @property {MetricPattern1} average + * @property {MetricPattern1} cumulative + * @property {MetricPattern1} max + * @property {MetricPattern25} median + * @property {MetricPattern1} min + * @property {MetricPattern25} pct10 + * @property {MetricPattern25} pct25 + * @property {MetricPattern25} pct75 + * @property {MetricPattern25} pct90 + * @property {MetricPattern1} sum */ /** * Create a BlockSizePattern pattern node * @template T * @param {BrkClientBase} client - * @param {string} basePath + * @param {string} acc - Accumulated metric name * @returns {BlockSizePattern} */ -function createBlockSizePattern(client, basePath) { +function createBlockSizePattern(client, acc) { return { - average: createIndexes4(client, `${basePath}/average`), - cumulative: createIndexes3(client, `${basePath}/cumulative`), - max: createIndexes4(client, `${basePath}/max`), - median: createIndexes5(client, `${basePath}/median`), - min: createIndexes4(client, `${basePath}/min`), - pct10: createIndexes5(client, `${basePath}/pct10`), - pct25: createIndexes5(client, `${basePath}/pct25`), - pct75: createIndexes5(client, `${basePath}/pct75`), - pct90: createIndexes5(client, `${basePath}/pct90`), - sum: createIndexes4(client, `${basePath}/sum`) - }; -} - -/** - * @typedef {Object} RelativePattern - * @property {Indexes27} negUnrealizedLossRelToMarketCap - * @property {Indexes26} netUnrealizedPnlRelToMarketCap - * @property {Indexes27} supplyInLossRelToCirculatingSupply - * @property {Indexes27} supplyInLossRelToOwnSupply - * @property {Indexes27} supplyInProfitRelToCirculatingSupply - * @property {Indexes27} supplyInProfitRelToOwnSupply - * @property {Indexes} supplyRelToCirculatingSupply - * @property {Indexes27} unrealizedLossRelToMarketCap - * @property 
{Indexes27} unrealizedProfitRelToMarketCap - */ - -/** - * Create a RelativePattern pattern node - * @param {BrkClientBase} client - * @param {string} basePath - * @returns {RelativePattern} - */ -function createRelativePattern(client, basePath) { - return { - negUnrealizedLossRelToMarketCap: createIndexes27(client, `${basePath}/neg_unrealized_loss_rel_to_market_cap`), - netUnrealizedPnlRelToMarketCap: createIndexes26(client, `${basePath}/net_unrealized_pnl_rel_to_market_cap`), - supplyInLossRelToCirculatingSupply: createIndexes27(client, `${basePath}/supply_in_loss_rel_to_circulating_supply`), - supplyInLossRelToOwnSupply: createIndexes27(client, `${basePath}/supply_in_loss_rel_to_own_supply`), - supplyInProfitRelToCirculatingSupply: createIndexes27(client, `${basePath}/supply_in_profit_rel_to_circulating_supply`), - supplyInProfitRelToOwnSupply: createIndexes27(client, `${basePath}/supply_in_profit_rel_to_own_supply`), - supplyRelToCirculatingSupply: createIndexes(client, `${basePath}/supply_rel_to_circulating_supply`), - unrealizedLossRelToMarketCap: createIndexes27(client, `${basePath}/unrealized_loss_rel_to_market_cap`), - unrealizedProfitRelToMarketCap: createIndexes27(client, `${basePath}/unrealized_profit_rel_to_market_cap`) + average: createMetricPattern1(client, _m(acc, 'avg')), + cumulative: createMetricPattern1(client, _m(acc, 'cumulative')), + max: createMetricPattern1(client, _m(acc, 'max')), + median: createMetricPattern25(client, _m(acc, 'median')), + min: createMetricPattern1(client, _m(acc, 'min')), + pct10: createMetricPattern25(client, _m(acc, 'pct10')), + pct25: createMetricPattern25(client, _m(acc, 'pct25')), + pct75: createMetricPattern25(client, _m(acc, 'pct75')), + pct90: createMetricPattern25(client, _m(acc, 'pct90')), + sum: createMetricPattern1(client, _m(acc, 'sum')), }; } /** * @typedef {Object} UnrealizedPattern - * @property {Indexes26} negUnrealizedLoss - * @property {Indexes26} netUnrealizedPnl - * @property {SupplyPattern} 
supplyInLoss + * @property {MetricPattern3} negUnrealizedLoss + * @property {MetricPattern3} netUnrealizedPnl + * @property {SupplyPattern2} supplyInLoss * @property {SupplyValuePattern} supplyInLossValue - * @property {SupplyPattern} supplyInProfit + * @property {SupplyPattern2} supplyInProfit * @property {SupplyValuePattern} supplyInProfitValue - * @property {Indexes26} totalUnrealizedPnl - * @property {Indexes26} unrealizedLoss - * @property {Indexes26} unrealizedProfit + * @property {MetricPattern3} totalUnrealizedPnl + * @property {MetricPattern3} unrealizedLoss + * @property {MetricPattern3} unrealizedProfit */ /** * Create a UnrealizedPattern pattern node * @param {BrkClientBase} client - * @param {string} basePath + * @param {string} acc - Accumulated metric name * @returns {UnrealizedPattern} */ -function createUnrealizedPattern(client, basePath) { +function createUnrealizedPattern(client, acc) { return { - negUnrealizedLoss: createIndexes26(client, `${basePath}/neg_unrealized_loss`), - netUnrealizedPnl: createIndexes26(client, `${basePath}/net_unrealized_pnl`), - supplyInLoss: createSupplyPattern(client, `${basePath}/supply_in_loss`), - supplyInLossValue: createSupplyValuePattern(client, `${basePath}/supply_in_loss_value`), - supplyInProfit: createSupplyPattern(client, `${basePath}/supply_in_profit`), - supplyInProfitValue: createSupplyValuePattern(client, `${basePath}/supply_in_profit_value`), - totalUnrealizedPnl: createIndexes26(client, `${basePath}/total_unrealized_pnl`), - unrealizedLoss: createIndexes26(client, `${basePath}/unrealized_loss`), - unrealizedProfit: createIndexes26(client, `${basePath}/unrealized_profit`) + negUnrealizedLoss: createMetricPattern3(client, _m(acc, 'neg_unrealized_loss')), + netUnrealizedPnl: createMetricPattern3(client, _m(acc, 'net_unrealized_pnl')), + supplyInLoss: createSupplyPattern2(client, _m(acc, 'supply_in_loss')), + supplyInLossValue: createSupplyValuePattern(client, _m(acc, 'supply_in_loss')), + supplyInProfit: 
createSupplyPattern2(client, _m(acc, 'supply_in_profit')), + supplyInProfitValue: createSupplyValuePattern(client, _m(acc, 'supply_in_profit')), + totalUnrealizedPnl: createMetricPattern3(client, _m(acc, 'total_unrealized_pnl')), + unrealizedLoss: createMetricPattern3(client, _m(acc, 'unrealized_loss')), + unrealizedProfit: createMetricPattern3(client, _m(acc, 'unrealized_profit')), }; } /** * @template T * @typedef {Object} Constant0Pattern - * @property {Indexes5} dateindex - * @property {Indexes7} decadeindex - * @property {Indexes2} height - * @property {Indexes8} monthindex - * @property {Indexes9} quarterindex - * @property {Indexes10} semesterindex - * @property {Indexes11} weekindex - * @property {Indexes12} yearindex + * @property {MetricPattern21} dateindex + * @property {MetricPattern22} decadeindex + * @property {MetricPattern25} height + * @property {MetricPattern27} monthindex + * @property {MetricPattern39} quarterindex + * @property {MetricPattern40} semesterindex + * @property {MetricPattern43} weekindex + * @property {MetricPattern44} yearindex */ /** @@ -3153,61 +2742,61 @@ function createUnrealizedPattern(client, basePath) { */ function createConstant0Pattern(client, acc) { return { - dateindex: createIndexes5(client, `/${acc}`), - decadeindex: createIndexes7(client, `/${acc}`), - height: createIndexes2(client, `/${acc}`), - monthindex: createIndexes8(client, `/${acc}`), - quarterindex: createIndexes9(client, `/${acc}`), - semesterindex: createIndexes10(client, `/${acc}`), - weekindex: createIndexes11(client, `/${acc}`), - yearindex: createIndexes12(client, `/${acc}`) + dateindex: createMetricPattern21(client, acc), + decadeindex: createMetricPattern22(client, acc), + height: createMetricPattern25(client, acc), + monthindex: createMetricPattern27(client, acc), + quarterindex: createMetricPattern39(client, acc), + semesterindex: createMetricPattern40(client, acc), + weekindex: createMetricPattern43(client, acc), + yearindex: 
createMetricPattern44(client, acc), }; } /** * @template T * @typedef {Object} AddresstypeToHeightToAddrCountPattern - * @property {Indexes16} p2a - * @property {Indexes17} p2pk33 - * @property {Indexes18} p2pk65 - * @property {Indexes19} p2pkh - * @property {Indexes20} p2sh - * @property {Indexes21} p2tr - * @property {Indexes22} p2wpkh - * @property {Indexes23} p2wsh + * @property {MetricPattern25} p2a + * @property {MetricPattern25} p2pk33 + * @property {MetricPattern25} p2pk65 + * @property {MetricPattern25} p2pkh + * @property {MetricPattern25} p2sh + * @property {MetricPattern25} p2tr + * @property {MetricPattern25} p2wpkh + * @property {MetricPattern25} p2wsh */ /** * Create a AddresstypeToHeightToAddrCountPattern pattern node * @template T * @param {BrkClientBase} client - * @param {string} basePath + * @param {string} acc - Accumulated metric name * @returns {AddresstypeToHeightToAddrCountPattern} */ -function createAddresstypeToHeightToAddrCountPattern(client, basePath) { +function createAddresstypeToHeightToAddrCountPattern(client, acc) { return { - p2a: createIndexes16(client, `${basePath}/p2a`), - p2pk33: createIndexes17(client, `${basePath}/p2pk33`), - p2pk65: createIndexes18(client, `${basePath}/p2pk65`), - p2pkh: createIndexes19(client, `${basePath}/p2pkh`), - p2sh: createIndexes20(client, `${basePath}/p2sh`), - p2tr: createIndexes21(client, `${basePath}/p2tr`), - p2wpkh: createIndexes22(client, `${basePath}/p2wpkh`), - p2wsh: createIndexes23(client, `${basePath}/p2wsh`) + p2a: createMetricPattern25(client, (acc ? `p2a_${acc}` : 'p2a')), + p2pk33: createMetricPattern25(client, (acc ? `p2pk33_${acc}` : 'p2pk33')), + p2pk65: createMetricPattern25(client, (acc ? `p2pk65_${acc}` : 'p2pk65')), + p2pkh: createMetricPattern25(client, (acc ? `p2pkh_${acc}` : 'p2pkh')), + p2sh: createMetricPattern25(client, (acc ? `p2sh_${acc}` : 'p2sh')), + p2tr: createMetricPattern25(client, (acc ? `p2tr_${acc}` : 'p2tr')), + p2wpkh: createMetricPattern25(client, (acc ? 
`p2wpkh_${acc}` : 'p2wpkh')), + p2wsh: createMetricPattern25(client, (acc ? `p2wsh_${acc}` : 'p2wsh')), }; } /** * @template T * @typedef {Object} BlockIntervalPattern - * @property {Indexes3} average - * @property {Indexes3} max - * @property {Indexes2} median - * @property {Indexes3} min - * @property {Indexes2} pct10 - * @property {Indexes2} pct25 - * @property {Indexes2} pct75 - * @property {Indexes2} pct90 + * @property {MetricPattern1} average + * @property {MetricPattern1} max + * @property {MetricPattern25} median + * @property {MetricPattern1} min + * @property {MetricPattern25} pct10 + * @property {MetricPattern25} pct25 + * @property {MetricPattern25} pct75 + * @property {MetricPattern25} pct90 */ /** @@ -3219,220 +2808,272 @@ function createAddresstypeToHeightToAddrCountPattern(client, basePath) { */ function createBlockIntervalPattern(client, acc) { return { - average: createIndexes3(client, `/${acc}_avg`), - max: createIndexes3(client, `/${acc}_max`), - median: createIndexes2(client, `/${acc}_median`), - min: createIndexes3(client, `/${acc}_min`), - pct10: createIndexes2(client, `/${acc}_pct10`), - pct25: createIndexes2(client, `/${acc}_pct25`), - pct75: createIndexes2(client, `/${acc}_pct75`), - pct90: createIndexes2(client, `/${acc}_pct90`) + average: createMetricPattern1(client, _m(acc, 'avg')), + max: createMetricPattern1(client, _m(acc, 'max')), + median: createMetricPattern25(client, _m(acc, 'median')), + min: createMetricPattern1(client, _m(acc, 'min')), + pct10: createMetricPattern25(client, _m(acc, 'pct10')), + pct25: createMetricPattern25(client, _m(acc, 'pct25')), + pct75: createMetricPattern25(client, _m(acc, 'pct75')), + pct90: createMetricPattern25(client, _m(acc, 'pct90')), }; } /** * @typedef {Object} _0satsPattern - * @property {ActivityPattern} activity - * @property {Indexes3} addrCount - * @property {PricePaidPattern} pricePaid + * @property {ActivityPattern2} activity + * @property {MetricPattern1} addrCount + * @property 
{CostBasisPattern} costBasis * @property {RealizedPattern} realized * @property {RelativePattern} relative - * @property {SupplyPattern2} supply + * @property {SupplyPattern3} supply * @property {UnrealizedPattern} unrealized */ /** * Create a _0satsPattern pattern node * @param {BrkClientBase} client - * @param {string} basePath + * @param {string} acc - Accumulated metric name * @returns {_0satsPattern} */ -function create_0satsPattern(client, basePath) { +function create_0satsPattern(client, acc) { return { - activity: createActivityPattern(client, `${basePath}/activity`), - addrCount: createIndexes3(client, `${basePath}/addr_count`), - pricePaid: createPricePaidPattern(client, `${basePath}/price_paid`), - realized: createRealizedPattern(client, `${basePath}/realized`), - relative: createRelativePattern(client, `${basePath}/relative`), - supply: createSupplyPattern2(client, `${basePath}/supply`), - unrealized: createUnrealizedPattern(client, `${basePath}/unrealized`) - }; -} - -/** - * @typedef {Object} UpTo1dPattern - * @property {ActivityPattern} activity - * @property {PricePaidPattern2} pricePaid - * @property {RealizedPattern3} realized - * @property {RelativePattern2} relative - * @property {SupplyPattern2} supply - * @property {UnrealizedPattern} unrealized - */ - -/** - * Create a UpTo1dPattern pattern node - * @param {BrkClientBase} client - * @param {string} basePath - * @returns {UpTo1dPattern} - */ -function createUpTo1dPattern(client, basePath) { - return { - activity: createActivityPattern(client, `${basePath}/activity`), - pricePaid: createPricePaidPattern2(client, `${basePath}/price_paid`), - realized: createRealizedPattern3(client, `${basePath}/realized`), - relative: createRelativePattern2(client, `${basePath}/relative`), - supply: createSupplyPattern2(client, `${basePath}/supply`), - unrealized: createUnrealizedPattern(client, `${basePath}/unrealized`) + activity: createActivityPattern2(client, acc), + addrCount: createMetricPattern1(client, 
_m(acc, 'addr_count')), + costBasis: createCostBasisPattern(client, acc), + realized: createRealizedPattern(client, acc), + relative: createRelativePattern(client, acc), + supply: createSupplyPattern3(client, acc), + unrealized: createUnrealizedPattern(client, acc), }; } /** * @typedef {Object} _0satsPattern2 - * @property {ActivityPattern} activity - * @property {PricePaidPattern} pricePaid + * @property {ActivityPattern2} activity + * @property {CostBasisPattern} costBasis * @property {RealizedPattern} realized * @property {RelativePattern} relative - * @property {SupplyPattern2} supply + * @property {SupplyPattern3} supply * @property {UnrealizedPattern} unrealized */ /** * Create a _0satsPattern2 pattern node * @param {BrkClientBase} client - * @param {string} basePath + * @param {string} acc - Accumulated metric name * @returns {_0satsPattern2} */ -function create_0satsPattern2(client, basePath) { +function create_0satsPattern2(client, acc) { return { - activity: createActivityPattern(client, `${basePath}/activity`), - pricePaid: createPricePaidPattern(client, `${basePath}/price_paid`), - realized: createRealizedPattern(client, `${basePath}/realized`), - relative: createRelativePattern(client, `${basePath}/relative`), - supply: createSupplyPattern2(client, `${basePath}/supply`), - unrealized: createUnrealizedPattern(client, `${basePath}/unrealized`) + activity: createActivityPattern2(client, acc), + costBasis: createCostBasisPattern(client, acc), + realized: createRealizedPattern(client, acc), + relative: createRelativePattern(client, acc), + supply: createSupplyPattern3(client, acc), + unrealized: createUnrealizedPattern(client, acc), }; } /** * @typedef {Object} _10yTo12yPattern - * @property {ActivityPattern} activity - * @property {PricePaidPattern2} pricePaid + * @property {ActivityPattern2} activity + * @property {CostBasisPattern2} costBasis * @property {RealizedPattern2} realized * @property {RelativePattern2} relative - * @property {SupplyPattern2} 
supply + * @property {SupplyPattern3} supply * @property {UnrealizedPattern} unrealized */ /** * Create a _10yTo12yPattern pattern node * @param {BrkClientBase} client - * @param {string} basePath + * @param {string} acc - Accumulated metric name * @returns {_10yTo12yPattern} */ -function create_10yTo12yPattern(client, basePath) { +function create_10yTo12yPattern(client, acc) { return { - activity: createActivityPattern(client, `${basePath}/activity`), - pricePaid: createPricePaidPattern2(client, `${basePath}/price_paid`), - realized: createRealizedPattern2(client, `${basePath}/realized`), - relative: createRelativePattern2(client, `${basePath}/relative`), - supply: createSupplyPattern2(client, `${basePath}/supply`), - unrealized: createUnrealizedPattern(client, `${basePath}/unrealized`) + activity: createActivityPattern2(client, acc), + costBasis: createCostBasisPattern2(client, acc), + realized: createRealizedPattern2(client, acc), + relative: createRelativePattern2(client, acc), + supply: createSupplyPattern3(client, acc), + unrealized: createUnrealizedPattern(client, acc), }; } /** - * @typedef {Object} ActivityPattern + * @typedef {Object} UpTo1dPattern + * @property {ActivityPattern2} activity + * @property {CostBasisPattern2} costBasis + * @property {RealizedPattern3} realized + * @property {RelativePattern2} relative + * @property {SupplyPattern3} supply + * @property {UnrealizedPattern} unrealized + */ + +/** + * Create a UpTo1dPattern pattern node + * @param {BrkClientBase} client + * @param {string} acc - Accumulated metric name + * @returns {UpTo1dPattern} + */ +function createUpTo1dPattern(client, acc) { + return { + activity: createActivityPattern2(client, acc), + costBasis: createCostBasisPattern2(client, acc), + realized: createRealizedPattern3(client, acc), + relative: createRelativePattern2(client, acc), + supply: createSupplyPattern3(client, acc), + unrealized: createUnrealizedPattern(client, acc), + }; +} + +/** + * @template T + * @typedef 
{Object} SegwitAdoptionPattern + * @property {MetricPattern2} average + * @property {MetricPattern25} base + * @property {MetricPattern1} cumulative + * @property {MetricPattern2} max + * @property {MetricPattern2} min + * @property {MetricPattern2} sum + */ + +/** + * Create a SegwitAdoptionPattern pattern node + * @template T + * @param {BrkClientBase} client + * @param {string} acc - Accumulated metric name + * @returns {SegwitAdoptionPattern} + */ +function createSegwitAdoptionPattern(client, acc) { + return { + average: createMetricPattern2(client, _m(acc, 'avg')), + base: createMetricPattern25(client, acc), + cumulative: createMetricPattern1(client, _m(acc, 'cumulative')), + max: createMetricPattern2(client, _m(acc, 'max')), + min: createMetricPattern2(client, _m(acc, 'min')), + sum: createMetricPattern2(client, _m(acc, 'sum')), + }; +} + +/** + * @typedef {Object} ActivityPattern2 * @property {BlockCountPattern} coinblocksDestroyed * @property {BlockCountPattern} coindaysDestroyed - * @property {Indexes2} satblocksDestroyed - * @property {Indexes2} satdaysDestroyed - * @property {FeePattern2} sent + * @property {MetricPattern25} satblocksDestroyed + * @property {MetricPattern25} satdaysDestroyed + * @property {SentPattern} sent */ /** - * Create a ActivityPattern pattern node + * Create a ActivityPattern2 pattern node * @param {BrkClientBase} client - * @param {string} basePath - * @returns {ActivityPattern} + * @param {string} acc - Accumulated metric name + * @returns {ActivityPattern2} */ -function createActivityPattern(client, basePath) { +function createActivityPattern2(client, acc) { return { - coinblocksDestroyed: createBlockCountPattern(client, `${basePath}/coinblocks_destroyed`), - coindaysDestroyed: createBlockCountPattern(client, `${basePath}/coindays_destroyed`), - satblocksDestroyed: createIndexes2(client, `${basePath}/satblocks_destroyed`), - satdaysDestroyed: createIndexes2(client, `${basePath}/satdays_destroyed`), - sent: 
createFeePattern2(client, `${basePath}/sent`) + coinblocksDestroyed: createBlockCountPattern(client, _m(acc, 'coinblocks_destroyed')), + coindaysDestroyed: createBlockCountPattern(client, _m(acc, 'coindays_destroyed')), + satblocksDestroyed: createMetricPattern25(client, _m(acc, 'satblocks_destroyed')), + satdaysDestroyed: createMetricPattern25(client, _m(acc, 'satdays_destroyed')), + sent: createSentPattern(client, _m(acc, 'sent')), }; } /** - * @typedef {Object} SupplyPattern2 - * @property {SupplyPattern} supply + * @typedef {Object} SupplyPattern3 + * @property {SupplyPattern2} supply * @property {ActiveSupplyPattern} supplyHalf * @property {ActiveSupplyPattern} supplyHalfValue * @property {SupplyValuePattern} supplyValue - * @property {Indexes3} utxoCount + * @property {MetricPattern1} utxoCount */ /** - * Create a SupplyPattern2 pattern node + * Create a SupplyPattern3 pattern node * @param {BrkClientBase} client - * @param {string} basePath - * @returns {SupplyPattern2} + * @param {string} acc - Accumulated metric name + * @returns {SupplyPattern3} */ -function createSupplyPattern2(client, basePath) { +function createSupplyPattern3(client, acc) { return { - supply: createSupplyPattern(client, `${basePath}/supply`), - supplyHalf: createActiveSupplyPattern(client, `${basePath}/supply_half`), - supplyHalfValue: createActiveSupplyPattern(client, `${basePath}/supply_half_value`), - supplyValue: createSupplyValuePattern(client, `${basePath}/supply_value`), - utxoCount: createIndexes3(client, `${basePath}/utxo_count`) + supply: createSupplyPattern2(client, _m(acc, 'supply')), + supplyHalf: createActiveSupplyPattern(client, _m(acc, 'supply_half')), + supplyHalfValue: createActiveSupplyPattern(client, _m(acc, 'supply_half')), + supplyValue: createSupplyValuePattern(client, _m(acc, 'supply')), + utxoCount: createMetricPattern1(client, _m(acc, 'utxo_count')), }; } /** - * @typedef {Object} FeePattern2 - * @property {Indexes2} base + * @typedef {Object} SentPattern + * 
@property {MetricPattern25} base * @property {BlockCountPattern} bitcoin * @property {BlockCountPattern} dollars * @property {SatsPattern} sats */ /** - * Create a FeePattern2 pattern node + * Create a SentPattern pattern node * @param {BrkClientBase} client - * @param {string} basePath - * @returns {FeePattern2} + * @param {string} acc - Accumulated metric name + * @returns {SentPattern} */ -function createFeePattern2(client, basePath) { +function createSentPattern(client, acc) { return { - base: createIndexes2(client, `${basePath}/base`), - bitcoin: createBlockCountPattern(client, `${basePath}/bitcoin`), - dollars: createBlockCountPattern(client, `${basePath}/dollars`), - sats: createSatsPattern(client, `${basePath}/sats`) + base: createMetricPattern25(client, acc), + bitcoin: createBlockCountPattern(client, _m(acc, 'btc')), + dollars: createBlockCountPattern(client, _m(acc, 'usd')), + sats: createSatsPattern(client, acc), }; } /** - * @typedef {Object} SupplyPattern - * @property {Indexes2} base - * @property {Indexes} bitcoin - * @property {Indexes} dollars - * @property {Indexes} sats + * @typedef {Object} OpreturnPattern + * @property {MetricPattern25} base + * @property {BitcoinPattern2} bitcoin + * @property {BitcoinPattern2} dollars + * @property {SatsPattern4} sats */ /** - * Create a SupplyPattern pattern node + * Create a OpreturnPattern pattern node * @param {BrkClientBase} client - * @param {string} basePath - * @returns {SupplyPattern} + * @param {string} acc - Accumulated metric name + * @returns {OpreturnPattern} */ -function createSupplyPattern(client, basePath) { +function createOpreturnPattern(client, acc) { return { - base: createIndexes2(client, `${basePath}/base`), - bitcoin: createIndexes(client, `${basePath}/bitcoin`), - dollars: createIndexes(client, `${basePath}/dollars`), - sats: createIndexes(client, `${basePath}/sats`) + base: createMetricPattern25(client, acc), + bitcoin: createBitcoinPattern2(client, _m(acc, 'btc')), + dollars: 
createBitcoinPattern2(client, _m(acc, 'usd')), + sats: createSatsPattern4(client, acc), + }; +} + +/** + * @typedef {Object} SupplyPattern2 + * @property {MetricPattern25} base + * @property {MetricPattern4} bitcoin + * @property {MetricPattern4} dollars + * @property {MetricPattern4} sats + */ + +/** + * Create a SupplyPattern2 pattern node + * @param {BrkClientBase} client + * @param {string} acc - Accumulated metric name + * @returns {SupplyPattern2} + */ +function createSupplyPattern2(client, acc) { + return { + base: createMetricPattern25(client, acc), + bitcoin: createMetricPattern4(client, _m(acc, 'btc')), + dollars: createMetricPattern4(client, _m(acc, 'usd')), + sats: createMetricPattern4(client, acc), }; } @@ -3446,35 +3087,14 @@ function createSupplyPattern(client, basePath) { /** * Create a UnclaimedRewardsPattern pattern node * @param {BrkClientBase} client - * @param {string} basePath + * @param {string} acc - Accumulated metric name * @returns {UnclaimedRewardsPattern} */ -function createUnclaimedRewardsPattern(client, basePath) { +function createUnclaimedRewardsPattern(client, acc) { return { - bitcoin: createBlockCountPattern(client, `${basePath}/bitcoin`), - dollars: createBlockCountPattern(client, `${basePath}/dollars`), - sats: createBlockCountPattern(client, `${basePath}/sats`) - }; -} - -/** - * @typedef {Object} PricePaidPattern2 - * @property {Indexes3} maxPricePaid - * @property {Indexes3} minPricePaid - * @property {PricePercentilesPattern} pricePercentiles - */ - -/** - * Create a PricePaidPattern2 pattern node - * @param {BrkClientBase} client - * @param {string} basePath - * @returns {PricePaidPattern2} - */ -function createPricePaidPattern2(client, basePath) { - return { - maxPricePaid: createIndexes3(client, `${basePath}/max_price_paid`), - minPricePaid: createIndexes3(client, `${basePath}/min_price_paid`), - pricePercentiles: createPricePercentilesPattern(client, `${basePath}/price_percentiles`) + bitcoin: 
createBlockCountPattern(client, _m(acc, 'btc')), + dollars: createBlockCountPattern(client, _m(acc, 'usd')), + sats: createBlockCountPattern(client, acc), }; } @@ -3488,84 +3108,166 @@ function createPricePaidPattern2(client, basePath) { /** * Create a CoinbasePattern pattern node * @param {BrkClientBase} client - * @param {string} basePath + * @param {string} acc - Accumulated metric name * @returns {CoinbasePattern} */ -function createCoinbasePattern(client, basePath) { +function createCoinbasePattern(client, acc) { return { - bitcoin: createBitcoinPattern(client, `${basePath}/bitcoin`), - dollars: createBitcoinPattern(client, `${basePath}/dollars`), - sats: createBitcoinPattern(client, `${basePath}/sats`) + bitcoin: createBitcoinPattern(client, _m(acc, 'btc')), + dollars: createBitcoinPattern(client, _m(acc, 'usd')), + sats: createBitcoinPattern(client, acc), }; } /** * @typedef {Object} ActiveSupplyPattern - * @property {Indexes3} bitcoin - * @property {Indexes3} dollars - * @property {Indexes3} sats + * @property {MetricPattern1} bitcoin + * @property {MetricPattern1} dollars + * @property {MetricPattern1} sats */ /** * Create a ActiveSupplyPattern pattern node * @param {BrkClientBase} client - * @param {string} basePath + * @param {string} acc - Accumulated metric name * @returns {ActiveSupplyPattern} */ -function createActiveSupplyPattern(client, basePath) { +function createActiveSupplyPattern(client, acc) { return { - bitcoin: createIndexes3(client, `${basePath}/bitcoin`), - dollars: createIndexes3(client, `${basePath}/dollars`), - sats: createIndexes3(client, `${basePath}/sats`) + bitcoin: createMetricPattern1(client, _m(acc, 'btc')), + dollars: createMetricPattern1(client, _m(acc, 'usd')), + sats: createMetricPattern1(client, acc), + }; +} + +/** + * @typedef {Object} CostBasisPattern2 + * @property {MetricPattern1} maxCostBasis + * @property {MetricPattern1} minCostBasis + * @property {PercentilesPattern} percentiles + */ + +/** + * Create a 
CostBasisPattern2 pattern node + * @param {BrkClientBase} client + * @param {string} acc - Accumulated metric name + * @returns {CostBasisPattern2} + */ +function createCostBasisPattern2(client, acc) { + return { + maxCostBasis: createMetricPattern1(client, _m(acc, 'max_cost_basis')), + minCostBasis: createMetricPattern1(client, _m(acc, 'min_cost_basis')), + percentiles: createPercentilesPattern(client, _m(acc, 'cost_basis')), }; } /** * @template T * @typedef {Object} BlockCountPattern - * @property {Indexes2} base - * @property {Indexes3} cumulative - * @property {Indexes4} sum + * @property {MetricPattern25} base + * @property {MetricPattern1} cumulative + * @property {MetricPattern2} sum */ /** * Create a BlockCountPattern pattern node * @template T * @param {BrkClientBase} client - * @param {string} basePath + * @param {string} acc - Accumulated metric name * @returns {BlockCountPattern} */ -function createBlockCountPattern(client, basePath) { +function createBlockCountPattern(client, acc) { return { - base: createIndexes2(client, `${basePath}/base`), - cumulative: createIndexes3(client, `${basePath}/cumulative`), - sum: createIndexes4(client, `${basePath}/sum`) + base: createMetricPattern25(client, acc), + cumulative: createMetricPattern1(client, _m(acc, 'cumulative')), + sum: createMetricPattern2(client, _m(acc, 'sum')), }; } /** - * @typedef {Object} PricePaidPattern - * @property {Indexes3} maxPricePaid - * @property {Indexes3} minPricePaid + * @template T + * @typedef {Object} BitcoinPattern2 + * @property {MetricPattern25} base + * @property {MetricPattern1} cumulative + * @property {MetricPattern2} last */ /** - * Create a PricePaidPattern pattern node + * Create a BitcoinPattern2 pattern node + * @template T * @param {BrkClientBase} client - * @param {string} basePath - * @returns {PricePaidPattern} + * @param {string} acc - Accumulated metric name + * @returns {BitcoinPattern2} */ -function createPricePaidPattern(client, basePath) { +function 
createBitcoinPattern2(client, acc) { return { - maxPricePaid: createIndexes3(client, `${basePath}/max_price_paid`), - minPricePaid: createIndexes3(client, `${basePath}/min_price_paid`) + base: createMetricPattern25(client, acc), + cumulative: createMetricPattern1(client, _m(acc, 'cumulative')), + last: createMetricPattern2(client, acc), + }; +} + +/** + * @typedef {Object} SatsPattern4 + * @property {MetricPattern1} cumulative + * @property {MetricPattern2} last + */ + +/** + * Create a SatsPattern4 pattern node + * @param {BrkClientBase} client + * @param {string} acc - Accumulated metric name + * @returns {SatsPattern4} + */ +function createSatsPattern4(client, acc) { + return { + cumulative: createMetricPattern1(client, _m(acc, 'cumulative')), + last: createMetricPattern2(client, acc), + }; +} + +/** + * @typedef {Object} CostBasisPattern + * @property {MetricPattern1} maxCostBasis + * @property {MetricPattern1} minCostBasis + */ + +/** + * Create a CostBasisPattern pattern node + * @param {BrkClientBase} client + * @param {string} acc - Accumulated metric name + * @returns {CostBasisPattern} + */ +function createCostBasisPattern(client, acc) { + return { + maxCostBasis: createMetricPattern1(client, _m(acc, 'max_cost_basis')), + minCostBasis: createMetricPattern1(client, _m(acc, 'min_cost_basis')), + }; +} + +/** + * @typedef {Object} SatsPattern + * @property {MetricPattern1} cumulative + * @property {MetricPattern2} sum + */ + +/** + * Create a SatsPattern pattern node + * @param {BrkClientBase} client + * @param {string} acc - Accumulated metric name + * @returns {SatsPattern} + */ +function createSatsPattern(client, acc) { + return { + cumulative: createMetricPattern1(client, _m(acc, 'cumulative')), + sum: createMetricPattern2(client, acc), }; } /** * @typedef {Object} _1dReturns1mSdPattern - * @property {Indexes} sd - * @property {Indexes} sma + * @property {MetricPattern4} sd + * @property {MetricPattern4} sma */ /** @@ -3576,84 +3278,65 @@ function 
createPricePaidPattern(client, basePath) { */ function create_1dReturns1mSdPattern(client, acc) { return { - sd: createIndexes(client, `/${acc}_sd`), - sma: createIndexes(client, `/${acc}_sma`) + sd: createMetricPattern4(client, _m(acc, 'sd')), + sma: createMetricPattern4(client, _m(acc, 'sma')), }; } /** * @typedef {Object} SupplyValuePattern - * @property {Indexes2} bitcoin - * @property {Indexes2} dollars + * @property {MetricPattern25} bitcoin + * @property {MetricPattern25} dollars */ /** * Create a SupplyValuePattern pattern node * @param {BrkClientBase} client - * @param {string} basePath + * @param {string} acc - Accumulated metric name * @returns {SupplyValuePattern} */ -function createSupplyValuePattern(client, basePath) { +function createSupplyValuePattern(client, acc) { return { - bitcoin: createIndexes2(client, `${basePath}/bitcoin`), - dollars: createIndexes2(client, `${basePath}/dollars`) - }; -} - -/** - * @typedef {Object} SatsPattern - * @property {Indexes3} cumulative - * @property {Indexes4} sum - */ - -/** - * Create a SatsPattern pattern node - * @param {BrkClientBase} client - * @param {string} basePath - * @returns {SatsPattern} - */ -function createSatsPattern(client, basePath) { - return { - cumulative: createIndexes3(client, `${basePath}/cumulative`), - sum: createIndexes4(client, `${basePath}/sum`) + bitcoin: createMetricPattern25(client, _m(acc, 'btc')), + dollars: createMetricPattern25(client, _m(acc, 'usd')), }; } /** * @template T - * @typedef {Object} BitcoinPattern2 - * @property {Indexes2} base - * @property {Indexes4} sum + * @typedef {Object} TotalRealizedPnlPattern + * @property {MetricPattern25} base + * @property {MetricPattern2} sum */ /** - * Create a BitcoinPattern2 pattern node + * Create a TotalRealizedPnlPattern pattern node * @template T * @param {BrkClientBase} client - * @param {string} basePath - * @returns {BitcoinPattern2} + * @param {string} acc - Accumulated metric name + * @returns {TotalRealizedPnlPattern} */ 
-function createBitcoinPattern2(client, basePath) { +function createTotalRealizedPnlPattern(client, acc) { return { - base: createIndexes2(client, `${basePath}/base`), - sum: createIndexes4(client, `${basePath}/sum`) + base: createMetricPattern25(client, acc), + sum: createMetricPattern2(client, _m(acc, 'sum')), }; } /** * @typedef {Object} RealizedPriceExtraPattern - * @property {Indexes} ratio + * @property {MetricPattern4} ratio */ /** * Create a RealizedPriceExtraPattern pattern node * @param {BrkClientBase} client - * @param {string} basePath + * @param {string} acc - Accumulated metric name * @returns {RealizedPriceExtraPattern} */ -function createRealizedPriceExtraPattern(client, basePath) { +function createRealizedPriceExtraPattern(client, acc) { return { - ratio: createIndexes(client, `${basePath}/ratio`) + ratio: createMetricPattern4(client, _m(acc, 'ratio')), }; } @@ -3667,171 +3350,178 @@ function createRealizedPriceExtraPattern(client, basePath) { /** * @typedef {Object} CatalogTree_Computed - * @property {CatalogTree_Computed_Blks} blks - * @property {CatalogTree_Computed_Chain} chain + * @property {CatalogTree_Computed_Blocks} blocks * @property {CatalogTree_Computed_Cointime} cointime * @property {CatalogTree_Computed_Constants} constants - * @property {CatalogTree_Computed_Fetched} fetched + * @property {CatalogTree_Computed_Distribution} distribution * @property {CatalogTree_Computed_Indexes} indexes + * @property {CatalogTree_Computed_Inputs} inputs * @property {CatalogTree_Computed_Market} market + * @property {CatalogTree_Computed_Outputs} outputs * @property {CatalogTree_Computed_Pools} pools + * @property {CatalogTree_Computed_Positions} positions * @property {CatalogTree_Computed_Price} price - * @property {CatalogTree_Computed_Stateful} stateful - * @property {CatalogTree_Computed_Txins} txins - * @property {CatalogTree_Computed_Txouts} txouts + * @property {CatalogTree_Computed_Scripts} scripts + * @property {CatalogTree_Computed_Supply} 
supply + * @property {CatalogTree_Computed_Transactions} transactions */ /** - * @typedef {Object} CatalogTree_Computed_Blks - * @property {MetricNode} position + * @typedef {Object} CatalogTree_Computed_Blocks + * @property {CatalogTree_Computed_Blocks_Count} count + * @property {CatalogTree_Computed_Blocks_Difficulty} difficulty + * @property {CatalogTree_Computed_Blocks_Halving} halving + * @property {CatalogTree_Computed_Blocks_Interval} interval + * @property {CatalogTree_Computed_Blocks_Mining} mining + * @property {CatalogTree_Computed_Blocks_Rewards} rewards + * @property {CatalogTree_Computed_Blocks_Size} size + * @property {CatalogTree_Computed_Blocks_Time} time + * @property {CatalogTree_Computed_Blocks_Weight} weight */ /** - * @typedef {Object} CatalogTree_Computed_Chain - * @property {Indexes} _1mBlockCount - * @property {Indexes} _1wBlockCount - * @property {Indexes} _1yBlockCount - * @property {Indexes2} _24hBlockCount - * @property {Indexes2} _24hCoinbaseSum - * @property {Indexes2} _24hCoinbaseUsdSum - * @property {Indexes} annualizedVolume - * @property {Indexes} annualizedVolumeBtc - * @property {Indexes} annualizedVolumeUsd + * @typedef {Object} CatalogTree_Computed_Blocks_Count + * @property {MetricPattern4} _1mBlockCount + * @property {MetricPattern4} _1wBlockCount + * @property {MetricPattern4} _1yBlockCount + * @property {MetricPattern25} _24hBlockCount * @property {BlockCountPattern} blockCount - * @property {Indexes} blockCountTarget + * @property {MetricPattern4} blockCountTarget + */ + +/** + * @typedef {Object} CatalogTree_Computed_Blocks_Difficulty + * @property {MetricPattern1} blocksBeforeNextDifficultyAdjustment + * @property {MetricPattern1} daysBeforeNextDifficultyAdjustment + * @property {MetricPattern4} difficultyepoch + */ + +/** + * @typedef {Object} CatalogTree_Computed_Blocks_Halving + * @property {MetricPattern1} blocksBeforeNextHalving + * @property {MetricPattern1} daysBeforeNextHalving + * @property {MetricPattern4} 
halvingepoch + */ + +/** + * @typedef {Object} CatalogTree_Computed_Blocks_Interval * @property {BlockIntervalPattern} blockInterval + * @property {MetricPattern25} interval + */ + +/** + * @typedef {Object} CatalogTree_Computed_Blocks_Mining + * @property {MetricPattern2} difficulty + * @property {MetricPattern1} difficultyAdjustment + * @property {MetricPattern1} difficultyAsHash + * @property {MetricPattern1} hashPricePhs + * @property {MetricPattern1} hashPricePhsMin + * @property {MetricPattern1} hashPriceRebound + * @property {MetricPattern1} hashPriceThs + * @property {MetricPattern1} hashPriceThsMin + * @property {MetricPattern1} hashRate + * @property {MetricPattern4} hashRate1mSma + * @property {MetricPattern4} hashRate1wSma + * @property {MetricPattern4} hashRate1ySma + * @property {MetricPattern4} hashRate2mSma + * @property {MetricPattern1} hashValuePhs + * @property {MetricPattern1} hashValuePhsMin + * @property {MetricPattern1} hashValueRebound + * @property {MetricPattern1} hashValueThs + * @property {MetricPattern1} hashValueThsMin + */ + +/** + * @typedef {Object} CatalogTree_Computed_Blocks_Rewards + * @property {MetricPattern25} _24hCoinbaseSum + * @property {MetricPattern25} _24hCoinbaseUsdSum + * @property {CoinbasePattern} coinbase + * @property {MetricPattern21} feeDominance + * @property {CoinbasePattern} subsidy + * @property {MetricPattern21} subsidyDominance + * @property {MetricPattern4} subsidyUsd1ySma + * @property {UnclaimedRewardsPattern} unclaimedRewards + */ + +/** + * @typedef {Object} CatalogTree_Computed_Blocks_Size * @property {BlockSizePattern} blockSize * @property {BlockSizePattern} blockVbytes + * @property {MetricPattern25} vbytes + */ + +/** + * @typedef {Object} CatalogTree_Computed_Blocks_Time + * @property {MetricPattern25} date + * @property {MetricPattern25} dateFixed + * @property {MetricPattern2} timestamp + * @property {MetricPattern25} timestampFixed + */ + +/** + * @typedef {Object} 
CatalogTree_Computed_Blocks_Weight + * @property {BitcoinPattern} blockFullness * @property {BlockSizePattern} blockWeight - * @property {Indexes3} blocksBeforeNextDifficultyAdjustment - * @property {Indexes3} blocksBeforeNextHalving - * @property {CoinbasePattern} coinbase - * @property {Indexes3} daysBeforeNextDifficultyAdjustment - * @property {Indexes3} daysBeforeNextHalving - * @property {Indexes4} difficulty - * @property {Indexes3} difficultyAdjustment - * @property {Indexes3} difficultyAsHash - * @property {Indexes} difficultyepoch - * @property {BitcoinPattern} emptyoutputCount - * @property {Indexes3} exactUtxoCount - * @property {CatalogTree_Computed_Chain_Fee} fee - * @property {Indexes5} feeDominance - * @property {CatalogTree_Computed_Chain_FeeRate} feeRate - * @property {Indexes} halvingepoch - * @property {Indexes3} hashPricePhs - * @property {Indexes3} hashPricePhsMin - * @property {Indexes3} hashPriceRebound - * @property {Indexes3} hashPriceThs - * @property {Indexes3} hashPriceThsMin - * @property {Indexes3} hashRate - * @property {Indexes} hashRate1mSma - * @property {Indexes} hashRate1wSma - * @property {Indexes} hashRate1ySma - * @property {Indexes} hashRate2mSma - * @property {Indexes3} hashValuePhs - * @property {Indexes3} hashValuePhsMin - * @property {Indexes3} hashValueRebound - * @property {Indexes3} hashValueThs - * @property {Indexes3} hashValueThsMin - * @property {Indexes} inflationRate - * @property {BlockSizePattern} inputCount - * @property {Indexes6} inputValue - * @property {Indexes} inputsPerSec - * @property {Indexes2} interval - * @property {Indexes6} isCoinbase - * @property {BitcoinPattern} opreturnCount - * @property {BlockSizePattern} outputCount - * @property {Indexes6} outputValue - * @property {Indexes} outputsPerSec - * @property {BitcoinPattern} p2aCount - * @property {BitcoinPattern} p2msCount - * @property {BitcoinPattern} p2pk33Count - * @property {BitcoinPattern} p2pk65Count - * @property {BitcoinPattern} 
p2pkhCount - * @property {BitcoinPattern} p2shCount - * @property {BitcoinPattern} p2trCount - * @property {BitcoinPattern} p2wpkhCount - * @property {BitcoinPattern} p2wshCount - * @property {Indexes} puellMultiple - * @property {CatalogTree_Computed_Chain_SentSum} sentSum - * @property {CoinbasePattern} subsidy - * @property {Indexes5} subsidyDominance - * @property {Indexes} subsidyUsd1ySma - * @property {MetricNode} timestamp - * @property {Indexes} txBtcVelocity - * @property {BitcoinPattern} txCount - * @property {Indexes} txPerSec - * @property {Indexes} txUsdVelocity - * @property {BlockCountPattern} txV1 - * @property {BlockCountPattern} txV2 - * @property {BlockCountPattern} txV3 - * @property {BlockIntervalPattern} txVsize - * @property {BlockIntervalPattern} txWeight - * @property {UnclaimedRewardsPattern} unclaimedRewards - * @property {BitcoinPattern} unknownoutputCount - * @property {Indexes2} vbytes - * @property {Indexes6} vsize - * @property {Indexes6} weight - */ - -/** - * @typedef {Object} CatalogTree_Computed_Chain_Fee - * @property {Indexes6} base - * @property {BlockSizePattern} bitcoin - * @property {Indexes6} bitcoinTxindex - * @property {BlockSizePattern} dollars - * @property {Indexes6} dollarsTxindex - * @property {BlockSizePattern} sats - */ - -/** - * @typedef {Object} CatalogTree_Computed_Chain_FeeRate - * @property {Indexes3} average - * @property {Indexes6} base - * @property {Indexes3} max - * @property {Indexes2} median - * @property {Indexes3} min - * @property {Indexes2} pct10 - * @property {Indexes2} pct25 - * @property {Indexes2} pct75 - * @property {Indexes2} pct90 - */ - -/** - * @typedef {Object} CatalogTree_Computed_Chain_SentSum - * @property {BitcoinPattern2} bitcoin - * @property {Indexes3} dollars - * @property {Indexes3} sats */ /** * @typedef {Object} CatalogTree_Computed_Cointime - * @property {Indexes3} activeCap - * @property {Indexes3} activePrice - * @property {ActivePriceRatioPattern} activePriceRatio - * 
@property {ActiveSupplyPattern} activeSupply - * @property {Indexes3} activityToVaultednessRatio + * @property {CatalogTree_Computed_Cointime_Activity} activity + * @property {CatalogTree_Computed_Cointime_Adjusted} adjusted + * @property {CatalogTree_Computed_Cointime_Cap} cap + * @property {CatalogTree_Computed_Cointime_Pricing} pricing + * @property {CatalogTree_Computed_Cointime_Supply} supply + * @property {CatalogTree_Computed_Cointime_Value} value + */ + +/** + * @typedef {Object} CatalogTree_Computed_Cointime_Activity + * @property {MetricPattern1} activityToVaultednessRatio * @property {BlockCountPattern} coinblocksCreated * @property {BlockCountPattern} coinblocksStored - * @property {Indexes} cointimeAdjInflationRate - * @property {Indexes} cointimeAdjTxBtcVelocity - * @property {Indexes} cointimeAdjTxUsdVelocity - * @property {Indexes3} cointimeCap - * @property {Indexes3} cointimePrice + * @property {MetricPattern1} liveliness + * @property {MetricPattern1} vaultedness + */ + +/** + * @typedef {Object} CatalogTree_Computed_Cointime_Adjusted + * @property {MetricPattern4} cointimeAdjInflationRate + * @property {MetricPattern4} cointimeAdjTxBtcVelocity + * @property {MetricPattern4} cointimeAdjTxUsdVelocity + */ + +/** + * @typedef {Object} CatalogTree_Computed_Cointime_Cap + * @property {MetricPattern1} activeCap + * @property {MetricPattern1} cointimeCap + * @property {MetricPattern1} investorCap + * @property {MetricPattern1} thermoCap + * @property {MetricPattern1} vaultedCap + */ + +/** + * @typedef {Object} CatalogTree_Computed_Cointime_Pricing + * @property {MetricPattern1} activePrice + * @property {ActivePriceRatioPattern} activePriceRatio + * @property {MetricPattern1} cointimePrice * @property {ActivePriceRatioPattern} cointimePriceRatio + * @property {MetricPattern1} trueMarketMean + * @property {ActivePriceRatioPattern} trueMarketMeanRatio + * @property {MetricPattern1} vaultedPrice + * @property {ActivePriceRatioPattern} vaultedPriceRatio + 
*/ + +/** + * @typedef {Object} CatalogTree_Computed_Cointime_Supply + * @property {ActiveSupplyPattern} activeSupply + * @property {ActiveSupplyPattern} vaultedSupply + */ + +/** + * @typedef {Object} CatalogTree_Computed_Cointime_Value * @property {BlockCountPattern} cointimeValueCreated * @property {BlockCountPattern} cointimeValueDestroyed * @property {BlockCountPattern} cointimeValueStored - * @property {Indexes3} investorCap - * @property {Indexes3} liveliness - * @property {Indexes3} thermoCap - * @property {Indexes3} trueMarketMean - * @property {ActivePriceRatioPattern} trueMarketMeanRatio - * @property {Indexes3} vaultedCap - * @property {Indexes3} vaultedPrice - * @property {ActivePriceRatioPattern} vaultedPriceRatio - * @property {ActiveSupplyPattern} vaultedSupply - * @property {Indexes3} vaultedness */ /** @@ -3853,221 +3543,636 @@ function createRealizedPriceExtraPattern(client, basePath) { */ /** - * @typedef {Object} CatalogTree_Computed_Fetched - * @property {Indexes13} priceOhlcInCents + * @typedef {Object} CatalogTree_Computed_Distribution + * @property {MetricPattern1} addrCount + * @property {CatalogTree_Computed_Distribution_AddressCohorts} addressCohorts + * @property {CatalogTree_Computed_Distribution_AddressesData} addressesData + * @property {AddresstypeToHeightToAddrCountPattern} addresstypeToHeightToAddrCount + * @property {AddresstypeToHeightToAddrCountPattern} addresstypeToHeightToEmptyAddrCount + * @property {AddresstypeToHeightToAddrCountPattern} addresstypeToIndexesToAddrCount + * @property {AddresstypeToHeightToAddrCountPattern} addresstypeToIndexesToEmptyAddrCount + * @property {AddresstypeToHeightToAddrCountPattern} anyAddressIndexes + * @property {MetricPattern25} chainState + * @property {MetricPattern1} emptyAddrCount + * @property {MetricPattern46} emptyaddressindex + * @property {MetricPattern45} loadedaddressindex + * @property {CatalogTree_Computed_Distribution_UtxoCohorts} utxoCohorts + */ + +/** + * @typedef {Object} 
CatalogTree_Computed_Distribution_AddressCohorts + * @property {CatalogTree_Computed_Distribution_AddressCohorts_AmountRange} amountRange + * @property {CatalogTree_Computed_Distribution_AddressCohorts_GeAmount} geAmount + * @property {CatalogTree_Computed_Distribution_AddressCohorts_LtAmount} ltAmount + */ + +/** + * @typedef {Object} CatalogTree_Computed_Distribution_AddressCohorts_AmountRange + * @property {_0satsPattern} _0sats + * @property {_0satsPattern} _100btcTo1kBtc + * @property {_0satsPattern} _100kBtcOrMore + * @property {_0satsPattern} _100kSatsTo1mSats + * @property {_0satsPattern} _100satsTo1kSats + * @property {_0satsPattern} _10btcTo100btc + * @property {_0satsPattern} _10kBtcTo100kBtc + * @property {_0satsPattern} _10kSatsTo100kSats + * @property {_0satsPattern} _10mSatsTo1btc + * @property {_0satsPattern} _10satsTo100sats + * @property {_0satsPattern} _1btcTo10btc + * @property {_0satsPattern} _1kBtcTo10kBtc + * @property {_0satsPattern} _1kSatsTo10kSats + * @property {_0satsPattern} _1mSatsTo10mSats + * @property {_0satsPattern} _1satTo10sats + */ + +/** + * @typedef {Object} CatalogTree_Computed_Distribution_AddressCohorts_GeAmount + * @property {_0satsPattern} _100btc + * @property {_0satsPattern} _100kSats + * @property {_0satsPattern} _100sats + * @property {_0satsPattern} _10btc + * @property {_0satsPattern} _10kBtc + * @property {_0satsPattern} _10kSats + * @property {_0satsPattern} _10mSats + * @property {_0satsPattern} _10sats + * @property {_0satsPattern} _1btc + * @property {_0satsPattern} _1kBtc + * @property {_0satsPattern} _1kSats + * @property {_0satsPattern} _1mSats + * @property {_0satsPattern} _1sat + */ + +/** + * @typedef {Object} CatalogTree_Computed_Distribution_AddressCohorts_LtAmount + * @property {_0satsPattern} _100btc + * @property {_0satsPattern} _100kBtc + * @property {_0satsPattern} _100kSats + * @property {_0satsPattern} _100sats + * @property {_0satsPattern} _10btc + * @property {_0satsPattern} _10kBtc + * 
@property {_0satsPattern} _10kSats + * @property {_0satsPattern} _10mSats + * @property {_0satsPattern} _10sats + * @property {_0satsPattern} _1btc + * @property {_0satsPattern} _1kBtc + * @property {_0satsPattern} _1kSats + * @property {_0satsPattern} _1mSats + */ + +/** + * @typedef {Object} CatalogTree_Computed_Distribution_AddressesData + * @property {MetricPattern46} empty + * @property {MetricPattern45} loaded + */ + +/** + * @typedef {Object} CatalogTree_Computed_Distribution_UtxoCohorts + * @property {CatalogTree_Computed_Distribution_UtxoCohorts_AgeRange} ageRange + * @property {CatalogTree_Computed_Distribution_UtxoCohorts_All} all + * @property {CatalogTree_Computed_Distribution_UtxoCohorts_AmountRange} amountRange + * @property {CatalogTree_Computed_Distribution_UtxoCohorts_Epoch} epoch + * @property {CatalogTree_Computed_Distribution_UtxoCohorts_GeAmount} geAmount + * @property {CatalogTree_Computed_Distribution_UtxoCohorts_LtAmount} ltAmount + * @property {CatalogTree_Computed_Distribution_UtxoCohorts_MaxAge} maxAge + * @property {CatalogTree_Computed_Distribution_UtxoCohorts_MinAge} minAge + * @property {CatalogTree_Computed_Distribution_UtxoCohorts_Term} term + * @property {CatalogTree_Computed_Distribution_UtxoCohorts_Type} type + * @property {CatalogTree_Computed_Distribution_UtxoCohorts_Year} year + */ + +/** + * @typedef {Object} CatalogTree_Computed_Distribution_UtxoCohorts_AgeRange + * @property {_10yTo12yPattern} _10yTo12y + * @property {_10yTo12yPattern} _12yTo15y + * @property {_10yTo12yPattern} _1dTo1w + * @property {_10yTo12yPattern} _1mTo2m + * @property {_10yTo12yPattern} _1wTo1m + * @property {_10yTo12yPattern} _1yTo2y + * @property {_10yTo12yPattern} _2mTo3m + * @property {_10yTo12yPattern} _2yTo3y + * @property {_10yTo12yPattern} _3mTo4m + * @property {_10yTo12yPattern} _3yTo4y + * @property {_10yTo12yPattern} _4mTo5m + * @property {_10yTo12yPattern} _4yTo5y + * @property {_10yTo12yPattern} _5mTo6m + * @property {_10yTo12yPattern} 
_5yTo6y + * @property {_10yTo12yPattern} _6mTo1y + * @property {_10yTo12yPattern} _6yTo7y + * @property {_10yTo12yPattern} _7yTo8y + * @property {_10yTo12yPattern} _8yTo10y + * @property {_10yTo12yPattern} from15y + * @property {UpTo1dPattern} upTo1d + */ + +/** + * @typedef {Object} CatalogTree_Computed_Distribution_UtxoCohorts_All + * @property {ActivityPattern2} activity + * @property {CostBasisPattern2} costBasis + * @property {RealizedPattern3} realized + * @property {CatalogTree_Computed_Distribution_UtxoCohorts_All_Relative} relative + * @property {SupplyPattern3} supply + * @property {UnrealizedPattern} unrealized + */ + +/** + * @typedef {Object} CatalogTree_Computed_Distribution_UtxoCohorts_All_Relative + * @property {MetricPattern5} negUnrealizedLossRelToOwnTotalUnrealizedPnl + * @property {MetricPattern3} netUnrealizedPnlRelToOwnTotalUnrealizedPnl + * @property {MetricPattern5} supplyInLossRelToOwnSupply + * @property {MetricPattern5} supplyInProfitRelToOwnSupply + * @property {MetricPattern5} unrealizedLossRelToOwnTotalUnrealizedPnl + * @property {MetricPattern5} unrealizedProfitRelToOwnTotalUnrealizedPnl + */ + +/** + * @typedef {Object} CatalogTree_Computed_Distribution_UtxoCohorts_AmountRange + * @property {_0satsPattern2} _0sats + * @property {_0satsPattern2} _100btcTo1kBtc + * @property {_0satsPattern2} _100kBtcOrMore + * @property {_0satsPattern2} _100kSatsTo1mSats + * @property {_0satsPattern2} _100satsTo1kSats + * @property {_0satsPattern2} _10btcTo100btc + * @property {_0satsPattern2} _10kBtcTo100kBtc + * @property {_0satsPattern2} _10kSatsTo100kSats + * @property {_0satsPattern2} _10mSatsTo1btc + * @property {_0satsPattern2} _10satsTo100sats + * @property {_0satsPattern2} _1btcTo10btc + * @property {_0satsPattern2} _1kBtcTo10kBtc + * @property {_0satsPattern2} _1kSatsTo10kSats + * @property {_0satsPattern2} _1mSatsTo10mSats + * @property {_0satsPattern2} _1satTo10sats + */ + +/** + * @typedef {Object} 
CatalogTree_Computed_Distribution_UtxoCohorts_Epoch + * @property {_10yTo12yPattern} _0 + * @property {_10yTo12yPattern} _1 + * @property {_10yTo12yPattern} _2 + * @property {_10yTo12yPattern} _3 + * @property {_10yTo12yPattern} _4 + */ + +/** + * @typedef {Object} CatalogTree_Computed_Distribution_UtxoCohorts_GeAmount + * @property {_0satsPattern2} _100btc + * @property {_0satsPattern2} _100kSats + * @property {_0satsPattern2} _100sats + * @property {_0satsPattern2} _10btc + * @property {_0satsPattern2} _10kBtc + * @property {_0satsPattern2} _10kSats + * @property {_0satsPattern2} _10mSats + * @property {_0satsPattern2} _10sats + * @property {_0satsPattern2} _1btc + * @property {_0satsPattern2} _1kBtc + * @property {_0satsPattern2} _1kSats + * @property {_0satsPattern2} _1mSats + * @property {_0satsPattern2} _1sat + */ + +/** + * @typedef {Object} CatalogTree_Computed_Distribution_UtxoCohorts_LtAmount + * @property {_0satsPattern2} _100btc + * @property {_0satsPattern2} _100kBtc + * @property {_0satsPattern2} _100kSats + * @property {_0satsPattern2} _100sats + * @property {_0satsPattern2} _10btc + * @property {_0satsPattern2} _10kBtc + * @property {_0satsPattern2} _10kSats + * @property {_0satsPattern2} _10mSats + * @property {_0satsPattern2} _10sats + * @property {_0satsPattern2} _1btc + * @property {_0satsPattern2} _1kBtc + * @property {_0satsPattern2} _1kSats + * @property {_0satsPattern2} _1mSats + */ + +/** + * @typedef {Object} CatalogTree_Computed_Distribution_UtxoCohorts_MaxAge + * @property {UpTo1dPattern} _10y + * @property {UpTo1dPattern} _12y + * @property {UpTo1dPattern} _15y + * @property {UpTo1dPattern} _1m + * @property {UpTo1dPattern} _1w + * @property {UpTo1dPattern} _1y + * @property {UpTo1dPattern} _2m + * @property {UpTo1dPattern} _2y + * @property {UpTo1dPattern} _3m + * @property {UpTo1dPattern} _3y + * @property {UpTo1dPattern} _4m + * @property {UpTo1dPattern} _4y + * @property {UpTo1dPattern} _5m + * @property {UpTo1dPattern} _5y + * 
@property {UpTo1dPattern} _6m + * @property {UpTo1dPattern} _6y + * @property {UpTo1dPattern} _7y + * @property {UpTo1dPattern} _8y + */ + +/** + * @typedef {Object} CatalogTree_Computed_Distribution_UtxoCohorts_MinAge + * @property {_10yTo12yPattern} _10y + * @property {_10yTo12yPattern} _12y + * @property {_10yTo12yPattern} _1d + * @property {_10yTo12yPattern} _1m + * @property {_10yTo12yPattern} _1w + * @property {_10yTo12yPattern} _1y + * @property {_10yTo12yPattern} _2m + * @property {_10yTo12yPattern} _2y + * @property {_10yTo12yPattern} _3m + * @property {_10yTo12yPattern} _3y + * @property {_10yTo12yPattern} _4m + * @property {_10yTo12yPattern} _4y + * @property {_10yTo12yPattern} _5m + * @property {_10yTo12yPattern} _5y + * @property {_10yTo12yPattern} _6m + * @property {_10yTo12yPattern} _6y + * @property {_10yTo12yPattern} _7y + * @property {_10yTo12yPattern} _8y + */ + +/** + * @typedef {Object} CatalogTree_Computed_Distribution_UtxoCohorts_Term + * @property {UpTo1dPattern} long + * @property {UpTo1dPattern} short + */ + +/** + * @typedef {Object} CatalogTree_Computed_Distribution_UtxoCohorts_Type + * @property {_0satsPattern2} empty + * @property {_0satsPattern2} p2a + * @property {_0satsPattern2} p2ms + * @property {_0satsPattern2} p2pk33 + * @property {_0satsPattern2} p2pk65 + * @property {_0satsPattern2} p2pkh + * @property {_0satsPattern2} p2sh + * @property {_0satsPattern2} p2tr + * @property {_0satsPattern2} p2wpkh + * @property {_0satsPattern2} p2wsh + * @property {_0satsPattern2} unknown + */ + +/** + * @typedef {Object} CatalogTree_Computed_Distribution_UtxoCohorts_Year + * @property {_10yTo12yPattern} _2009 + * @property {_10yTo12yPattern} _2010 + * @property {_10yTo12yPattern} _2011 + * @property {_10yTo12yPattern} _2012 + * @property {_10yTo12yPattern} _2013 + * @property {_10yTo12yPattern} _2014 + * @property {_10yTo12yPattern} _2015 + * @property {_10yTo12yPattern} _2016 + * @property {_10yTo12yPattern} _2017 + * @property 
{_10yTo12yPattern} _2018 + * @property {_10yTo12yPattern} _2019 + * @property {_10yTo12yPattern} _2020 + * @property {_10yTo12yPattern} _2021 + * @property {_10yTo12yPattern} _2022 + * @property {_10yTo12yPattern} _2023 + * @property {_10yTo12yPattern} _2024 + * @property {_10yTo12yPattern} _2025 + * @property {_10yTo12yPattern} _2026 */ /** * @typedef {Object} CatalogTree_Computed_Indexes - * @property {Indexes13} date - * @property {Indexes2} dateFixed - * @property {Indexes13} dateindex - * @property {Indexes14} dateindexCount - * @property {MetricNode} decadeindex - * @property {MetricNode} difficultyepoch - * @property {MetricNode} emptyoutputindex - * @property {Indexes14} firstDateindex - * @property {MetricNode} firstHeight - * @property {Indexes15} firstMonthindex - * @property {Indexes7} firstYearindex - * @property {MetricNode} halvingepoch - * @property {Indexes2} height - * @property {MetricNode} heightCount - * @property {Indexes6} inputCount - * @property {MetricNode} monthindex - * @property {Indexes15} monthindexCount - * @property {MetricNode} opreturnindex - * @property {Indexes6} outputCount - * @property {Indexes16} p2aaddressindex - * @property {MetricNode} p2msoutputindex - * @property {Indexes17} p2pk33addressindex - * @property {Indexes18} p2pk65addressindex - * @property {Indexes19} p2pkhaddressindex - * @property {Indexes20} p2shaddressindex - * @property {Indexes21} p2traddressindex - * @property {Indexes22} p2wpkhaddressindex - * @property {Indexes23} p2wshaddressindex - * @property {MetricNode} quarterindex - * @property {MetricNode} semesterindex - * @property {Indexes2} timestampFixed - * @property {Indexes6} txindex - * @property {Indexes2} txindexCount - * @property {Indexes24} txinindex - * @property {Indexes25} txoutindex - * @property {MetricNode} unknownoutputindex - * @property {MetricNode} weekindex - * @property {MetricNode} yearindex - * @property {Indexes7} yearindexCount + * @property 
{CatalogTree_Computed_Indexes_Address} address + * @property {CatalogTree_Computed_Indexes_Block} block + * @property {CatalogTree_Computed_Indexes_Time} time + * @property {CatalogTree_Computed_Indexes_Transaction} transaction + */ + +/** + * @typedef {Object} CatalogTree_Computed_Indexes_Address + * @property {MetricPattern24} emptyoutputindex + * @property {MetricPattern28} opreturnindex + * @property {MetricPattern30} p2aaddressindex + * @property {MetricPattern31} p2msoutputindex + * @property {MetricPattern32} p2pk33addressindex + * @property {MetricPattern33} p2pk65addressindex + * @property {MetricPattern34} p2pkhaddressindex + * @property {MetricPattern35} p2shaddressindex + * @property {MetricPattern36} p2traddressindex + * @property {MetricPattern37} p2wpkhaddressindex + * @property {MetricPattern38} p2wshaddressindex + * @property {MetricPattern42} unknownoutputindex + */ + +/** + * @typedef {Object} CatalogTree_Computed_Indexes_Block + * @property {MetricPattern25} dateindex + * @property {MetricPattern14} difficultyepoch + * @property {MetricPattern13} firstHeight + * @property {MetricPattern15} halvingepoch + * @property {MetricPattern25} height + * @property {MetricPattern23} heightCount + * @property {MetricPattern25} txindexCount + */ + +/** + * @typedef {Object} CatalogTree_Computed_Indexes_Time + * @property {MetricPattern21} date + * @property {MetricPattern21} dateindex + * @property {MetricPattern19} dateindexCount + * @property {MetricPattern12} decadeindex + * @property {MetricPattern19} firstDateindex + * @property {MetricPattern21} firstHeight + * @property {MetricPattern8} firstMonthindex + * @property {MetricPattern22} firstYearindex + * @property {MetricPattern21} heightCount + * @property {MetricPattern10} monthindex + * @property {MetricPattern8} monthindexCount + * @property {MetricPattern17} quarterindex + * @property {MetricPattern18} semesterindex + * @property {MetricPattern11} weekindex + * @property {MetricPattern20} yearindex 
+ * @property {MetricPattern22} yearindexCount + */ + +/** + * @typedef {Object} CatalogTree_Computed_Indexes_Transaction + * @property {MetricPattern41} inputCount + * @property {MetricPattern41} outputCount + * @property {MetricPattern41} txindex + * @property {MetricPattern26} txinindex + * @property {MetricPattern29} txoutindex + */ + +/** + * @typedef {Object} CatalogTree_Computed_Inputs + * @property {CatalogTree_Computed_Inputs_Count} count + * @property {CatalogTree_Computed_Inputs_Spent} spent + */ + +/** + * @typedef {Object} CatalogTree_Computed_Inputs_Count + * @property {BlockSizePattern} count + */ + +/** + * @typedef {Object} CatalogTree_Computed_Inputs_Spent + * @property {MetricPattern26} txoutindex + * @property {MetricPattern26} value */ /** * @typedef {Object} CatalogTree_Computed_Market + * @property {CatalogTree_Computed_Market_Ath} ath + * @property {CatalogTree_Computed_Market_Dca} dca + * @property {CatalogTree_Computed_Market_Indicators} indicators + * @property {CatalogTree_Computed_Market_Lookback} lookback + * @property {CatalogTree_Computed_Market_MovingAverage} movingAverage + * @property {CatalogTree_Computed_Market_Range} range + * @property {CatalogTree_Computed_Market_Returns} returns + * @property {CatalogTree_Computed_Market_Volatility} volatility + */ + +/** + * @typedef {Object} CatalogTree_Computed_Market_Ath + * @property {MetricPattern4} daysSincePriceAth + * @property {MetricPattern4} maxDaysBetweenPriceAths + * @property {MetricPattern4} maxYearsBetweenPriceAths + * @property {MetricPattern3} priceAth + * @property {MetricPattern3} priceDrawdown + */ + +/** + * @typedef {Object} CatalogTree_Computed_Market_Dca + * @property {MetricPattern4} _10yDcaAvgPrice + * @property {MetricPattern4} _10yDcaCagr + * @property {MetricPattern4} _10yDcaReturns + * @property {MetricPattern4} _10yDcaStack + * @property {MetricPattern4} _1mDcaAvgPrice + * @property {MetricPattern4} _1mDcaReturns + * @property {MetricPattern4} _1mDcaStack + * 
@property {MetricPattern4} _1wDcaAvgPrice + * @property {MetricPattern4} _1wDcaReturns + * @property {MetricPattern4} _1wDcaStack + * @property {MetricPattern4} _1yDcaAvgPrice + * @property {MetricPattern4} _1yDcaReturns + * @property {MetricPattern4} _1yDcaStack + * @property {MetricPattern4} _2yDcaAvgPrice + * @property {MetricPattern4} _2yDcaCagr + * @property {MetricPattern4} _2yDcaReturns + * @property {MetricPattern4} _2yDcaStack + * @property {MetricPattern4} _3mDcaAvgPrice + * @property {MetricPattern4} _3mDcaReturns + * @property {MetricPattern4} _3mDcaStack + * @property {MetricPattern4} _3yDcaAvgPrice + * @property {MetricPattern4} _3yDcaCagr + * @property {MetricPattern4} _3yDcaReturns + * @property {MetricPattern4} _3yDcaStack + * @property {MetricPattern4} _4yDcaAvgPrice + * @property {MetricPattern4} _4yDcaCagr + * @property {MetricPattern4} _4yDcaReturns + * @property {MetricPattern4} _4yDcaStack + * @property {MetricPattern4} _5yDcaAvgPrice + * @property {MetricPattern4} _5yDcaCagr + * @property {MetricPattern4} _5yDcaReturns + * @property {MetricPattern4} _5yDcaStack + * @property {MetricPattern4} _6mDcaAvgPrice + * @property {MetricPattern4} _6mDcaReturns + * @property {MetricPattern4} _6mDcaStack + * @property {MetricPattern4} _6yDcaAvgPrice + * @property {MetricPattern4} _6yDcaCagr + * @property {MetricPattern4} _6yDcaReturns + * @property {MetricPattern4} _6yDcaStack + * @property {MetricPattern4} _8yDcaAvgPrice + * @property {MetricPattern4} _8yDcaCagr + * @property {MetricPattern4} _8yDcaReturns + * @property {MetricPattern4} _8yDcaStack + * @property {MetricPattern4} dcaClass2015AvgPrice + * @property {MetricPattern4} dcaClass2015Returns + * @property {MetricPattern4} dcaClass2015Stack + * @property {MetricPattern4} dcaClass2016AvgPrice + * @property {MetricPattern4} dcaClass2016Returns + * @property {MetricPattern4} dcaClass2016Stack + * @property {MetricPattern4} dcaClass2017AvgPrice + * @property {MetricPattern4} dcaClass2017Returns + * 
@property {MetricPattern4} dcaClass2017Stack + * @property {MetricPattern4} dcaClass2018AvgPrice + * @property {MetricPattern4} dcaClass2018Returns + * @property {MetricPattern4} dcaClass2018Stack + * @property {MetricPattern4} dcaClass2019AvgPrice + * @property {MetricPattern4} dcaClass2019Returns + * @property {MetricPattern4} dcaClass2019Stack + * @property {MetricPattern4} dcaClass2020AvgPrice + * @property {MetricPattern4} dcaClass2020Returns + * @property {MetricPattern4} dcaClass2020Stack + * @property {MetricPattern4} dcaClass2021AvgPrice + * @property {MetricPattern4} dcaClass2021Returns + * @property {MetricPattern4} dcaClass2021Stack + * @property {MetricPattern4} dcaClass2022AvgPrice + * @property {MetricPattern4} dcaClass2022Returns + * @property {MetricPattern4} dcaClass2022Stack + * @property {MetricPattern4} dcaClass2023AvgPrice + * @property {MetricPattern4} dcaClass2023Returns + * @property {MetricPattern4} dcaClass2023Stack + * @property {MetricPattern4} dcaClass2024AvgPrice + * @property {MetricPattern4} dcaClass2024Returns + * @property {MetricPattern4} dcaClass2024Stack + * @property {MetricPattern4} dcaClass2025AvgPrice + * @property {MetricPattern4} dcaClass2025Returns + * @property {MetricPattern4} dcaClass2025Stack + */ + +/** + * @typedef {Object} CatalogTree_Computed_Market_Indicators + * @property {MetricPattern21} gini + * @property {MetricPattern21} macdHistogram + * @property {MetricPattern21} macdLine + * @property {MetricPattern21} macdSignal + * @property {MetricPattern21} nvt + * @property {MetricPattern21} piCycle + * @property {MetricPattern4} puellMultiple + * @property {MetricPattern21} rsi14d + * @property {MetricPattern21} rsi14dMax + * @property {MetricPattern21} rsi14dMin + * @property {MetricPattern21} rsiAvgGain14d + * @property {MetricPattern21} rsiAvgLoss14d + * @property {MetricPattern21} rsiGains + * @property {MetricPattern21} rsiLosses + * @property {MetricPattern21} stochD + * @property {MetricPattern21} stochK + 
* @property {MetricPattern21} stochRsi + * @property {MetricPattern21} stochRsiD + * @property {MetricPattern21} stochRsiK + */ + +/** + * @typedef {Object} CatalogTree_Computed_Market_Lookback + * @property {MetricPattern4} price10yAgo + * @property {MetricPattern4} price1dAgo + * @property {MetricPattern4} price1mAgo + * @property {MetricPattern4} price1wAgo + * @property {MetricPattern4} price1yAgo + * @property {MetricPattern4} price2yAgo + * @property {MetricPattern4} price3mAgo + * @property {MetricPattern4} price3yAgo + * @property {MetricPattern4} price4yAgo + * @property {MetricPattern4} price5yAgo + * @property {MetricPattern4} price6mAgo + * @property {MetricPattern4} price6yAgo + * @property {MetricPattern4} price8yAgo + */ + +/** + * @typedef {Object} CatalogTree_Computed_Market_MovingAverage + * @property {Price111dSmaPattern} price111dSma + * @property {Price111dSmaPattern} price12dEma + * @property {Price111dSmaPattern} price13dEma + * @property {Price111dSmaPattern} price13dSma + * @property {Price111dSmaPattern} price144dEma + * @property {Price111dSmaPattern} price144dSma + * @property {Price111dSmaPattern} price1mEma + * @property {Price111dSmaPattern} price1mSma + * @property {Price111dSmaPattern} price1wEma + * @property {Price111dSmaPattern} price1wSma + * @property {Price111dSmaPattern} price1yEma + * @property {Price111dSmaPattern} price1ySma + * @property {Price111dSmaPattern} price200dEma + * @property {Price111dSmaPattern} price200dSma + * @property {MetricPattern4} price200dSmaX08 + * @property {MetricPattern4} price200dSmaX24 + * @property {Price111dSmaPattern} price200wEma + * @property {Price111dSmaPattern} price200wSma + * @property {Price111dSmaPattern} price21dEma + * @property {Price111dSmaPattern} price21dSma + * @property {Price111dSmaPattern} price26dEma + * @property {Price111dSmaPattern} price2yEma + * @property {Price111dSmaPattern} price2ySma + * @property {Price111dSmaPattern} price34dEma + * @property 
{Price111dSmaPattern} price34dSma + * @property {Price111dSmaPattern} price350dSma + * @property {MetricPattern4} price350dSmaX2 + * @property {Price111dSmaPattern} price4yEma + * @property {Price111dSmaPattern} price4ySma + * @property {Price111dSmaPattern} price55dEma + * @property {Price111dSmaPattern} price55dSma + * @property {Price111dSmaPattern} price89dEma + * @property {Price111dSmaPattern} price89dSma + * @property {Price111dSmaPattern} price8dEma + * @property {Price111dSmaPattern} price8dSma + */ + +/** + * @typedef {Object} CatalogTree_Computed_Market_Range + * @property {MetricPattern4} price1mMax + * @property {MetricPattern4} price1mMin + * @property {MetricPattern4} price1wMax + * @property {MetricPattern4} price1wMin + * @property {MetricPattern4} price1yMax + * @property {MetricPattern4} price1yMin + * @property {MetricPattern4} price2wChoppinessIndex + * @property {MetricPattern4} price2wMax + * @property {MetricPattern4} price2wMin + * @property {MetricPattern21} priceTrueRange + * @property {MetricPattern21} priceTrueRange2wSum + */ + +/** + * @typedef {Object} CatalogTree_Computed_Market_Returns * @property {_1dReturns1mSdPattern} _1dReturns1mSd * @property {_1dReturns1mSdPattern} _1dReturns1wSd * @property {_1dReturns1mSdPattern} _1dReturns1ySd - * @property {Indexes} _10yCagr - * @property {Indexes} _10yDcaAvgPrice - * @property {Indexes} _10yDcaCagr - * @property {Indexes} _10yDcaReturns - * @property {Indexes} _10yDcaStack - * @property {Indexes} _10yPriceReturns - * @property {Indexes} _1dPriceReturns - * @property {Indexes} _1mDcaAvgPrice - * @property {Indexes} _1mDcaReturns - * @property {Indexes} _1mDcaStack - * @property {Indexes} _1mPriceReturns - * @property {Indexes} _1wDcaAvgPrice - * @property {Indexes} _1wDcaReturns - * @property {Indexes} _1wDcaStack - * @property {Indexes} _1wPriceReturns - * @property {Indexes} _1yDcaAvgPrice - * @property {Indexes} _1yDcaReturns - * @property {Indexes} _1yDcaStack - * @property {Indexes} 
_1yPriceReturns - * @property {Indexes} _2yCagr - * @property {Indexes} _2yDcaAvgPrice - * @property {Indexes} _2yDcaCagr - * @property {Indexes} _2yDcaReturns - * @property {Indexes} _2yDcaStack - * @property {Indexes} _2yPriceReturns - * @property {Indexes} _3mDcaAvgPrice - * @property {Indexes} _3mDcaReturns - * @property {Indexes} _3mDcaStack - * @property {Indexes} _3mPriceReturns - * @property {Indexes} _3yCagr - * @property {Indexes} _3yDcaAvgPrice - * @property {Indexes} _3yDcaCagr - * @property {Indexes} _3yDcaReturns - * @property {Indexes} _3yDcaStack - * @property {Indexes} _3yPriceReturns - * @property {Indexes} _4yCagr - * @property {Indexes} _4yDcaAvgPrice - * @property {Indexes} _4yDcaCagr - * @property {Indexes} _4yDcaReturns - * @property {Indexes} _4yDcaStack - * @property {Indexes} _4yPriceReturns - * @property {Indexes} _5yCagr - * @property {Indexes} _5yDcaAvgPrice - * @property {Indexes} _5yDcaCagr - * @property {Indexes} _5yDcaReturns - * @property {Indexes} _5yDcaStack - * @property {Indexes} _5yPriceReturns - * @property {Indexes} _6mDcaAvgPrice - * @property {Indexes} _6mDcaReturns - * @property {Indexes} _6mDcaStack - * @property {Indexes} _6mPriceReturns - * @property {Indexes} _6yCagr - * @property {Indexes} _6yDcaAvgPrice - * @property {Indexes} _6yDcaCagr - * @property {Indexes} _6yDcaReturns - * @property {Indexes} _6yDcaStack - * @property {Indexes} _6yPriceReturns - * @property {Indexes} _8yCagr - * @property {Indexes} _8yDcaAvgPrice - * @property {Indexes} _8yDcaCagr - * @property {Indexes} _8yDcaReturns - * @property {Indexes} _8yDcaStack - * @property {Indexes} _8yPriceReturns - * @property {Indexes} daysSincePriceAth - * @property {Indexes} dcaClass2015AvgPrice - * @property {Indexes} dcaClass2015Returns - * @property {Indexes} dcaClass2015Stack - * @property {Indexes} dcaClass2016AvgPrice - * @property {Indexes} dcaClass2016Returns - * @property {Indexes} dcaClass2016Stack - * @property {Indexes} dcaClass2017AvgPrice - * 
@property {Indexes} dcaClass2017Returns - * @property {Indexes} dcaClass2017Stack - * @property {Indexes} dcaClass2018AvgPrice - * @property {Indexes} dcaClass2018Returns - * @property {Indexes} dcaClass2018Stack - * @property {Indexes} dcaClass2019AvgPrice - * @property {Indexes} dcaClass2019Returns - * @property {Indexes} dcaClass2019Stack - * @property {Indexes} dcaClass2020AvgPrice - * @property {Indexes} dcaClass2020Returns - * @property {Indexes} dcaClass2020Stack - * @property {Indexes} dcaClass2021AvgPrice - * @property {Indexes} dcaClass2021Returns - * @property {Indexes} dcaClass2021Stack - * @property {Indexes} dcaClass2022AvgPrice - * @property {Indexes} dcaClass2022Returns - * @property {Indexes} dcaClass2022Stack - * @property {Indexes} dcaClass2023AvgPrice - * @property {Indexes} dcaClass2023Returns - * @property {Indexes} dcaClass2023Stack - * @property {Indexes} dcaClass2024AvgPrice - * @property {Indexes} dcaClass2024Returns - * @property {Indexes} dcaClass2024Stack - * @property {Indexes} dcaClass2025AvgPrice - * @property {Indexes} dcaClass2025Returns - * @property {Indexes} dcaClass2025Stack - * @property {Indexes} maxDaysBetweenPriceAths - * @property {Indexes} maxYearsBetweenPriceAths - * @property {Indexes} price10yAgo - * @property {Price13dEmaPattern} price13dEma - * @property {Price13dEmaPattern} price13dSma - * @property {Price13dEmaPattern} price144dEma - * @property {Price13dEmaPattern} price144dSma - * @property {Indexes} price1dAgo - * @property {Indexes} price1mAgo - * @property {Price13dEmaPattern} price1mEma - * @property {Indexes} price1mMax - * @property {Indexes} price1mMin - * @property {Price13dEmaPattern} price1mSma - * @property {Indexes} price1mVolatility - * @property {Indexes} price1wAgo - * @property {Price13dEmaPattern} price1wEma - * @property {Indexes} price1wMax - * @property {Indexes} price1wMin - * @property {Price13dEmaPattern} price1wSma - * @property {Indexes} price1wVolatility - * @property {Indexes} 
price1yAgo - * @property {Price13dEmaPattern} price1yEma - * @property {Indexes} price1yMax - * @property {Indexes} price1yMin - * @property {Price13dEmaPattern} price1ySma - * @property {Indexes} price1yVolatility - * @property {Price13dEmaPattern} price200dEma - * @property {Price13dEmaPattern} price200dSma - * @property {Indexes} price200dSmaX08 - * @property {Indexes} price200dSmaX24 - * @property {Price13dEmaPattern} price200wEma - * @property {Price13dEmaPattern} price200wSma - * @property {Price13dEmaPattern} price21dEma - * @property {Price13dEmaPattern} price21dSma - * @property {Indexes} price2wChoppinessIndex - * @property {Indexes} price2wMax - * @property {Indexes} price2wMin - * @property {Indexes} price2yAgo - * @property {Price13dEmaPattern} price2yEma - * @property {Price13dEmaPattern} price2ySma - * @property {Price13dEmaPattern} price34dEma - * @property {Price13dEmaPattern} price34dSma - * @property {Indexes} price3mAgo - * @property {Indexes} price3yAgo - * @property {Indexes} price4yAgo - * @property {Price13dEmaPattern} price4yEma - * @property {Price13dEmaPattern} price4ySma - * @property {Price13dEmaPattern} price55dEma - * @property {Price13dEmaPattern} price55dSma - * @property {Indexes} price5yAgo - * @property {Indexes} price6mAgo - * @property {Indexes} price6yAgo - * @property {Price13dEmaPattern} price89dEma - * @property {Price13dEmaPattern} price89dSma - * @property {Price13dEmaPattern} price8dEma - * @property {Price13dEmaPattern} price8dSma - * @property {Indexes} price8yAgo - * @property {Indexes26} priceAth - * @property {Indexes26} priceDrawdown - * @property {Indexes5} priceTrueRange - * @property {Indexes5} priceTrueRange2wSum + * @property {MetricPattern4} _10yCagr + * @property {MetricPattern4} _10yPriceReturns + * @property {MetricPattern4} _1dPriceReturns + * @property {MetricPattern4} _1mPriceReturns + * @property {MetricPattern4} _1wPriceReturns + * @property {MetricPattern4} _1yPriceReturns + * @property 
{MetricPattern4} _2yCagr + * @property {MetricPattern4} _2yPriceReturns + * @property {MetricPattern4} _3mPriceReturns + * @property {MetricPattern4} _3yCagr + * @property {MetricPattern4} _3yPriceReturns + * @property {MetricPattern4} _4yCagr + * @property {MetricPattern4} _4yPriceReturns + * @property {MetricPattern4} _5yCagr + * @property {MetricPattern4} _5yPriceReturns + * @property {MetricPattern4} _6mPriceReturns + * @property {MetricPattern4} _6yCagr + * @property {MetricPattern4} _6yPriceReturns + * @property {MetricPattern4} _8yCagr + * @property {MetricPattern4} _8yPriceReturns + * @property {_1dReturns1mSdPattern} downside1mSd + * @property {_1dReturns1mSdPattern} downside1wSd + * @property {_1dReturns1mSdPattern} downside1ySd + * @property {MetricPattern21} downsideReturns + */ + +/** + * @typedef {Object} CatalogTree_Computed_Market_Volatility + * @property {MetricPattern4} price1mVolatility + * @property {MetricPattern4} price1wVolatility + * @property {MetricPattern4} price1yVolatility + * @property {MetricPattern21} sharpe1m + * @property {MetricPattern21} sharpe1w + * @property {MetricPattern21} sharpe1y + * @property {MetricPattern21} sortino1m + * @property {MetricPattern21} sortino1w + * @property {MetricPattern21} sortino1y + */ + +/** + * @typedef {Object} CatalogTree_Computed_Outputs + * @property {CatalogTree_Computed_Outputs_Count} count + * @property {CatalogTree_Computed_Outputs_Spent} spent + */ + +/** + * @typedef {Object} CatalogTree_Computed_Outputs_Count + * @property {BlockSizePattern} count + * @property {BitcoinPattern} utxoCount + */ + +/** + * @typedef {Object} CatalogTree_Computed_Outputs_Spent + * @property {MetricPattern29} txinindex */ /** * @typedef {Object} CatalogTree_Computed_Pools - * @property {Indexes2} pool + * @property {MetricPattern25} pool * @property {CatalogTree_Computed_Pools_Vecs} vecs */ @@ -4233,327 +4338,209 @@ function createRealizedPriceExtraPattern(client, basePath) { * @property {AXbtPattern} zulupool 
*/ +/** + * @typedef {Object} CatalogTree_Computed_Positions + * @property {MetricPattern16} position + */ + /** * @typedef {Object} CatalogTree_Computed_Price - * @property {Indexes3} priceClose - * @property {Indexes13} priceCloseInCents - * @property {Indexes3} priceCloseInSats - * @property {Indexes3} priceHigh - * @property {Indexes13} priceHighInCents - * @property {Indexes3} priceHighInSats - * @property {Indexes3} priceLow - * @property {Indexes13} priceLowInCents - * @property {Indexes3} priceLowInSats - * @property {Indexes3} priceOhlc - * @property {Indexes3} priceOhlcInSats - * @property {Indexes3} priceOpen - * @property {Indexes13} priceOpenInCents - * @property {Indexes3} priceOpenInSats + * @property {CatalogTree_Computed_Price_Ohlc} ohlc + * @property {CatalogTree_Computed_Price_Sats} sats + * @property {CatalogTree_Computed_Price_Usd} usd */ /** - * @typedef {Object} CatalogTree_Computed_Stateful - * @property {Indexes3} addrCount - * @property {CatalogTree_Computed_Stateful_AddressCohorts} addressCohorts - * @property {CatalogTree_Computed_Stateful_AddressesData} addressesData - * @property {AddresstypeToHeightToAddrCountPattern} addresstypeToHeightToAddrCount - * @property {AddresstypeToHeightToAddrCountPattern} addresstypeToHeightToEmptyAddrCount - * @property {AddresstypeToHeightToAddrCountPattern} addresstypeToIndexesToAddrCount - * @property {AddresstypeToHeightToAddrCountPattern} addresstypeToIndexesToEmptyAddrCount - * @property {AddresstypeToHeightToAddrCountPattern} anyAddressIndexes - * @property {Indexes2} chainState - * @property {Indexes3} emptyAddrCount - * @property {Indexes29} emptyaddressindex - * @property {Indexes30} loadedaddressindex - * @property {Indexes26} marketCap - * @property {SupplyPattern} opreturnSupply - * @property {SupplyPattern} unspendableSupply - * @property {CatalogTree_Computed_Stateful_UtxoCohorts} utxoCohorts + * @typedef {Object} CatalogTree_Computed_Price_Ohlc + * @property {MetricPattern9} ohlcInCents 
*/ /** - * @typedef {Object} CatalogTree_Computed_Stateful_AddressCohorts - * @property {CatalogTree_Computed_Stateful_AddressCohorts_AmountRange} amountRange - * @property {CatalogTree_Computed_Stateful_AddressCohorts_GeAmount} geAmount - * @property {CatalogTree_Computed_Stateful_AddressCohorts_LtAmount} ltAmount + * @typedef {Object} CatalogTree_Computed_Price_Sats + * @property {MetricPattern1} priceCloseInSats + * @property {MetricPattern1} priceHighInSats + * @property {MetricPattern1} priceLowInSats + * @property {MetricPattern1} priceOhlcInSats + * @property {MetricPattern1} priceOpenInSats */ /** - * @typedef {Object} CatalogTree_Computed_Stateful_AddressCohorts_AmountRange - * @property {_0satsPattern} _0sats - * @property {_0satsPattern} _100btcTo1kBtc - * @property {_0satsPattern} _100kBtcOrMore - * @property {_0satsPattern} _100kSatsTo1mSats - * @property {_0satsPattern} _100satsTo1kSats - * @property {_0satsPattern} _10btcTo100btc - * @property {_0satsPattern} _10kBtcTo100kBtc - * @property {_0satsPattern} _10kSatsTo100kSats - * @property {_0satsPattern} _10mSatsTo1btc - * @property {_0satsPattern} _10satsTo100sats - * @property {_0satsPattern} _1btcTo10btc - * @property {_0satsPattern} _1kBtcTo10kBtc - * @property {_0satsPattern} _1kSatsTo10kSats - * @property {_0satsPattern} _1mSatsTo10mSats - * @property {_0satsPattern} _1satTo10sats + * @typedef {Object} CatalogTree_Computed_Price_Usd + * @property {MetricPattern1} priceClose + * @property {MetricPattern9} priceCloseInCents + * @property {MetricPattern1} priceHigh + * @property {MetricPattern9} priceHighInCents + * @property {MetricPattern1} priceLow + * @property {MetricPattern9} priceLowInCents + * @property {MetricPattern1} priceOhlc + * @property {MetricPattern1} priceOpen + * @property {MetricPattern9} priceOpenInCents */ /** - * @typedef {Object} CatalogTree_Computed_Stateful_AddressCohorts_GeAmount - * @property {_0satsPattern} _100btc - * @property {_0satsPattern} _100kSats - * @property 
{_0satsPattern} _100sats - * @property {_0satsPattern} _10btc - * @property {_0satsPattern} _10kBtc - * @property {_0satsPattern} _10kSats - * @property {_0satsPattern} _10mSats - * @property {_0satsPattern} _10sats - * @property {_0satsPattern} _1btc - * @property {_0satsPattern} _1kBtc - * @property {_0satsPattern} _1kSats - * @property {_0satsPattern} _1mSats - * @property {_0satsPattern} _1sat + * @typedef {Object} CatalogTree_Computed_Scripts + * @property {CatalogTree_Computed_Scripts_Count} count + * @property {CatalogTree_Computed_Scripts_Value} value */ /** - * @typedef {Object} CatalogTree_Computed_Stateful_AddressCohorts_LtAmount - * @property {_0satsPattern} _100btc - * @property {_0satsPattern} _100kBtc - * @property {_0satsPattern} _100kSats - * @property {_0satsPattern} _100sats - * @property {_0satsPattern} _10btc - * @property {_0satsPattern} _10kBtc - * @property {_0satsPattern} _10kSats - * @property {_0satsPattern} _10mSats - * @property {_0satsPattern} _10sats - * @property {_0satsPattern} _1btc - * @property {_0satsPattern} _1kBtc - * @property {_0satsPattern} _1kSats - * @property {_0satsPattern} _1mSats + * @typedef {Object} CatalogTree_Computed_Scripts_Count + * @property {BitcoinPattern} emptyoutputCount + * @property {BitcoinPattern} opreturnCount + * @property {BitcoinPattern} p2aCount + * @property {BitcoinPattern} p2msCount + * @property {BitcoinPattern} p2pk33Count + * @property {BitcoinPattern} p2pk65Count + * @property {BitcoinPattern} p2pkhCount + * @property {BitcoinPattern} p2shCount + * @property {BitcoinPattern} p2trCount + * @property {BitcoinPattern} p2wpkhCount + * @property {BitcoinPattern} p2wshCount + * @property {SegwitAdoptionPattern} segwitAdoption + * @property {BitcoinPattern} segwitCount + * @property {SegwitAdoptionPattern} taprootAdoption + * @property {BitcoinPattern} unknownoutputCount */ /** - * @typedef {Object} CatalogTree_Computed_Stateful_AddressesData - * @property {Indexes29} empty - * @property 
{Indexes30} loaded + * @typedef {Object} CatalogTree_Computed_Scripts_Value + * @property {CatalogTree_Computed_Scripts_Value_OpreturnValue} opreturnValue */ /** - * @typedef {Object} CatalogTree_Computed_Stateful_UtxoCohorts - * @property {CatalogTree_Computed_Stateful_UtxoCohorts_AgeRange} ageRange - * @property {CatalogTree_Computed_Stateful_UtxoCohorts_All} all - * @property {CatalogTree_Computed_Stateful_UtxoCohorts_AmountRange} amountRange - * @property {CatalogTree_Computed_Stateful_UtxoCohorts_Epoch} epoch - * @property {CatalogTree_Computed_Stateful_UtxoCohorts_GeAmount} geAmount - * @property {CatalogTree_Computed_Stateful_UtxoCohorts_LtAmount} ltAmount - * @property {CatalogTree_Computed_Stateful_UtxoCohorts_MaxAge} maxAge - * @property {CatalogTree_Computed_Stateful_UtxoCohorts_MinAge} minAge - * @property {CatalogTree_Computed_Stateful_UtxoCohorts_Term} term - * @property {CatalogTree_Computed_Stateful_UtxoCohorts_Type} type - * @property {CatalogTree_Computed_Stateful_UtxoCohorts_Year} year + * @typedef {Object} CatalogTree_Computed_Scripts_Value_OpreturnValue + * @property {MetricPattern25} base + * @property {SegwitAdoptionPattern} bitcoin + * @property {SegwitAdoptionPattern} dollars + * @property {CatalogTree_Computed_Scripts_Value_OpreturnValue_Sats} sats */ /** - * @typedef {Object} CatalogTree_Computed_Stateful_UtxoCohorts_AgeRange - * @property {_10yTo12yPattern} _10yTo12y - * @property {_10yTo12yPattern} _12yTo15y - * @property {_10yTo12yPattern} _1dTo1w - * @property {_10yTo12yPattern} _1mTo2m - * @property {_10yTo12yPattern} _1wTo1m - * @property {_10yTo12yPattern} _1yTo2y - * @property {_10yTo12yPattern} _2mTo3m - * @property {_10yTo12yPattern} _2yTo3y - * @property {_10yTo12yPattern} _3mTo4m - * @property {_10yTo12yPattern} _3yTo4y - * @property {_10yTo12yPattern} _4mTo5m - * @property {_10yTo12yPattern} _4yTo5y - * @property {_10yTo12yPattern} _5mTo6m - * @property {_10yTo12yPattern} _5yTo6y - * @property {_10yTo12yPattern} _6mTo1y - * 
@property {_10yTo12yPattern} _6yTo7y - * @property {_10yTo12yPattern} _7yTo8y - * @property {_10yTo12yPattern} _8yTo10y - * @property {_10yTo12yPattern} from15y - * @property {UpTo1dPattern} upTo1d + * @typedef {Object} CatalogTree_Computed_Scripts_Value_OpreturnValue_Sats + * @property {MetricPattern2} average + * @property {MetricPattern1} cumulative + * @property {MetricPattern2} max + * @property {MetricPattern2} min + * @property {MetricPattern2} sum */ /** - * @typedef {Object} CatalogTree_Computed_Stateful_UtxoCohorts_All - * @property {ActivityPattern} activity - * @property {PricePaidPattern2} pricePaid - * @property {RealizedPattern3} realized - * @property {CatalogTree_Computed_Stateful_UtxoCohorts_All_Relative} relative - * @property {SupplyPattern2} supply - * @property {UnrealizedPattern} unrealized + * @typedef {Object} CatalogTree_Computed_Supply + * @property {CatalogTree_Computed_Supply_Burned} burned + * @property {CatalogTree_Computed_Supply_Circulating} circulating + * @property {CatalogTree_Computed_Supply_Inflation} inflation + * @property {CatalogTree_Computed_Supply_MarketCap} marketCap + * @property {CatalogTree_Computed_Supply_Velocity} velocity */ /** - * @typedef {Object} CatalogTree_Computed_Stateful_UtxoCohorts_All_Relative - * @property {Indexes27} negUnrealizedLossRelToOwnTotalUnrealizedPnl - * @property {Indexes26} netUnrealizedPnlRelToOwnTotalUnrealizedPnl - * @property {Indexes27} supplyInLossRelToOwnSupply - * @property {Indexes27} supplyInProfitRelToOwnSupply - * @property {Indexes27} unrealizedLossRelToOwnTotalUnrealizedPnl - * @property {Indexes27} unrealizedProfitRelToOwnTotalUnrealizedPnl + * @typedef {Object} CatalogTree_Computed_Supply_Burned + * @property {OpreturnPattern} opreturn + * @property {OpreturnPattern} unspendable */ /** - * @typedef {Object} CatalogTree_Computed_Stateful_UtxoCohorts_AmountRange - * @property {_0satsPattern2} _0sats - * @property {_0satsPattern2} _100btcTo1kBtc - * @property {_0satsPattern2} 
_100kBtcOrMore - * @property {_0satsPattern2} _100kSatsTo1mSats - * @property {_0satsPattern2} _100satsTo1kSats - * @property {_0satsPattern2} _10btcTo100btc - * @property {_0satsPattern2} _10kBtcTo100kBtc - * @property {_0satsPattern2} _10kSatsTo100kSats - * @property {_0satsPattern2} _10mSatsTo1btc - * @property {_0satsPattern2} _10satsTo100sats - * @property {_0satsPattern2} _1btcTo10btc - * @property {_0satsPattern2} _1kBtcTo10kBtc - * @property {_0satsPattern2} _1kSatsTo10kSats - * @property {_0satsPattern2} _1mSatsTo10mSats - * @property {_0satsPattern2} _1satTo10sats + * @typedef {Object} CatalogTree_Computed_Supply_Circulating + * @property {MetricPattern25} btc + * @property {ActiveSupplyPattern} indexes + * @property {MetricPattern25} sats + * @property {MetricPattern25} usd */ /** - * @typedef {Object} CatalogTree_Computed_Stateful_UtxoCohorts_Epoch - * @property {_10yTo12yPattern} _0 - * @property {_10yTo12yPattern} _1 - * @property {_10yTo12yPattern} _2 - * @property {_10yTo12yPattern} _3 - * @property {_10yTo12yPattern} _4 + * @typedef {Object} CatalogTree_Computed_Supply_Inflation + * @property {MetricPattern4} indexes */ /** - * @typedef {Object} CatalogTree_Computed_Stateful_UtxoCohorts_GeAmount - * @property {_0satsPattern2} _100btc - * @property {_0satsPattern2} _100kSats - * @property {_0satsPattern2} _100sats - * @property {_0satsPattern2} _10btc - * @property {_0satsPattern2} _10kBtc - * @property {_0satsPattern2} _10kSats - * @property {_0satsPattern2} _10mSats - * @property {_0satsPattern2} _10sats - * @property {_0satsPattern2} _1btc - * @property {_0satsPattern2} _1kBtc - * @property {_0satsPattern2} _1kSats - * @property {_0satsPattern2} _1mSats - * @property {_0satsPattern2} _1sat + * @typedef {Object} CatalogTree_Computed_Supply_MarketCap + * @property {MetricPattern25} height + * @property {MetricPattern4} indexes */ /** - * @typedef {Object} CatalogTree_Computed_Stateful_UtxoCohorts_LtAmount - * @property {_0satsPattern2} _100btc - * 
@property {_0satsPattern2} _100kBtc - * @property {_0satsPattern2} _100kSats - * @property {_0satsPattern2} _100sats - * @property {_0satsPattern2} _10btc - * @property {_0satsPattern2} _10kBtc - * @property {_0satsPattern2} _10kSats - * @property {_0satsPattern2} _10mSats - * @property {_0satsPattern2} _10sats - * @property {_0satsPattern2} _1btc - * @property {_0satsPattern2} _1kBtc - * @property {_0satsPattern2} _1kSats - * @property {_0satsPattern2} _1mSats + * @typedef {Object} CatalogTree_Computed_Supply_Velocity + * @property {MetricPattern4} btc + * @property {MetricPattern4} usd */ /** - * @typedef {Object} CatalogTree_Computed_Stateful_UtxoCohorts_MaxAge - * @property {UpTo1dPattern} _10y - * @property {UpTo1dPattern} _12y - * @property {UpTo1dPattern} _15y - * @property {UpTo1dPattern} _1m - * @property {UpTo1dPattern} _1w - * @property {UpTo1dPattern} _1y - * @property {UpTo1dPattern} _2m - * @property {UpTo1dPattern} _2y - * @property {UpTo1dPattern} _3m - * @property {UpTo1dPattern} _3y - * @property {UpTo1dPattern} _4m - * @property {UpTo1dPattern} _4y - * @property {UpTo1dPattern} _5m - * @property {UpTo1dPattern} _5y - * @property {UpTo1dPattern} _6m - * @property {UpTo1dPattern} _6y - * @property {UpTo1dPattern} _7y - * @property {UpTo1dPattern} _8y + * @typedef {Object} CatalogTree_Computed_Transactions + * @property {CatalogTree_Computed_Transactions_Count} count + * @property {CatalogTree_Computed_Transactions_Fees} fees + * @property {CatalogTree_Computed_Transactions_Size} size + * @property {CatalogTree_Computed_Transactions_Versions} versions + * @property {CatalogTree_Computed_Transactions_Volume} volume */ /** - * @typedef {Object} CatalogTree_Computed_Stateful_UtxoCohorts_MinAge - * @property {_10yTo12yPattern} _10y - * @property {_10yTo12yPattern} _12y - * @property {_10yTo12yPattern} _1d - * @property {_10yTo12yPattern} _1m - * @property {_10yTo12yPattern} _1w - * @property {_10yTo12yPattern} _1y - * @property {_10yTo12yPattern} _2m - 
* @property {_10yTo12yPattern} _2y - * @property {_10yTo12yPattern} _3m - * @property {_10yTo12yPattern} _3y - * @property {_10yTo12yPattern} _4m - * @property {_10yTo12yPattern} _4y - * @property {_10yTo12yPattern} _5m - * @property {_10yTo12yPattern} _5y - * @property {_10yTo12yPattern} _6m - * @property {_10yTo12yPattern} _6y - * @property {_10yTo12yPattern} _7y - * @property {_10yTo12yPattern} _8y + * @typedef {Object} CatalogTree_Computed_Transactions_Count + * @property {MetricPattern41} isCoinbase + * @property {BitcoinPattern} txCount */ /** - * @typedef {Object} CatalogTree_Computed_Stateful_UtxoCohorts_Term - * @property {UpTo1dPattern} long - * @property {UpTo1dPattern} short + * @typedef {Object} CatalogTree_Computed_Transactions_Fees + * @property {CatalogTree_Computed_Transactions_Fees_Fee} fee + * @property {CatalogTree_Computed_Transactions_Fees_FeeRate} feeRate + * @property {MetricPattern41} inputValue + * @property {MetricPattern41} outputValue */ /** - * @typedef {Object} CatalogTree_Computed_Stateful_UtxoCohorts_Type - * @property {_0satsPattern2} empty - * @property {_0satsPattern2} p2a - * @property {_0satsPattern2} p2ms - * @property {_0satsPattern2} p2pk33 - * @property {_0satsPattern2} p2pk65 - * @property {_0satsPattern2} p2pkh - * @property {_0satsPattern2} p2sh - * @property {_0satsPattern2} p2tr - * @property {_0satsPattern2} p2wpkh - * @property {_0satsPattern2} p2wsh - * @property {_0satsPattern2} unknown + * @typedef {Object} CatalogTree_Computed_Transactions_Fees_Fee + * @property {MetricPattern41} base + * @property {BlockSizePattern} bitcoin + * @property {MetricPattern41} bitcoinTxindex + * @property {BlockSizePattern} dollars + * @property {MetricPattern41} dollarsTxindex + * @property {BlockSizePattern} sats */ /** - * @typedef {Object} CatalogTree_Computed_Stateful_UtxoCohorts_Year - * @property {_10yTo12yPattern} _2009 - * @property {_10yTo12yPattern} _2010 - * @property {_10yTo12yPattern} _2011 - * @property 
{_10yTo12yPattern} _2012 - * @property {_10yTo12yPattern} _2013 - * @property {_10yTo12yPattern} _2014 - * @property {_10yTo12yPattern} _2015 - * @property {_10yTo12yPattern} _2016 - * @property {_10yTo12yPattern} _2017 - * @property {_10yTo12yPattern} _2018 - * @property {_10yTo12yPattern} _2019 - * @property {_10yTo12yPattern} _2020 - * @property {_10yTo12yPattern} _2021 - * @property {_10yTo12yPattern} _2022 - * @property {_10yTo12yPattern} _2023 - * @property {_10yTo12yPattern} _2024 - * @property {_10yTo12yPattern} _2025 - * @property {_10yTo12yPattern} _2026 + * @typedef {Object} CatalogTree_Computed_Transactions_Fees_FeeRate + * @property {MetricPattern1} average + * @property {MetricPattern41} base + * @property {MetricPattern1} max + * @property {MetricPattern25} median + * @property {MetricPattern1} min + * @property {MetricPattern25} pct10 + * @property {MetricPattern25} pct25 + * @property {MetricPattern25} pct75 + * @property {MetricPattern25} pct90 */ /** - * @typedef {Object} CatalogTree_Computed_Txins - * @property {Indexes24} txoutindex - * @property {Indexes24} value + * @typedef {Object} CatalogTree_Computed_Transactions_Size + * @property {BlockIntervalPattern} txVsize + * @property {BlockIntervalPattern} txWeight + * @property {MetricPattern41} vsize + * @property {MetricPattern41} weight */ /** - * @typedef {Object} CatalogTree_Computed_Txouts - * @property {Indexes25} txinindex + * @typedef {Object} CatalogTree_Computed_Transactions_Versions + * @property {BlockCountPattern} txV1 + * @property {BlockCountPattern} txV2 + * @property {BlockCountPattern} txV3 + */ + +/** + * @typedef {Object} CatalogTree_Computed_Transactions_Volume + * @property {MetricPattern4} annualizedVolume + * @property {MetricPattern4} annualizedVolumeBtc + * @property {MetricPattern4} annualizedVolumeUsd + * @property {MetricPattern4} inputsPerSec + * @property {MetricPattern4} outputsPerSec + * @property {CatalogTree_Computed_Transactions_Volume_SentSum} sentSum + * 
@property {MetricPattern4} txPerSec + */ + +/** + * @typedef {Object} CatalogTree_Computed_Transactions_Volume_SentSum + * @property {TotalRealizedPnlPattern} bitcoin + * @property {MetricPattern1} dollars + * @property {MetricPattern1} sats */ /** @@ -4568,72 +4555,72 @@ function createRealizedPriceExtraPattern(client, basePath) { /** * @typedef {Object} CatalogTree_Indexed_Address - * @property {Indexes2} firstP2aaddressindex - * @property {Indexes2} firstP2pk33addressindex - * @property {Indexes2} firstP2pk65addressindex - * @property {Indexes2} firstP2pkhaddressindex - * @property {Indexes2} firstP2shaddressindex - * @property {Indexes2} firstP2traddressindex - * @property {Indexes2} firstP2wpkhaddressindex - * @property {Indexes2} firstP2wshaddressindex - * @property {Indexes16} p2abytes - * @property {Indexes17} p2pk33bytes - * @property {Indexes18} p2pk65bytes - * @property {Indexes19} p2pkhbytes - * @property {Indexes20} p2shbytes - * @property {Indexes21} p2trbytes - * @property {Indexes22} p2wpkhbytes - * @property {Indexes23} p2wshbytes + * @property {MetricPattern25} firstP2aaddressindex + * @property {MetricPattern25} firstP2pk33addressindex + * @property {MetricPattern25} firstP2pk65addressindex + * @property {MetricPattern25} firstP2pkhaddressindex + * @property {MetricPattern25} firstP2shaddressindex + * @property {MetricPattern25} firstP2traddressindex + * @property {MetricPattern25} firstP2wpkhaddressindex + * @property {MetricPattern25} firstP2wshaddressindex + * @property {MetricPattern30} p2abytes + * @property {MetricPattern32} p2pk33bytes + * @property {MetricPattern33} p2pk65bytes + * @property {MetricPattern34} p2pkhbytes + * @property {MetricPattern35} p2shbytes + * @property {MetricPattern36} p2trbytes + * @property {MetricPattern37} p2wpkhbytes + * @property {MetricPattern38} p2wshbytes */ /** * @typedef {Object} CatalogTree_Indexed_Block - * @property {Indexes2} blockhash - * @property {Indexes2} difficulty - * @property {Indexes2} 
timestamp - * @property {Indexes2} totalSize - * @property {Indexes2} weight + * @property {MetricPattern25} blockhash + * @property {MetricPattern25} difficulty + * @property {MetricPattern25} timestamp + * @property {MetricPattern25} totalSize + * @property {MetricPattern25} weight */ /** * @typedef {Object} CatalogTree_Indexed_Output - * @property {Indexes2} firstEmptyoutputindex - * @property {Indexes2} firstOpreturnindex - * @property {Indexes2} firstP2msoutputindex - * @property {Indexes2} firstUnknownoutputindex - * @property {MetricNode} txindex + * @property {MetricPattern25} firstEmptyoutputindex + * @property {MetricPattern25} firstOpreturnindex + * @property {MetricPattern25} firstP2msoutputindex + * @property {MetricPattern25} firstUnknownoutputindex + * @property {MetricPattern7} txindex */ /** * @typedef {Object} CatalogTree_Indexed_Tx - * @property {Indexes6} baseSize - * @property {Indexes2} firstTxindex - * @property {Indexes6} firstTxinindex - * @property {Indexes6} firstTxoutindex - * @property {Indexes6} height - * @property {Indexes6} isExplicitlyRbf - * @property {Indexes6} rawlocktime - * @property {Indexes6} totalSize - * @property {Indexes6} txid - * @property {Indexes6} txversion + * @property {MetricPattern41} baseSize + * @property {MetricPattern25} firstTxindex + * @property {MetricPattern41} firstTxinindex + * @property {MetricPattern41} firstTxoutindex + * @property {MetricPattern41} height + * @property {MetricPattern41} isExplicitlyRbf + * @property {MetricPattern41} rawlocktime + * @property {MetricPattern41} totalSize + * @property {MetricPattern41} txid + * @property {MetricPattern41} txversion */ /** * @typedef {Object} CatalogTree_Indexed_Txin - * @property {Indexes2} firstTxinindex - * @property {Indexes24} outpoint - * @property {Indexes24} outputtype - * @property {Indexes24} txindex - * @property {Indexes24} typeindex + * @property {MetricPattern25} firstTxinindex + * @property {MetricPattern26} outpoint + * @property 
{MetricPattern26} outputtype + * @property {MetricPattern26} txindex + * @property {MetricPattern26} typeindex */ /** * @typedef {Object} CatalogTree_Indexed_Txout - * @property {Indexes2} firstTxoutindex - * @property {Indexes25} outputtype - * @property {Indexes25} txindex - * @property {Indexes25} typeindex - * @property {Indexes25} value + * @property {MetricPattern25} firstTxoutindex + * @property {MetricPattern29} outputtype + * @property {MetricPattern29} txindex + * @property {MetricPattern29} typeindex + * @property {MetricPattern29} value */ /** @@ -4641,6 +4628,894 @@ function createRealizedPriceExtraPattern(client, basePath) { * @extends BrkClientBase */ class BrkClient extends BrkClientBase { + VERSION = "v0.1.0-alpha.1"; + + INDEXES = /** @type {const} */ ([ + "dateindex", + "decadeindex", + "difficultyepoch", + "emptyoutputindex", + "halvingepoch", + "height", + "txinindex", + "monthindex", + "opreturnindex", + "txoutindex", + "p2aaddressindex", + "p2msoutputindex", + "p2pk33addressindex", + "p2pk65addressindex", + "p2pkhaddressindex", + "p2shaddressindex", + "p2traddressindex", + "p2wpkhaddressindex", + "p2wshaddressindex", + "quarterindex", + "semesterindex", + "txindex", + "unknownoutputindex", + "weekindex", + "yearindex", + "loadedaddressindex", + "emptyaddressindex" + ]); + + POOL_ID_TO_POOL_NAME = /** @type {const} */ ({ + "unknown": "Unknown", + "blockfills": "BlockFills", + "ultimuspool": "ULTIMUSPOOL", + "terrapool": "Terra Pool", + "luxor": "Luxor", + "onethash": "1THash", + "btccom": "BTC.com", + "bitfarms": "Bitfarms", + "huobipool": "Huobi.pool", + "wayicn": "WAYI.CN", + "canoepool": "CanoePool", + "btctop": "BTC.TOP", + "bitcoincom": "Bitcoin.com", + "pool175btc": "175btc", + "gbminers": "GBMiners", + "axbt": "A-XBT", + "asicminer": "ASICMiner", + "bitminter": "BitMinter", + "bitcoinrussia": "BitcoinRussia", + "btcserv": "BTCServ", + "simplecoinus": "simplecoin.us", + "btcguild": "BTC Guild", + "eligius": "Eligius", + "ozcoin": 
"OzCoin", + "eclipsemc": "EclipseMC", + "maxbtc": "MaxBTC", + "triplemining": "TripleMining", + "coinlab": "CoinLab", + "pool50btc": "50BTC", + "ghashio": "GHash.IO", + "stminingcorp": "ST Mining Corp", + "bitparking": "Bitparking", + "mmpool": "mmpool", + "polmine": "Polmine", + "kncminer": "KnCMiner", + "bitalo": "Bitalo", + "f2pool": "F2Pool", + "hhtt": "HHTT", + "megabigpower": "MegaBigPower", + "mtred": "Mt Red", + "nmcbit": "NMCbit", + "yourbtcnet": "Yourbtc.net", + "givemecoins": "Give Me Coins", + "braiinspool": "Braiins Pool", + "antpool": "AntPool", + "multicoinco": "MultiCoin.co", + "bcpoolio": "bcpool.io", + "cointerra": "Cointerra", + "kanopool": "KanoPool", + "solock": "Solo CK", + "ckpool": "CKPool", + "nicehash": "NiceHash", + "bitclub": "BitClub", + "bitcoinaffiliatenetwork": "Bitcoin Affiliate Network", + "btcc": "BTCC", + "bwpool": "BWPool", + "exxbw": "EXX&BW", + "bitsolo": "Bitsolo", + "bitfury": "BitFury", + "twentyoneinc": "21 Inc.", + "digitalbtc": "digitalBTC", + "eightbaochi": "8baochi", + "mybtccoinpool": "myBTCcoin Pool", + "tbdice": "TBDice", + "hashpool": "HASHPOOL", + "nexious": "Nexious", + "bravomining": "Bravo Mining", + "hotpool": "HotPool", + "okexpool": "OKExPool", + "bcmonster": "BCMonster", + "onehash": "1Hash", + "bixin": "Bixin", + "tatmaspool": "TATMAS Pool", + "viabtc": "ViaBTC", + "connectbtc": "ConnectBTC", + "batpool": "BATPOOL", + "waterhole": "Waterhole", + "dcexploration": "DCExploration", + "dcex": "DCEX", + "btpool": "BTPOOL", + "fiftyeightcoin": "58COIN", + "bitcoinindia": "Bitcoin India", + "shawnp0wers": "shawnp0wers", + "phashio": "PHash.IO", + "rigpool": "RigPool", + "haozhuzhu": "HAOZHUZHU", + "sevenpool": "7pool", + "miningkings": "MiningKings", + "hashbx": "HashBX", + "dpool": "DPOOL", + "rawpool": "Rawpool", + "haominer": "haominer", + "helix": "Helix", + "bitcoinukraine": "Bitcoin-Ukraine", + "poolin": "Poolin", + "secretsuperstar": "SecretSuperstar", + "tigerpoolnet": "tigerpool.net", + "sigmapoolcom": 
"Sigmapool.com", + "okpooltop": "okpool.top", + "hummerpool": "Hummerpool", + "tangpool": "Tangpool", + "bytepool": "BytePool", + "spiderpool": "SpiderPool", + "novablock": "NovaBlock", + "miningcity": "MiningCity", + "binancepool": "Binance Pool", + "minerium": "Minerium", + "lubiancom": "Lubian.com", + "okkong": "OKKONG", + "aaopool": "AAO Pool", + "emcdpool": "EMCDPool", + "foundryusa": "Foundry USA", + "sbicrypto": "SBI Crypto", + "arkpool": "ArkPool", + "purebtccom": "PureBTC.COM", + "marapool": "MARA Pool", + "kucoinpool": "KuCoinPool", + "entrustcharitypool": "Entrust Charity Pool", + "okminer": "OKMINER", + "titan": "Titan", + "pegapool": "PEGA Pool", + "btcnuggets": "BTC Nuggets", + "cloudhashing": "CloudHashing", + "digitalxmintsy": "digitalX Mintsy", + "telco214": "Telco 214", + "btcpoolparty": "BTC Pool Party", + "multipool": "Multipool", + "transactioncoinmining": "transactioncoinmining", + "btcdig": "BTCDig", + "trickysbtcpool": "Tricky's BTC Pool", + "btcmp": "BTCMP", + "eobot": "Eobot", + "unomp": "UNOMP", + "patels": "Patels", + "gogreenlight": "GoGreenLight", + "ekanembtc": "EkanemBTC", + "canoe": "CANOE", + "tiger": "tiger", + "onem1x": "1M1X", + "zulupool": "Zulupool", + "secpool": "SECPOOL", + "ocean": "OCEAN", + "whitepool": "WhitePool", + "wk057": "wk057", + "futurebitapollosolo": "FutureBit Apollo Solo", + "carbonnegative": "Carbon Negative", + "portlandhodl": "Portland.HODL", + "phoenix": "Phoenix", + "neopool": "Neopool", + "maxipool": "MaxiPool", + "bitfufupool": "BitFuFuPool", + "luckypool": "luckyPool", + "miningdutch": "Mining-Dutch", + "publicpool": "Public Pool", + "miningsquared": "Mining Squared", + "innopolistech": "Innopolis Tech", + "btclab": "BTCLab", + "parasite": "Parasite" + }); + + TERM_NAMES = /** @type {const} */ ({ + "short": { + "id": "sth", + "short": "STH", + "long": "Short Term Holders" + }, + "long": { + "id": "lth", + "short": "LTH", + "long": "Long Term Holders" + } + }); + + EPOCH_NAMES = /** @type {const} */ ({ 
+ "_0": { + "id": "epoch_0", + "short": "Epoch 0", + "long": "Epoch 0" + }, + "_1": { + "id": "epoch_1", + "short": "Epoch 1", + "long": "Epoch 1" + }, + "_2": { + "id": "epoch_2", + "short": "Epoch 2", + "long": "Epoch 2" + }, + "_3": { + "id": "epoch_3", + "short": "Epoch 3", + "long": "Epoch 3" + }, + "_4": { + "id": "epoch_4", + "short": "Epoch 4", + "long": "Epoch 4" + } + }); + + YEAR_NAMES = /** @type {const} */ ({ + "_2009": { + "id": "year_2009", + "short": "2009", + "long": "Year 2009" + }, + "_2010": { + "id": "year_2010", + "short": "2010", + "long": "Year 2010" + }, + "_2011": { + "id": "year_2011", + "short": "2011", + "long": "Year 2011" + }, + "_2012": { + "id": "year_2012", + "short": "2012", + "long": "Year 2012" + }, + "_2013": { + "id": "year_2013", + "short": "2013", + "long": "Year 2013" + }, + "_2014": { + "id": "year_2014", + "short": "2014", + "long": "Year 2014" + }, + "_2015": { + "id": "year_2015", + "short": "2015", + "long": "Year 2015" + }, + "_2016": { + "id": "year_2016", + "short": "2016", + "long": "Year 2016" + }, + "_2017": { + "id": "year_2017", + "short": "2017", + "long": "Year 2017" + }, + "_2018": { + "id": "year_2018", + "short": "2018", + "long": "Year 2018" + }, + "_2019": { + "id": "year_2019", + "short": "2019", + "long": "Year 2019" + }, + "_2020": { + "id": "year_2020", + "short": "2020", + "long": "Year 2020" + }, + "_2021": { + "id": "year_2021", + "short": "2021", + "long": "Year 2021" + }, + "_2022": { + "id": "year_2022", + "short": "2022", + "long": "Year 2022" + }, + "_2023": { + "id": "year_2023", + "short": "2023", + "long": "Year 2023" + }, + "_2024": { + "id": "year_2024", + "short": "2024", + "long": "Year 2024" + }, + "_2025": { + "id": "year_2025", + "short": "2025", + "long": "Year 2025" + }, + "_2026": { + "id": "year_2026", + "short": "2026", + "long": "Year 2026" + } + }); + + SPENDABLE_TYPE_NAMES = /** @type {const} */ ({ + "p2pk65": { + "id": "p2pk65", + "short": "P2PK65", + "long": "Pay to Public 
Key (65 bytes)" + }, + "p2pk33": { + "id": "p2pk33", + "short": "P2PK33", + "long": "Pay to Public Key (33 bytes)" + }, + "p2pkh": { + "id": "p2pkh", + "short": "P2PKH", + "long": "Pay to Public Key Hash" + }, + "p2ms": { + "id": "p2ms", + "short": "P2MS", + "long": "Pay to Multisig" + }, + "p2sh": { + "id": "p2sh", + "short": "P2SH", + "long": "Pay to Script Hash" + }, + "p2wpkh": { + "id": "p2wpkh", + "short": "P2WPKH", + "long": "Pay to Witness Public Key Hash" + }, + "p2wsh": { + "id": "p2wsh", + "short": "P2WSH", + "long": "Pay to Witness Script Hash" + }, + "p2tr": { + "id": "p2tr", + "short": "P2TR", + "long": "Pay to Taproot" + }, + "p2a": { + "id": "p2a", + "short": "P2A", + "long": "Pay to Anchor" + }, + "unknown": { + "id": "unknown_outputs", + "short": "Unknown", + "long": "Unknown Output Type" + }, + "empty": { + "id": "empty_outputs", + "short": "Empty", + "long": "Empty Output" + } + }); + + AGE_RANGE_NAMES = /** @type {const} */ ({ + "upTo1d": { + "id": "up_to_1d_old", + "short": "<1d", + "long": "Up to 1 Day Old" + }, + "_1dTo1w": { + "id": "at_least_1d_up_to_1w_old", + "short": "1d-1w", + "long": "1 Day to 1 Week Old" + }, + "_1wTo1m": { + "id": "at_least_1w_up_to_1m_old", + "short": "1w-1m", + "long": "1 Week to 1 Month Old" + }, + "_1mTo2m": { + "id": "at_least_1m_up_to_2m_old", + "short": "1m-2m", + "long": "1 to 2 Months Old" + }, + "_2mTo3m": { + "id": "at_least_2m_up_to_3m_old", + "short": "2m-3m", + "long": "2 to 3 Months Old" + }, + "_3mTo4m": { + "id": "at_least_3m_up_to_4m_old", + "short": "3m-4m", + "long": "3 to 4 Months Old" + }, + "_4mTo5m": { + "id": "at_least_4m_up_to_5m_old", + "short": "4m-5m", + "long": "4 to 5 Months Old" + }, + "_5mTo6m": { + "id": "at_least_5m_up_to_6m_old", + "short": "5m-6m", + "long": "5 to 6 Months Old" + }, + "_6mTo1y": { + "id": "at_least_6m_up_to_1y_old", + "short": "6m-1y", + "long": "6 Months to 1 Year Old" + }, + "_1yTo2y": { + "id": "at_least_1y_up_to_2y_old", + "short": "1y-2y", + "long": "1 to 2 
Years Old" + }, + "_2yTo3y": { + "id": "at_least_2y_up_to_3y_old", + "short": "2y-3y", + "long": "2 to 3 Years Old" + }, + "_3yTo4y": { + "id": "at_least_3y_up_to_4y_old", + "short": "3y-4y", + "long": "3 to 4 Years Old" + }, + "_4yTo5y": { + "id": "at_least_4y_up_to_5y_old", + "short": "4y-5y", + "long": "4 to 5 Years Old" + }, + "_5yTo6y": { + "id": "at_least_5y_up_to_6y_old", + "short": "5y-6y", + "long": "5 to 6 Years Old" + }, + "_6yTo7y": { + "id": "at_least_6y_up_to_7y_old", + "short": "6y-7y", + "long": "6 to 7 Years Old" + }, + "_7yTo8y": { + "id": "at_least_7y_up_to_8y_old", + "short": "7y-8y", + "long": "7 to 8 Years Old" + }, + "_8yTo10y": { + "id": "at_least_8y_up_to_10y_old", + "short": "8y-10y", + "long": "8 to 10 Years Old" + }, + "_10yTo12y": { + "id": "at_least_10y_up_to_12y_old", + "short": "10y-12y", + "long": "10 to 12 Years Old" + }, + "_12yTo15y": { + "id": "at_least_12y_up_to_15y_old", + "short": "12y-15y", + "long": "12 to 15 Years Old" + }, + "from15y": { + "id": "at_least_15y_old", + "short": "15y+", + "long": "15+ Years Old" + } + }); + + MAX_AGE_NAMES = /** @type {const} */ ({ + "_1w": { + "id": "up_to_1w_old", + "short": "<1w", + "long": "Up to 1 Week Old" + }, + "_1m": { + "id": "up_to_1m_old", + "short": "<1m", + "long": "Up to 1 Month Old" + }, + "_2m": { + "id": "up_to_2m_old", + "short": "<2m", + "long": "Up to 2 Months Old" + }, + "_3m": { + "id": "up_to_3m_old", + "short": "<3m", + "long": "Up to 3 Months Old" + }, + "_4m": { + "id": "up_to_4m_old", + "short": "<4m", + "long": "Up to 4 Months Old" + }, + "_5m": { + "id": "up_to_5m_old", + "short": "<5m", + "long": "Up to 5 Months Old" + }, + "_6m": { + "id": "up_to_6m_old", + "short": "<6m", + "long": "Up to 6 Months Old" + }, + "_1y": { + "id": "up_to_1y_old", + "short": "<1y", + "long": "Up to 1 Year Old" + }, + "_2y": { + "id": "up_to_2y_old", + "short": "<2y", + "long": "Up to 2 Years Old" + }, + "_3y": { + "id": "up_to_3y_old", + "short": "<3y", + "long": "Up to 3 Years 
Old" + }, + "_4y": { + "id": "up_to_4y_old", + "short": "<4y", + "long": "Up to 4 Years Old" + }, + "_5y": { + "id": "up_to_5y_old", + "short": "<5y", + "long": "Up to 5 Years Old" + }, + "_6y": { + "id": "up_to_6y_old", + "short": "<6y", + "long": "Up to 6 Years Old" + }, + "_7y": { + "id": "up_to_7y_old", + "short": "<7y", + "long": "Up to 7 Years Old" + }, + "_8y": { + "id": "up_to_8y_old", + "short": "<8y", + "long": "Up to 8 Years Old" + }, + "_10y": { + "id": "up_to_10y_old", + "short": "<10y", + "long": "Up to 10 Years Old" + }, + "_12y": { + "id": "up_to_12y_old", + "short": "<12y", + "long": "Up to 12 Years Old" + }, + "_15y": { + "id": "up_to_15y_old", + "short": "<15y", + "long": "Up to 15 Years Old" + } + }); + + MIN_AGE_NAMES = /** @type {const} */ ({ + "_1d": { + "id": "at_least_1d_old", + "short": "1d+", + "long": "At Least 1 Day Old" + }, + "_1w": { + "id": "at_least_1w_old", + "short": "1w+", + "long": "At Least 1 Week Old" + }, + "_1m": { + "id": "at_least_1m_old", + "short": "1m+", + "long": "At Least 1 Month Old" + }, + "_2m": { + "id": "at_least_2m_old", + "short": "2m+", + "long": "At Least 2 Months Old" + }, + "_3m": { + "id": "at_least_3m_old", + "short": "3m+", + "long": "At Least 3 Months Old" + }, + "_4m": { + "id": "at_least_4m_old", + "short": "4m+", + "long": "At Least 4 Months Old" + }, + "_5m": { + "id": "at_least_5m_old", + "short": "5m+", + "long": "At Least 5 Months Old" + }, + "_6m": { + "id": "at_least_6m_old", + "short": "6m+", + "long": "At Least 6 Months Old" + }, + "_1y": { + "id": "at_least_1y_old", + "short": "1y+", + "long": "At Least 1 Year Old" + }, + "_2y": { + "id": "at_least_2y_old", + "short": "2y+", + "long": "At Least 2 Years Old" + }, + "_3y": { + "id": "at_least_3y_old", + "short": "3y+", + "long": "At Least 3 Years Old" + }, + "_4y": { + "id": "at_least_4y_old", + "short": "4y+", + "long": "At Least 4 Years Old" + }, + "_5y": { + "id": "at_least_5y_old", + "short": "5y+", + "long": "At Least 5 Years Old" + }, + 
"_6y": { + "id": "at_least_6y_old", + "short": "6y+", + "long": "At Least 6 Years Old" + }, + "_7y": { + "id": "at_least_7y_old", + "short": "7y+", + "long": "At Least 7 Years Old" + }, + "_8y": { + "id": "at_least_8y_old", + "short": "8y+", + "long": "At Least 8 Years Old" + }, + "_10y": { + "id": "at_least_10y_old", + "short": "10y+", + "long": "At Least 10 Years Old" + }, + "_12y": { + "id": "at_least_12y_old", + "short": "12y+", + "long": "At Least 12 Years Old" + } + }); + + AMOUNT_RANGE_NAMES = /** @type {const} */ ({ + "_0sats": { + "id": "with_0sats", + "short": "0 sats", + "long": "0 Sats" + }, + "_1satTo10sats": { + "id": "above_1sat_under_10sats", + "short": "1-10 sats", + "long": "1 to 10 Sats" + }, + "_10satsTo100sats": { + "id": "above_10sats_under_100sats", + "short": "10-100 sats", + "long": "10 to 100 Sats" + }, + "_100satsTo1kSats": { + "id": "above_100sats_under_1k_sats", + "short": "100-1k sats", + "long": "100 to 1K Sats" + }, + "_1kSatsTo10kSats": { + "id": "above_1k_sats_under_10k_sats", + "short": "1k-10k sats", + "long": "1K to 10K Sats" + }, + "_10kSatsTo100kSats": { + "id": "above_10k_sats_under_100k_sats", + "short": "10k-100k sats", + "long": "10K to 100K Sats" + }, + "_100kSatsTo1mSats": { + "id": "above_100k_sats_under_1m_sats", + "short": "100k-1M sats", + "long": "100K to 1M Sats" + }, + "_1mSatsTo10mSats": { + "id": "above_1m_sats_under_10m_sats", + "short": "1M-10M sats", + "long": "1M to 10M Sats" + }, + "_10mSatsTo1btc": { + "id": "above_10m_sats_under_1btc", + "short": "0.1-1 BTC", + "long": "0.1 to 1 BTC" + }, + "_1btcTo10btc": { + "id": "above_1btc_under_10btc", + "short": "1-10 BTC", + "long": "1 to 10 BTC" + }, + "_10btcTo100btc": { + "id": "above_10btc_under_100btc", + "short": "10-100 BTC", + "long": "10 to 100 BTC" + }, + "_100btcTo1kBtc": { + "id": "above_100btc_under_1k_btc", + "short": "100-1k BTC", + "long": "100 to 1K BTC" + }, + "_1kBtcTo10kBtc": { + "id": "above_1k_btc_under_10k_btc", + "short": "1k-10k BTC", + 
"long": "1K to 10K BTC" + }, + "_10kBtcTo100kBtc": { + "id": "above_10k_btc_under_100k_btc", + "short": "10k-100k BTC", + "long": "10K to 100K BTC" + }, + "_100kBtcOrMore": { + "id": "above_100k_btc", + "short": "100k+ BTC", + "long": "100K+ BTC" + } + }); + + GE_AMOUNT_NAMES = /** @type {const} */ ({ + "_1sat": { + "id": "above_1sat", + "short": "1+ sats", + "long": "Above 1 Sat" + }, + "_10sats": { + "id": "above_10sats", + "short": "10+ sats", + "long": "Above 10 Sats" + }, + "_100sats": { + "id": "above_100sats", + "short": "100+ sats", + "long": "Above 100 Sats" + }, + "_1kSats": { + "id": "above_1k_sats", + "short": "1k+ sats", + "long": "Above 1K Sats" + }, + "_10kSats": { + "id": "above_10k_sats", + "short": "10k+ sats", + "long": "Above 10K Sats" + }, + "_100kSats": { + "id": "above_100k_sats", + "short": "100k+ sats", + "long": "Above 100K Sats" + }, + "_1mSats": { + "id": "above_1m_sats", + "short": "1M+ sats", + "long": "Above 1M Sats" + }, + "_10mSats": { + "id": "above_10m_sats", + "short": "0.1+ BTC", + "long": "Above 0.1 BTC" + }, + "_1btc": { + "id": "above_1btc", + "short": "1+ BTC", + "long": "Above 1 BTC" + }, + "_10btc": { + "id": "above_10btc", + "short": "10+ BTC", + "long": "Above 10 BTC" + }, + "_100btc": { + "id": "above_100btc", + "short": "100+ BTC", + "long": "Above 100 BTC" + }, + "_1kBtc": { + "id": "above_1k_btc", + "short": "1k+ BTC", + "long": "Above 1K BTC" + }, + "_10kBtc": { + "id": "above_10k_btc", + "short": "10k+ BTC", + "long": "Above 10K BTC" + } + }); + + LT_AMOUNT_NAMES = /** @type {const} */ ({ + "_10sats": { + "id": "under_10sats", + "short": "<10 sats", + "long": "Under 10 Sats" + }, + "_100sats": { + "id": "under_100sats", + "short": "<100 sats", + "long": "Under 100 Sats" + }, + "_1kSats": { + "id": "under_1k_sats", + "short": "<1k sats", + "long": "Under 1K Sats" + }, + "_10kSats": { + "id": "under_10k_sats", + "short": "<10k sats", + "long": "Under 10K Sats" + }, + "_100kSats": { + "id": "under_100k_sats", + 
"short": "<100k sats", + "long": "Under 100K Sats" + }, + "_1mSats": { + "id": "under_1m_sats", + "short": "<1M sats", + "long": "Under 1M Sats" + }, + "_10mSats": { + "id": "under_10m_sats", + "short": "<0.1 BTC", + "long": "Under 0.1 BTC" + }, + "_1btc": { + "id": "under_1btc", + "short": "<1 BTC", + "long": "Under 1 BTC" + }, + "_10btc": { + "id": "under_10btc", + "short": "<10 BTC", + "long": "Under 10 BTC" + }, + "_100btc": { + "id": "under_100btc", + "short": "<100 BTC", + "long": "Under 100 BTC" + }, + "_1kBtc": { + "id": "under_1k_btc", + "short": "<1k BTC", + "long": "Under 1K BTC" + }, + "_10kBtc": { + "id": "under_10k_btc", + "short": "<10k BTC", + "long": "Under 10K BTC" + }, + "_100kBtc": { + "id": "under_100k_btc", + "short": "<100k BTC", + "long": "Under 100K BTC" + } + }); + /** * @param {BrkClientOptions|string} options */ @@ -4658,143 +5533,114 @@ class BrkClient extends BrkClientBase { _buildTree(basePath) { return { computed: { - blks: { - position: new MetricNode(this, '/position') - }, - chain: { - _1mBlockCount: createIndexes(this, '/1m_block_count'), - _1wBlockCount: createIndexes(this, '/1w_block_count'), - _1yBlockCount: createIndexes(this, '/1y_block_count'), - _24hBlockCount: createIndexes2(this, '/24h_block_count'), - _24hCoinbaseSum: createIndexes2(this, '/24h_coinbase_sum'), - _24hCoinbaseUsdSum: createIndexes2(this, '/24h_coinbase_usd_sum'), - annualizedVolume: createIndexes(this, '/annualized_volume'), - annualizedVolumeBtc: createIndexes(this, '/annualized_volume_btc'), - annualizedVolumeUsd: createIndexes(this, '/annualized_volume_usd'), - blockCount: createBlockCountPattern(this, 'computed_chain/block_count'), - blockCountTarget: createIndexes(this, '/block_count_target'), - blockInterval: createBlockIntervalPattern(this, 'block_interval'), - blockSize: createBlockSizePattern(this, 'computed_chain/block_size'), - blockVbytes: createBlockSizePattern(this, 'computed_chain/block_vbytes'), - blockWeight: createBlockSizePattern(this, 
'computed_chain/block_weight'), - blocksBeforeNextDifficultyAdjustment: createIndexes3(this, '/blocks_before_next_difficulty_adjustment'), - blocksBeforeNextHalving: createIndexes3(this, '/blocks_before_next_halving'), - coinbase: createCoinbasePattern(this, 'computed_chain/coinbase'), - daysBeforeNextDifficultyAdjustment: createIndexes3(this, '/days_before_next_difficulty_adjustment'), - daysBeforeNextHalving: createIndexes3(this, '/days_before_next_halving'), - difficulty: createIndexes4(this, '/difficulty'), - difficultyAdjustment: createIndexes3(this, '/difficulty_adjustment'), - difficultyAsHash: createIndexes3(this, '/difficulty_as_hash'), - difficultyepoch: createIndexes(this, '/difficultyepoch'), - emptyoutputCount: createBitcoinPattern(this, 'computed_chain/emptyoutput_count'), - exactUtxoCount: createIndexes3(this, '/exact_utxo_count'), - fee: { - base: createIndexes6(this, '/fee'), - bitcoin: createBlockSizePattern(this, 'fee/bitcoin'), - bitcoinTxindex: createIndexes6(this, '/fee_btc'), - dollars: createBlockSizePattern(this, 'fee/dollars'), - dollarsTxindex: createIndexes6(this, '/fee_usd'), - sats: createBlockSizePattern(this, 'fee/sats') + blocks: { + count: { + _1mBlockCount: createMetricPattern4(this, '1m_block_count'), + _1wBlockCount: createMetricPattern4(this, '1w_block_count'), + _1yBlockCount: createMetricPattern4(this, '1y_block_count'), + _24hBlockCount: createMetricPattern25(this, '24h_block_count'), + blockCount: createBlockCountPattern(this, 'block_count'), + blockCountTarget: createMetricPattern4(this, 'block_count_target') }, - feeDominance: createIndexes5(this, '/fee_dominance'), - feeRate: { - average: createIndexes3(this, '/fee_rate_avg'), - base: createIndexes6(this, '/fee_rate'), - max: createIndexes3(this, '/fee_rate_max'), - median: createIndexes2(this, '/fee_rate_median'), - min: createIndexes3(this, '/fee_rate_min'), - pct10: createIndexes2(this, '/fee_rate_pct10'), - pct25: createIndexes2(this, '/fee_rate_pct25'), - pct75: 
createIndexes2(this, '/fee_rate_pct75'), - pct90: createIndexes2(this, '/fee_rate_pct90') + difficulty: { + blocksBeforeNextDifficultyAdjustment: createMetricPattern1(this, 'blocks_before_next_difficulty_adjustment'), + daysBeforeNextDifficultyAdjustment: createMetricPattern1(this, 'days_before_next_difficulty_adjustment'), + difficultyepoch: createMetricPattern4(this, 'difficultyepoch') }, - halvingepoch: createIndexes(this, '/halvingepoch'), - hashPricePhs: createIndexes3(this, '/hash_price_phs'), - hashPricePhsMin: createIndexes3(this, '/hash_price_phs_min'), - hashPriceRebound: createIndexes3(this, '/hash_price_rebound'), - hashPriceThs: createIndexes3(this, '/hash_price_ths'), - hashPriceThsMin: createIndexes3(this, '/hash_price_ths_min'), - hashRate: createIndexes3(this, '/hash_rate'), - hashRate1mSma: createIndexes(this, '/hash_rate_1m_sma'), - hashRate1wSma: createIndexes(this, '/hash_rate_1w_sma'), - hashRate1ySma: createIndexes(this, '/hash_rate_1y_sma'), - hashRate2mSma: createIndexes(this, '/hash_rate_2m_sma'), - hashValuePhs: createIndexes3(this, '/hash_value_phs'), - hashValuePhsMin: createIndexes3(this, '/hash_value_phs_min'), - hashValueRebound: createIndexes3(this, '/hash_value_rebound'), - hashValueThs: createIndexes3(this, '/hash_value_ths'), - hashValueThsMin: createIndexes3(this, '/hash_value_ths_min'), - inflationRate: createIndexes(this, '/inflation_rate'), - inputCount: createBlockSizePattern(this, 'computed_chain/input_count'), - inputValue: createIndexes6(this, '/input_value'), - inputsPerSec: createIndexes(this, '/inputs_per_sec'), - interval: createIndexes2(this, '/interval'), - isCoinbase: createIndexes6(this, '/is_coinbase'), - opreturnCount: createBitcoinPattern(this, 'computed_chain/opreturn_count'), - outputCount: createBlockSizePattern(this, 'computed_chain/output_count'), - outputValue: createIndexes6(this, '/output_value'), - outputsPerSec: createIndexes(this, '/outputs_per_sec'), - p2aCount: createBitcoinPattern(this, 
'computed_chain/p2a_count'), - p2msCount: createBitcoinPattern(this, 'computed_chain/p2ms_count'), - p2pk33Count: createBitcoinPattern(this, 'computed_chain/p2pk33_count'), - p2pk65Count: createBitcoinPattern(this, 'computed_chain/p2pk65_count'), - p2pkhCount: createBitcoinPattern(this, 'computed_chain/p2pkh_count'), - p2shCount: createBitcoinPattern(this, 'computed_chain/p2sh_count'), - p2trCount: createBitcoinPattern(this, 'computed_chain/p2tr_count'), - p2wpkhCount: createBitcoinPattern(this, 'computed_chain/p2wpkh_count'), - p2wshCount: createBitcoinPattern(this, 'computed_chain/p2wsh_count'), - puellMultiple: createIndexes(this, '/puell_multiple'), - sentSum: { - bitcoin: createBitcoinPattern2(this, 'sent_sum/bitcoin'), - dollars: createIndexes3(this, '/sent_sum_usd'), - sats: createIndexes3(this, '/sent_sum') + halving: { + blocksBeforeNextHalving: createMetricPattern1(this, 'blocks_before_next_halving'), + daysBeforeNextHalving: createMetricPattern1(this, 'days_before_next_halving'), + halvingepoch: createMetricPattern4(this, 'halvingepoch') }, - subsidy: createCoinbasePattern(this, 'computed_chain/subsidy'), - subsidyDominance: createIndexes5(this, '/subsidy_dominance'), - subsidyUsd1ySma: createIndexes(this, '/subsidy_usd_1y_sma'), - timestamp: new MetricNode(this, '/timestamp'), - txBtcVelocity: createIndexes(this, '/tx_btc_velocity'), - txCount: createBitcoinPattern(this, 'computed_chain/tx_count'), - txPerSec: createIndexes(this, '/tx_per_sec'), - txUsdVelocity: createIndexes(this, '/tx_usd_velocity'), - txV1: createBlockCountPattern(this, 'computed_chain/tx_v1'), - txV2: createBlockCountPattern(this, 'computed_chain/tx_v2'), - txV3: createBlockCountPattern(this, 'computed_chain/tx_v3'), - txVsize: createBlockIntervalPattern(this, 'tx_vsize'), - txWeight: createBlockIntervalPattern(this, 'tx_weight'), - unclaimedRewards: createUnclaimedRewardsPattern(this, 'computed_chain/unclaimed_rewards'), - unknownoutputCount: createBitcoinPattern(this, 
'computed_chain/unknownoutput_count'), - vbytes: createIndexes2(this, '/vbytes'), - vsize: createIndexes6(this, '/vsize'), - weight: createIndexes6(this, '/weight') + interval: { + blockInterval: createBlockIntervalPattern(this, 'block_interval'), + interval: createMetricPattern25(this, 'interval') + }, + mining: { + difficulty: createMetricPattern2(this, 'difficulty'), + difficultyAdjustment: createMetricPattern1(this, 'difficulty_adjustment'), + difficultyAsHash: createMetricPattern1(this, 'difficulty_as_hash'), + hashPricePhs: createMetricPattern1(this, 'hash_price_phs'), + hashPricePhsMin: createMetricPattern1(this, 'hash_price_phs_min'), + hashPriceRebound: createMetricPattern1(this, 'hash_price_rebound'), + hashPriceThs: createMetricPattern1(this, 'hash_price_ths'), + hashPriceThsMin: createMetricPattern1(this, 'hash_price_ths_min'), + hashRate: createMetricPattern1(this, 'hash_rate'), + hashRate1mSma: createMetricPattern4(this, 'hash_rate_1m_sma'), + hashRate1wSma: createMetricPattern4(this, 'hash_rate_1w_sma'), + hashRate1ySma: createMetricPattern4(this, 'hash_rate_1y_sma'), + hashRate2mSma: createMetricPattern4(this, 'hash_rate_2m_sma'), + hashValuePhs: createMetricPattern1(this, 'hash_value_phs'), + hashValuePhsMin: createMetricPattern1(this, 'hash_value_phs_min'), + hashValueRebound: createMetricPattern1(this, 'hash_value_rebound'), + hashValueThs: createMetricPattern1(this, 'hash_value_ths'), + hashValueThsMin: createMetricPattern1(this, 'hash_value_ths_min') + }, + rewards: { + _24hCoinbaseSum: createMetricPattern25(this, '24h_coinbase_sum'), + _24hCoinbaseUsdSum: createMetricPattern25(this, '24h_coinbase_usd_sum'), + coinbase: createCoinbasePattern(this, 'coinbase'), + feeDominance: createMetricPattern21(this, 'fee_dominance'), + subsidy: createCoinbasePattern(this, 'subsidy'), + subsidyDominance: createMetricPattern21(this, 'subsidy_dominance'), + subsidyUsd1ySma: createMetricPattern4(this, 'subsidy_usd_1y_sma'), + unclaimedRewards: 
createUnclaimedRewardsPattern(this, 'unclaimed_rewards') + }, + size: { + blockSize: createBlockSizePattern(this, 'block_size'), + blockVbytes: createBlockSizePattern(this, 'block_vbytes'), + vbytes: createMetricPattern25(this, 'vbytes') + }, + time: { + date: createMetricPattern25(this, 'date'), + dateFixed: createMetricPattern25(this, 'date_fixed'), + timestamp: createMetricPattern2(this, 'timestamp'), + timestampFixed: createMetricPattern25(this, 'timestamp_fixed') + }, + weight: { + blockFullness: createBitcoinPattern(this, 'block_fullness'), + blockWeight: createBlockSizePattern(this, 'block_weight') + } }, cointime: { - activeCap: createIndexes3(this, '/active_cap'), - activePrice: createIndexes3(this, '/active_price'), - activePriceRatio: createActivePriceRatioPattern(this, 'computed_cointime/active_price_ratio'), - activeSupply: createActiveSupplyPattern(this, 'computed_cointime/active_supply'), - activityToVaultednessRatio: createIndexes3(this, '/activity_to_vaultedness_ratio'), - coinblocksCreated: createBlockCountPattern(this, 'computed_cointime/coinblocks_created'), - coinblocksStored: createBlockCountPattern(this, 'computed_cointime/coinblocks_stored'), - cointimeAdjInflationRate: createIndexes(this, '/cointime_adj_inflation_rate'), - cointimeAdjTxBtcVelocity: createIndexes(this, '/cointime_adj_tx_btc_velocity'), - cointimeAdjTxUsdVelocity: createIndexes(this, '/cointime_adj_tx_usd_velocity'), - cointimeCap: createIndexes3(this, '/cointime_cap'), - cointimePrice: createIndexes3(this, '/cointime_price'), - cointimePriceRatio: createActivePriceRatioPattern(this, 'computed_cointime/cointime_price_ratio'), - cointimeValueCreated: createBlockCountPattern(this, 'computed_cointime/cointime_value_created'), - cointimeValueDestroyed: createBlockCountPattern(this, 'computed_cointime/cointime_value_destroyed'), - cointimeValueStored: createBlockCountPattern(this, 'computed_cointime/cointime_value_stored'), - investorCap: createIndexes3(this, '/investor_cap'), - 
liveliness: createIndexes3(this, '/liveliness'), - thermoCap: createIndexes3(this, '/thermo_cap'), - trueMarketMean: createIndexes3(this, '/true_market_mean'), - trueMarketMeanRatio: createActivePriceRatioPattern(this, 'computed_cointime/true_market_mean_ratio'), - vaultedCap: createIndexes3(this, '/vaulted_cap'), - vaultedPrice: createIndexes3(this, '/vaulted_price'), - vaultedPriceRatio: createActivePriceRatioPattern(this, 'computed_cointime/vaulted_price_ratio'), - vaultedSupply: createActiveSupplyPattern(this, 'computed_cointime/vaulted_supply'), - vaultedness: createIndexes3(this, '/vaultedness') + activity: { + activityToVaultednessRatio: createMetricPattern1(this, 'activity_to_vaultedness_ratio'), + coinblocksCreated: createBlockCountPattern(this, 'coinblocks_created'), + coinblocksStored: createBlockCountPattern(this, 'coinblocks_stored'), + liveliness: createMetricPattern1(this, 'liveliness'), + vaultedness: createMetricPattern1(this, 'vaultedness') + }, + adjusted: { + cointimeAdjInflationRate: createMetricPattern4(this, 'cointime_adj_inflation_rate'), + cointimeAdjTxBtcVelocity: createMetricPattern4(this, 'cointime_adj_tx_btc_velocity'), + cointimeAdjTxUsdVelocity: createMetricPattern4(this, 'cointime_adj_tx_usd_velocity') + }, + cap: { + activeCap: createMetricPattern1(this, 'active_cap'), + cointimeCap: createMetricPattern1(this, 'cointime_cap'), + investorCap: createMetricPattern1(this, 'investor_cap'), + thermoCap: createMetricPattern1(this, 'thermo_cap'), + vaultedCap: createMetricPattern1(this, 'vaulted_cap') + }, + pricing: { + activePrice: createMetricPattern1(this, 'active_price'), + activePriceRatio: createActivePriceRatioPattern(this, 'active_price_ratio'), + cointimePrice: createMetricPattern1(this, 'cointime_price'), + cointimePriceRatio: createActivePriceRatioPattern(this, 'cointime_price_ratio'), + trueMarketMean: createMetricPattern1(this, 'true_market_mean'), + trueMarketMeanRatio: createActivePriceRatioPattern(this, 
'true_market_mean_ratio'), + vaultedPrice: createMetricPattern1(this, 'vaulted_price'), + vaultedPriceRatio: createActivePriceRatioPattern(this, 'vaulted_price_ratio') + }, + supply: { + activeSupply: createActiveSupplyPattern(this, 'active_supply'), + vaultedSupply: createActiveSupplyPattern(this, 'vaulted_supply') + }, + value: { + cointimeValueCreated: createBlockCountPattern(this, 'cointime_value_created'), + cointimeValueDestroyed: createBlockCountPattern(this, 'cointime_value_destroyed'), + cointimeValueStored: createBlockCountPattern(this, 'cointime_value_stored') + } }, constants: { constant0: createConstant0Pattern(this, 'constant_0'), @@ -4812,696 +5658,879 @@ class BrkClient extends BrkClientBase { constantMinus3: createConstant0Pattern(this, 'constant_minus_3'), constantMinus4: createConstant0Pattern(this, 'constant_minus_4') }, - fetched: { - priceOhlcInCents: createIndexes13(this, '/price_ohlc_in_cents') - }, - indexes: { - date: createIndexes13(this, '/date'), - dateFixed: createIndexes2(this, '/date_fixed'), - dateindex: createIndexes13(this, '/dateindex'), - dateindexCount: createIndexes14(this, '/dateindex_count'), - decadeindex: new MetricNode(this, '/decadeindex'), - difficultyepoch: new MetricNode(this, '/difficultyepoch'), - emptyoutputindex: new MetricNode(this, '/emptyoutputindex'), - firstDateindex: createIndexes14(this, '/first_dateindex'), - firstHeight: new MetricNode(this, '/first_height'), - firstMonthindex: createIndexes15(this, '/first_monthindex'), - firstYearindex: createIndexes7(this, '/first_yearindex'), - halvingepoch: new MetricNode(this, '/halvingepoch'), - height: createIndexes2(this, '/height'), - heightCount: new MetricNode(this, '/height_count'), - inputCount: createIndexes6(this, '/input_count'), - monthindex: new MetricNode(this, '/monthindex'), - monthindexCount: createIndexes15(this, '/monthindex_count'), - opreturnindex: new MetricNode(this, '/opreturnindex'), - outputCount: createIndexes6(this, '/output_count'), - 
p2aaddressindex: createIndexes16(this, '/p2aaddressindex'), - p2msoutputindex: new MetricNode(this, '/p2msoutputindex'), - p2pk33addressindex: createIndexes17(this, '/p2pk33addressindex'), - p2pk65addressindex: createIndexes18(this, '/p2pk65addressindex'), - p2pkhaddressindex: createIndexes19(this, '/p2pkhaddressindex'), - p2shaddressindex: createIndexes20(this, '/p2shaddressindex'), - p2traddressindex: createIndexes21(this, '/p2traddressindex'), - p2wpkhaddressindex: createIndexes22(this, '/p2wpkhaddressindex'), - p2wshaddressindex: createIndexes23(this, '/p2wshaddressindex'), - quarterindex: new MetricNode(this, '/quarterindex'), - semesterindex: new MetricNode(this, '/semesterindex'), - timestampFixed: createIndexes2(this, '/timestamp_fixed'), - txindex: createIndexes6(this, '/txindex'), - txindexCount: createIndexes2(this, '/txindex_count'), - txinindex: createIndexes24(this, '/txinindex'), - txoutindex: createIndexes25(this, '/txoutindex'), - unknownoutputindex: new MetricNode(this, '/unknownoutputindex'), - weekindex: new MetricNode(this, '/weekindex'), - yearindex: new MetricNode(this, '/yearindex'), - yearindexCount: createIndexes7(this, '/yearindex_count') - }, - market: { - _1dReturns1mSd: create_1dReturns1mSdPattern(this, '1d_returns_1m_sd'), - _1dReturns1wSd: create_1dReturns1mSdPattern(this, '1d_returns_1w_sd'), - _1dReturns1ySd: create_1dReturns1mSdPattern(this, '1d_returns_1y_sd'), - _10yCagr: createIndexes(this, '/10y_cagr'), - _10yDcaAvgPrice: createIndexes(this, '/10y_dca_avg_price'), - _10yDcaCagr: createIndexes(this, '/10y_dca_cagr'), - _10yDcaReturns: createIndexes(this, '/10y_dca_returns'), - _10yDcaStack: createIndexes(this, '/10y_dca_stack'), - _10yPriceReturns: createIndexes(this, '/10y_price_returns'), - _1dPriceReturns: createIndexes(this, '/1d_price_returns'), - _1mDcaAvgPrice: createIndexes(this, '/1m_dca_avg_price'), - _1mDcaReturns: createIndexes(this, '/1m_dca_returns'), - _1mDcaStack: createIndexes(this, '/1m_dca_stack'), - 
_1mPriceReturns: createIndexes(this, '/1m_price_returns'), - _1wDcaAvgPrice: createIndexes(this, '/1w_dca_avg_price'), - _1wDcaReturns: createIndexes(this, '/1w_dca_returns'), - _1wDcaStack: createIndexes(this, '/1w_dca_stack'), - _1wPriceReturns: createIndexes(this, '/1w_price_returns'), - _1yDcaAvgPrice: createIndexes(this, '/1y_dca_avg_price'), - _1yDcaReturns: createIndexes(this, '/1y_dca_returns'), - _1yDcaStack: createIndexes(this, '/1y_dca_stack'), - _1yPriceReturns: createIndexes(this, '/1y_price_returns'), - _2yCagr: createIndexes(this, '/2y_cagr'), - _2yDcaAvgPrice: createIndexes(this, '/2y_dca_avg_price'), - _2yDcaCagr: createIndexes(this, '/2y_dca_cagr'), - _2yDcaReturns: createIndexes(this, '/2y_dca_returns'), - _2yDcaStack: createIndexes(this, '/2y_dca_stack'), - _2yPriceReturns: createIndexes(this, '/2y_price_returns'), - _3mDcaAvgPrice: createIndexes(this, '/3m_dca_avg_price'), - _3mDcaReturns: createIndexes(this, '/3m_dca_returns'), - _3mDcaStack: createIndexes(this, '/3m_dca_stack'), - _3mPriceReturns: createIndexes(this, '/3m_price_returns'), - _3yCagr: createIndexes(this, '/3y_cagr'), - _3yDcaAvgPrice: createIndexes(this, '/3y_dca_avg_price'), - _3yDcaCagr: createIndexes(this, '/3y_dca_cagr'), - _3yDcaReturns: createIndexes(this, '/3y_dca_returns'), - _3yDcaStack: createIndexes(this, '/3y_dca_stack'), - _3yPriceReturns: createIndexes(this, '/3y_price_returns'), - _4yCagr: createIndexes(this, '/4y_cagr'), - _4yDcaAvgPrice: createIndexes(this, '/4y_dca_avg_price'), - _4yDcaCagr: createIndexes(this, '/4y_dca_cagr'), - _4yDcaReturns: createIndexes(this, '/4y_dca_returns'), - _4yDcaStack: createIndexes(this, '/4y_dca_stack'), - _4yPriceReturns: createIndexes(this, '/4y_price_returns'), - _5yCagr: createIndexes(this, '/5y_cagr'), - _5yDcaAvgPrice: createIndexes(this, '/5y_dca_avg_price'), - _5yDcaCagr: createIndexes(this, '/5y_dca_cagr'), - _5yDcaReturns: createIndexes(this, '/5y_dca_returns'), - _5yDcaStack: createIndexes(this, '/5y_dca_stack'), - 
_5yPriceReturns: createIndexes(this, '/5y_price_returns'), - _6mDcaAvgPrice: createIndexes(this, '/6m_dca_avg_price'), - _6mDcaReturns: createIndexes(this, '/6m_dca_returns'), - _6mDcaStack: createIndexes(this, '/6m_dca_stack'), - _6mPriceReturns: createIndexes(this, '/6m_price_returns'), - _6yCagr: createIndexes(this, '/6y_cagr'), - _6yDcaAvgPrice: createIndexes(this, '/6y_dca_avg_price'), - _6yDcaCagr: createIndexes(this, '/6y_dca_cagr'), - _6yDcaReturns: createIndexes(this, '/6y_dca_returns'), - _6yDcaStack: createIndexes(this, '/6y_dca_stack'), - _6yPriceReturns: createIndexes(this, '/6y_price_returns'), - _8yCagr: createIndexes(this, '/8y_cagr'), - _8yDcaAvgPrice: createIndexes(this, '/8y_dca_avg_price'), - _8yDcaCagr: createIndexes(this, '/8y_dca_cagr'), - _8yDcaReturns: createIndexes(this, '/8y_dca_returns'), - _8yDcaStack: createIndexes(this, '/8y_dca_stack'), - _8yPriceReturns: createIndexes(this, '/8y_price_returns'), - daysSincePriceAth: createIndexes(this, '/days_since_price_ath'), - dcaClass2015AvgPrice: createIndexes(this, '/dca_class_2015_avg_price'), - dcaClass2015Returns: createIndexes(this, '/dca_class_2015_returns'), - dcaClass2015Stack: createIndexes(this, '/dca_class_2015_stack'), - dcaClass2016AvgPrice: createIndexes(this, '/dca_class_2016_avg_price'), - dcaClass2016Returns: createIndexes(this, '/dca_class_2016_returns'), - dcaClass2016Stack: createIndexes(this, '/dca_class_2016_stack'), - dcaClass2017AvgPrice: createIndexes(this, '/dca_class_2017_avg_price'), - dcaClass2017Returns: createIndexes(this, '/dca_class_2017_returns'), - dcaClass2017Stack: createIndexes(this, '/dca_class_2017_stack'), - dcaClass2018AvgPrice: createIndexes(this, '/dca_class_2018_avg_price'), - dcaClass2018Returns: createIndexes(this, '/dca_class_2018_returns'), - dcaClass2018Stack: createIndexes(this, '/dca_class_2018_stack'), - dcaClass2019AvgPrice: createIndexes(this, '/dca_class_2019_avg_price'), - dcaClass2019Returns: createIndexes(this, 
'/dca_class_2019_returns'), - dcaClass2019Stack: createIndexes(this, '/dca_class_2019_stack'), - dcaClass2020AvgPrice: createIndexes(this, '/dca_class_2020_avg_price'), - dcaClass2020Returns: createIndexes(this, '/dca_class_2020_returns'), - dcaClass2020Stack: createIndexes(this, '/dca_class_2020_stack'), - dcaClass2021AvgPrice: createIndexes(this, '/dca_class_2021_avg_price'), - dcaClass2021Returns: createIndexes(this, '/dca_class_2021_returns'), - dcaClass2021Stack: createIndexes(this, '/dca_class_2021_stack'), - dcaClass2022AvgPrice: createIndexes(this, '/dca_class_2022_avg_price'), - dcaClass2022Returns: createIndexes(this, '/dca_class_2022_returns'), - dcaClass2022Stack: createIndexes(this, '/dca_class_2022_stack'), - dcaClass2023AvgPrice: createIndexes(this, '/dca_class_2023_avg_price'), - dcaClass2023Returns: createIndexes(this, '/dca_class_2023_returns'), - dcaClass2023Stack: createIndexes(this, '/dca_class_2023_stack'), - dcaClass2024AvgPrice: createIndexes(this, '/dca_class_2024_avg_price'), - dcaClass2024Returns: createIndexes(this, '/dca_class_2024_returns'), - dcaClass2024Stack: createIndexes(this, '/dca_class_2024_stack'), - dcaClass2025AvgPrice: createIndexes(this, '/dca_class_2025_avg_price'), - dcaClass2025Returns: createIndexes(this, '/dca_class_2025_returns'), - dcaClass2025Stack: createIndexes(this, '/dca_class_2025_stack'), - maxDaysBetweenPriceAths: createIndexes(this, '/max_days_between_price_aths'), - maxYearsBetweenPriceAths: createIndexes(this, '/max_years_between_price_aths'), - price10yAgo: createIndexes(this, '/price_10y_ago'), - price13dEma: createPrice13dEmaPattern(this, 'price_13d_ema'), - price13dSma: createPrice13dEmaPattern(this, 'price_13d_sma'), - price144dEma: createPrice13dEmaPattern(this, 'price_144d_ema'), - price144dSma: createPrice13dEmaPattern(this, 'price_144d_sma'), - price1dAgo: createIndexes(this, '/price_1d_ago'), - price1mAgo: createIndexes(this, '/price_1m_ago'), - price1mEma: createPrice13dEmaPattern(this, 
'price_1m_ema'), - price1mMax: createIndexes(this, '/price_1m_max'), - price1mMin: createIndexes(this, '/price_1m_min'), - price1mSma: createPrice13dEmaPattern(this, 'price_1m_sma'), - price1mVolatility: createIndexes(this, '/price_1m_volatility'), - price1wAgo: createIndexes(this, '/price_1w_ago'), - price1wEma: createPrice13dEmaPattern(this, 'price_1w_ema'), - price1wMax: createIndexes(this, '/price_1w_max'), - price1wMin: createIndexes(this, '/price_1w_min'), - price1wSma: createPrice13dEmaPattern(this, 'price_1w_sma'), - price1wVolatility: createIndexes(this, '/price_1w_volatility'), - price1yAgo: createIndexes(this, '/price_1y_ago'), - price1yEma: createPrice13dEmaPattern(this, 'price_1y_ema'), - price1yMax: createIndexes(this, '/price_1y_max'), - price1yMin: createIndexes(this, '/price_1y_min'), - price1ySma: createPrice13dEmaPattern(this, 'price_1y_sma'), - price1yVolatility: createIndexes(this, '/price_1y_volatility'), - price200dEma: createPrice13dEmaPattern(this, 'price_200d_ema'), - price200dSma: createPrice13dEmaPattern(this, 'price_200d_sma'), - price200dSmaX08: createIndexes(this, '/price_200d_sma_x0_8'), - price200dSmaX24: createIndexes(this, '/price_200d_sma_x2_4'), - price200wEma: createPrice13dEmaPattern(this, 'price_200w_ema'), - price200wSma: createPrice13dEmaPattern(this, 'price_200w_sma'), - price21dEma: createPrice13dEmaPattern(this, 'price_21d_ema'), - price21dSma: createPrice13dEmaPattern(this, 'price_21d_sma'), - price2wChoppinessIndex: createIndexes(this, '/price_2w_choppiness_index'), - price2wMax: createIndexes(this, '/price_2w_max'), - price2wMin: createIndexes(this, '/price_2w_min'), - price2yAgo: createIndexes(this, '/price_2y_ago'), - price2yEma: createPrice13dEmaPattern(this, 'price_2y_ema'), - price2ySma: createPrice13dEmaPattern(this, 'price_2y_sma'), - price34dEma: createPrice13dEmaPattern(this, 'price_34d_ema'), - price34dSma: createPrice13dEmaPattern(this, 'price_34d_sma'), - price3mAgo: createIndexes(this, '/price_3m_ago'), - 
price3yAgo: createIndexes(this, '/price_3y_ago'), - price4yAgo: createIndexes(this, '/price_4y_ago'), - price4yEma: createPrice13dEmaPattern(this, 'price_4y_ema'), - price4ySma: createPrice13dEmaPattern(this, 'price_4y_sma'), - price55dEma: createPrice13dEmaPattern(this, 'price_55d_ema'), - price55dSma: createPrice13dEmaPattern(this, 'price_55d_sma'), - price5yAgo: createIndexes(this, '/price_5y_ago'), - price6mAgo: createIndexes(this, '/price_6m_ago'), - price6yAgo: createIndexes(this, '/price_6y_ago'), - price89dEma: createPrice13dEmaPattern(this, 'price_89d_ema'), - price89dSma: createPrice13dEmaPattern(this, 'price_89d_sma'), - price8dEma: createPrice13dEmaPattern(this, 'price_8d_ema'), - price8dSma: createPrice13dEmaPattern(this, 'price_8d_sma'), - price8yAgo: createIndexes(this, '/price_8y_ago'), - priceAth: createIndexes26(this, '/price_ath'), - priceDrawdown: createIndexes26(this, '/price_drawdown'), - priceTrueRange: createIndexes5(this, '/price_true_range'), - priceTrueRange2wSum: createIndexes5(this, '/price_true_range_2w_sum') - }, - pools: { - pool: createIndexes2(this, '/pool'), - vecs: { - aXbt: createAXbtPattern(this, 'computed_pools_vecs/AXbt'), - aaoPool: createAXbtPattern(this, 'computed_pools_vecs/AaoPool'), - antPool: createAXbtPattern(this, 'computed_pools_vecs/AntPool'), - arkPool: createAXbtPattern(this, 'computed_pools_vecs/ArkPool'), - asicMiner: createAXbtPattern(this, 'computed_pools_vecs/AsicMiner'), - batPool: createAXbtPattern(this, 'computed_pools_vecs/BatPool'), - bcMonster: createAXbtPattern(this, 'computed_pools_vecs/BcMonster'), - bcpoolIo: createAXbtPattern(this, 'computed_pools_vecs/BcpoolIo'), - binancePool: createAXbtPattern(this, 'computed_pools_vecs/BinancePool'), - bitClub: createAXbtPattern(this, 'computed_pools_vecs/BitClub'), - bitFuFuPool: createAXbtPattern(this, 'computed_pools_vecs/BitFuFuPool'), - bitFury: createAXbtPattern(this, 'computed_pools_vecs/BitFury'), - bitMinter: createAXbtPattern(this, 
'computed_pools_vecs/BitMinter'), - bitalo: createAXbtPattern(this, 'computed_pools_vecs/Bitalo'), - bitcoinAffiliateNetwork: createAXbtPattern(this, 'computed_pools_vecs/BitcoinAffiliateNetwork'), - bitcoinCom: createAXbtPattern(this, 'computed_pools_vecs/BitcoinCom'), - bitcoinIndia: createAXbtPattern(this, 'computed_pools_vecs/BitcoinIndia'), - bitcoinRussia: createAXbtPattern(this, 'computed_pools_vecs/BitcoinRussia'), - bitcoinUkraine: createAXbtPattern(this, 'computed_pools_vecs/BitcoinUkraine'), - bitfarms: createAXbtPattern(this, 'computed_pools_vecs/Bitfarms'), - bitparking: createAXbtPattern(this, 'computed_pools_vecs/Bitparking'), - bitsolo: createAXbtPattern(this, 'computed_pools_vecs/Bitsolo'), - bixin: createAXbtPattern(this, 'computed_pools_vecs/Bixin'), - blockFills: createAXbtPattern(this, 'computed_pools_vecs/BlockFills'), - braiinsPool: createAXbtPattern(this, 'computed_pools_vecs/BraiinsPool'), - bravoMining: createAXbtPattern(this, 'computed_pools_vecs/BravoMining'), - btPool: createAXbtPattern(this, 'computed_pools_vecs/BtPool'), - btcCom: createAXbtPattern(this, 'computed_pools_vecs/BtcCom'), - btcDig: createAXbtPattern(this, 'computed_pools_vecs/BtcDig'), - btcGuild: createAXbtPattern(this, 'computed_pools_vecs/BtcGuild'), - btcLab: createAXbtPattern(this, 'computed_pools_vecs/BtcLab'), - btcMp: createAXbtPattern(this, 'computed_pools_vecs/BtcMp'), - btcNuggets: createAXbtPattern(this, 'computed_pools_vecs/BtcNuggets'), - btcPoolParty: createAXbtPattern(this, 'computed_pools_vecs/BtcPoolParty'), - btcServ: createAXbtPattern(this, 'computed_pools_vecs/BtcServ'), - btcTop: createAXbtPattern(this, 'computed_pools_vecs/BtcTop'), - btcc: createAXbtPattern(this, 'computed_pools_vecs/Btcc'), - bwPool: createAXbtPattern(this, 'computed_pools_vecs/BwPool'), - bytePool: createAXbtPattern(this, 'computed_pools_vecs/BytePool'), - canoe: createAXbtPattern(this, 'computed_pools_vecs/Canoe'), - canoePool: createAXbtPattern(this, 
'computed_pools_vecs/CanoePool'), - carbonNegative: createAXbtPattern(this, 'computed_pools_vecs/CarbonNegative'), - ckPool: createAXbtPattern(this, 'computed_pools_vecs/CkPool'), - cloudHashing: createAXbtPattern(this, 'computed_pools_vecs/CloudHashing'), - coinLab: createAXbtPattern(this, 'computed_pools_vecs/CoinLab'), - cointerra: createAXbtPattern(this, 'computed_pools_vecs/Cointerra'), - connectBtc: createAXbtPattern(this, 'computed_pools_vecs/ConnectBtc'), - dPool: createAXbtPattern(this, 'computed_pools_vecs/DPool'), - dcExploration: createAXbtPattern(this, 'computed_pools_vecs/DcExploration'), - dcex: createAXbtPattern(this, 'computed_pools_vecs/Dcex'), - digitalBtc: createAXbtPattern(this, 'computed_pools_vecs/DigitalBtc'), - digitalXMintsy: createAXbtPattern(this, 'computed_pools_vecs/DigitalXMintsy'), - eclipseMc: createAXbtPattern(this, 'computed_pools_vecs/EclipseMc'), - eightBaochi: createAXbtPattern(this, 'computed_pools_vecs/EightBaochi'), - ekanemBtc: createAXbtPattern(this, 'computed_pools_vecs/EkanemBtc'), - eligius: createAXbtPattern(this, 'computed_pools_vecs/Eligius'), - emcdPool: createAXbtPattern(this, 'computed_pools_vecs/EmcdPool'), - entrustCharityPool: createAXbtPattern(this, 'computed_pools_vecs/EntrustCharityPool'), - eobot: createAXbtPattern(this, 'computed_pools_vecs/Eobot'), - exxBw: createAXbtPattern(this, 'computed_pools_vecs/ExxBw'), - f2Pool: createAXbtPattern(this, 'computed_pools_vecs/F2Pool'), - fiftyEightCoin: createAXbtPattern(this, 'computed_pools_vecs/FiftyEightCoin'), - foundryUsa: createAXbtPattern(this, 'computed_pools_vecs/FoundryUsa'), - futureBitApolloSolo: createAXbtPattern(this, 'computed_pools_vecs/FutureBitApolloSolo'), - gbMiners: createAXbtPattern(this, 'computed_pools_vecs/GbMiners'), - ghashIo: createAXbtPattern(this, 'computed_pools_vecs/GhashIo'), - giveMeCoins: createAXbtPattern(this, 'computed_pools_vecs/GiveMeCoins'), - goGreenLight: createAXbtPattern(this, 'computed_pools_vecs/GoGreenLight'), - 
haoZhuZhu: createAXbtPattern(this, 'computed_pools_vecs/HaoZhuZhu'), - haominer: createAXbtPattern(this, 'computed_pools_vecs/Haominer'), - hashBx: createAXbtPattern(this, 'computed_pools_vecs/HashBx'), - hashPool: createAXbtPattern(this, 'computed_pools_vecs/HashPool'), - helix: createAXbtPattern(this, 'computed_pools_vecs/Helix'), - hhtt: createAXbtPattern(this, 'computed_pools_vecs/Hhtt'), - hotPool: createAXbtPattern(this, 'computed_pools_vecs/HotPool'), - hummerpool: createAXbtPattern(this, 'computed_pools_vecs/Hummerpool'), - huobiPool: createAXbtPattern(this, 'computed_pools_vecs/HuobiPool'), - innopolisTech: createAXbtPattern(this, 'computed_pools_vecs/InnopolisTech'), - kanoPool: createAXbtPattern(this, 'computed_pools_vecs/KanoPool'), - kncMiner: createAXbtPattern(this, 'computed_pools_vecs/KncMiner'), - kuCoinPool: createAXbtPattern(this, 'computed_pools_vecs/KuCoinPool'), - lubianCom: createAXbtPattern(this, 'computed_pools_vecs/LubianCom'), - luckyPool: createAXbtPattern(this, 'computed_pools_vecs/LuckyPool'), - luxor: createAXbtPattern(this, 'computed_pools_vecs/Luxor'), - maraPool: createAXbtPattern(this, 'computed_pools_vecs/MaraPool'), - maxBtc: createAXbtPattern(this, 'computed_pools_vecs/MaxBtc'), - maxiPool: createAXbtPattern(this, 'computed_pools_vecs/MaxiPool'), - megaBigPower: createAXbtPattern(this, 'computed_pools_vecs/MegaBigPower'), - minerium: createAXbtPattern(this, 'computed_pools_vecs/Minerium'), - miningCity: createAXbtPattern(this, 'computed_pools_vecs/MiningCity'), - miningDutch: createAXbtPattern(this, 'computed_pools_vecs/MiningDutch'), - miningKings: createAXbtPattern(this, 'computed_pools_vecs/MiningKings'), - miningSquared: createAXbtPattern(this, 'computed_pools_vecs/MiningSquared'), - mmpool: createAXbtPattern(this, 'computed_pools_vecs/Mmpool'), - mtRed: createAXbtPattern(this, 'computed_pools_vecs/MtRed'), - multiCoinCo: createAXbtPattern(this, 'computed_pools_vecs/MultiCoinCo'), - multipool: createAXbtPattern(this, 
'computed_pools_vecs/Multipool'), - myBtcCoinPool: createAXbtPattern(this, 'computed_pools_vecs/MyBtcCoinPool'), - neopool: createAXbtPattern(this, 'computed_pools_vecs/Neopool'), - nexious: createAXbtPattern(this, 'computed_pools_vecs/Nexious'), - niceHash: createAXbtPattern(this, 'computed_pools_vecs/NiceHash'), - nmcBit: createAXbtPattern(this, 'computed_pools_vecs/NmcBit'), - novaBlock: createAXbtPattern(this, 'computed_pools_vecs/NovaBlock'), - ocean: createAXbtPattern(this, 'computed_pools_vecs/Ocean'), - okExPool: createAXbtPattern(this, 'computed_pools_vecs/OkExPool'), - okMiner: createAXbtPattern(this, 'computed_pools_vecs/OkMiner'), - okkong: createAXbtPattern(this, 'computed_pools_vecs/Okkong'), - okpoolTop: createAXbtPattern(this, 'computed_pools_vecs/OkpoolTop'), - oneHash: createAXbtPattern(this, 'computed_pools_vecs/OneHash'), - oneM1x: createAXbtPattern(this, 'computed_pools_vecs/OneM1x'), - oneThash: createAXbtPattern(this, 'computed_pools_vecs/OneThash'), - ozCoin: createAXbtPattern(this, 'computed_pools_vecs/OzCoin'), - pHashIo: createAXbtPattern(this, 'computed_pools_vecs/PHashIo'), - parasite: createAXbtPattern(this, 'computed_pools_vecs/Parasite'), - patels: createAXbtPattern(this, 'computed_pools_vecs/Patels'), - pegaPool: createAXbtPattern(this, 'computed_pools_vecs/PegaPool'), - phoenix: createAXbtPattern(this, 'computed_pools_vecs/Phoenix'), - polmine: createAXbtPattern(this, 'computed_pools_vecs/Polmine'), - pool175btc: createAXbtPattern(this, 'computed_pools_vecs/Pool175btc'), - pool50btc: createAXbtPattern(this, 'computed_pools_vecs/Pool50btc'), - poolin: createAXbtPattern(this, 'computed_pools_vecs/Poolin'), - portlandHodl: createAXbtPattern(this, 'computed_pools_vecs/PortlandHodl'), - publicPool: createAXbtPattern(this, 'computed_pools_vecs/PublicPool'), - pureBtcCom: createAXbtPattern(this, 'computed_pools_vecs/PureBtcCom'), - rawpool: createAXbtPattern(this, 'computed_pools_vecs/Rawpool'), - rigPool: createAXbtPattern(this, 
'computed_pools_vecs/RigPool'), - sbiCrypto: createAXbtPattern(this, 'computed_pools_vecs/SbiCrypto'), - secPool: createAXbtPattern(this, 'computed_pools_vecs/SecPool'), - secretSuperstar: createAXbtPattern(this, 'computed_pools_vecs/SecretSuperstar'), - sevenPool: createAXbtPattern(this, 'computed_pools_vecs/SevenPool'), - shawnP0wers: createAXbtPattern(this, 'computed_pools_vecs/ShawnP0wers'), - sigmapoolCom: createAXbtPattern(this, 'computed_pools_vecs/SigmapoolCom'), - simplecoinUs: createAXbtPattern(this, 'computed_pools_vecs/SimplecoinUs'), - soloCk: createAXbtPattern(this, 'computed_pools_vecs/SoloCk'), - spiderPool: createAXbtPattern(this, 'computed_pools_vecs/SpiderPool'), - stMiningCorp: createAXbtPattern(this, 'computed_pools_vecs/StMiningCorp'), - tangpool: createAXbtPattern(this, 'computed_pools_vecs/Tangpool'), - tatmasPool: createAXbtPattern(this, 'computed_pools_vecs/TatmasPool'), - tbDice: createAXbtPattern(this, 'computed_pools_vecs/TbDice'), - telco214: createAXbtPattern(this, 'computed_pools_vecs/Telco214'), - terraPool: createAXbtPattern(this, 'computed_pools_vecs/TerraPool'), - tiger: createAXbtPattern(this, 'computed_pools_vecs/Tiger'), - tigerpoolNet: createAXbtPattern(this, 'computed_pools_vecs/TigerpoolNet'), - titan: createAXbtPattern(this, 'computed_pools_vecs/Titan'), - transactionCoinMining: createAXbtPattern(this, 'computed_pools_vecs/TransactionCoinMining'), - trickysBtcPool: createAXbtPattern(this, 'computed_pools_vecs/TrickysBtcPool'), - tripleMining: createAXbtPattern(this, 'computed_pools_vecs/TripleMining'), - twentyOneInc: createAXbtPattern(this, 'computed_pools_vecs/TwentyOneInc'), - ultimusPool: createAXbtPattern(this, 'computed_pools_vecs/UltimusPool'), - unknown: createAXbtPattern(this, 'computed_pools_vecs/Unknown'), - unomp: createAXbtPattern(this, 'computed_pools_vecs/Unomp'), - viaBtc: createAXbtPattern(this, 'computed_pools_vecs/ViaBtc'), - waterhole: createAXbtPattern(this, 'computed_pools_vecs/Waterhole'), - wayiCn: 
createAXbtPattern(this, 'computed_pools_vecs/WayiCn'), - whitePool: createAXbtPattern(this, 'computed_pools_vecs/WhitePool'), - wk057: createAXbtPattern(this, 'computed_pools_vecs/Wk057'), - yourbtcNet: createAXbtPattern(this, 'computed_pools_vecs/YourbtcNet'), - zulupool: createAXbtPattern(this, 'computed_pools_vecs/Zulupool') - } - }, - price: { - priceClose: createIndexes3(this, '/price_close'), - priceCloseInCents: createIndexes13(this, '/price_close_in_cents'), - priceCloseInSats: createIndexes3(this, '/price_close_in_sats'), - priceHigh: createIndexes3(this, '/price_high'), - priceHighInCents: createIndexes13(this, '/price_high_in_cents'), - priceHighInSats: createIndexes3(this, '/price_high_in_sats'), - priceLow: createIndexes3(this, '/price_low'), - priceLowInCents: createIndexes13(this, '/price_low_in_cents'), - priceLowInSats: createIndexes3(this, '/price_low_in_sats'), - priceOhlc: createIndexes3(this, '/price_ohlc'), - priceOhlcInSats: createIndexes3(this, '/price_ohlc_in_sats'), - priceOpen: createIndexes3(this, '/price_open'), - priceOpenInCents: createIndexes13(this, '/price_open_in_cents'), - priceOpenInSats: createIndexes3(this, '/price_open_in_sats') - }, - stateful: { - addrCount: createIndexes3(this, '/addr_count'), + distribution: { + addrCount: createMetricPattern1(this, 'addr_count'), addressCohorts: { amountRange: { - _0sats: create_0satsPattern(this, 'computed_stateful_address_cohorts_amount_range/_0sats'), - _100btcTo1kBtc: create_0satsPattern(this, 'computed_stateful_address_cohorts_amount_range/_100btc_to_1k_btc'), - _100kBtcOrMore: create_0satsPattern(this, 'computed_stateful_address_cohorts_amount_range/_100k_btc_or_more'), - _100kSatsTo1mSats: create_0satsPattern(this, 'computed_stateful_address_cohorts_amount_range/_100k_sats_to_1m_sats'), - _100satsTo1kSats: create_0satsPattern(this, 'computed_stateful_address_cohorts_amount_range/_100sats_to_1k_sats'), - _10btcTo100btc: create_0satsPattern(this, 
'computed_stateful_address_cohorts_amount_range/_10btc_to_100btc'), - _10kBtcTo100kBtc: create_0satsPattern(this, 'computed_stateful_address_cohorts_amount_range/_10k_btc_to_100k_btc'), - _10kSatsTo100kSats: create_0satsPattern(this, 'computed_stateful_address_cohorts_amount_range/_10k_sats_to_100k_sats'), - _10mSatsTo1btc: create_0satsPattern(this, 'computed_stateful_address_cohorts_amount_range/_10m_sats_to_1btc'), - _10satsTo100sats: create_0satsPattern(this, 'computed_stateful_address_cohorts_amount_range/_10sats_to_100sats'), - _1btcTo10btc: create_0satsPattern(this, 'computed_stateful_address_cohorts_amount_range/_1btc_to_10btc'), - _1kBtcTo10kBtc: create_0satsPattern(this, 'computed_stateful_address_cohorts_amount_range/_1k_btc_to_10k_btc'), - _1kSatsTo10kSats: create_0satsPattern(this, 'computed_stateful_address_cohorts_amount_range/_1k_sats_to_10k_sats'), - _1mSatsTo10mSats: create_0satsPattern(this, 'computed_stateful_address_cohorts_amount_range/_1m_sats_to_10m_sats'), - _1satTo10sats: create_0satsPattern(this, 'computed_stateful_address_cohorts_amount_range/_1sat_to_10sats') + _0sats: create_0satsPattern(this, 'addrs_with_0sats'), + _100btcTo1kBtc: create_0satsPattern(this, 'addrs_above_100btc_under_1k_btc'), + _100kBtcOrMore: create_0satsPattern(this, 'addrs_above_100k_btc'), + _100kSatsTo1mSats: create_0satsPattern(this, 'addrs_above_100k_sats_under_1m_sats'), + _100satsTo1kSats: create_0satsPattern(this, 'addrs_above_100sats_under_1k_sats'), + _10btcTo100btc: create_0satsPattern(this, 'addrs_above_10btc_under_100btc'), + _10kBtcTo100kBtc: create_0satsPattern(this, 'addrs_above_10k_btc_under_100k_btc'), + _10kSatsTo100kSats: create_0satsPattern(this, 'addrs_above_10k_sats_under_100k_sats'), + _10mSatsTo1btc: create_0satsPattern(this, 'addrs_above_10m_sats_under_1btc'), + _10satsTo100sats: create_0satsPattern(this, 'addrs_above_10sats_under_100sats'), + _1btcTo10btc: create_0satsPattern(this, 'addrs_above_1btc_under_10btc'), + _1kBtcTo10kBtc: 
create_0satsPattern(this, 'addrs_above_1k_btc_under_10k_btc'), + _1kSatsTo10kSats: create_0satsPattern(this, 'addrs_above_1k_sats_under_10k_sats'), + _1mSatsTo10mSats: create_0satsPattern(this, 'addrs_above_1m_sats_under_10m_sats'), + _1satTo10sats: create_0satsPattern(this, 'addrs_above_1sat_under_10sats') }, geAmount: { - _100btc: create_0satsPattern(this, 'computed_stateful_address_cohorts_ge_amount/_100btc'), - _100kSats: create_0satsPattern(this, 'computed_stateful_address_cohorts_ge_amount/_100k_sats'), - _100sats: create_0satsPattern(this, 'computed_stateful_address_cohorts_ge_amount/_100sats'), - _10btc: create_0satsPattern(this, 'computed_stateful_address_cohorts_ge_amount/_10btc'), - _10kBtc: create_0satsPattern(this, 'computed_stateful_address_cohorts_ge_amount/_10k_btc'), - _10kSats: create_0satsPattern(this, 'computed_stateful_address_cohorts_ge_amount/_10k_sats'), - _10mSats: create_0satsPattern(this, 'computed_stateful_address_cohorts_ge_amount/_10m_sats'), - _10sats: create_0satsPattern(this, 'computed_stateful_address_cohorts_ge_amount/_10sats'), - _1btc: create_0satsPattern(this, 'computed_stateful_address_cohorts_ge_amount/_1btc'), - _1kBtc: create_0satsPattern(this, 'computed_stateful_address_cohorts_ge_amount/_1k_btc'), - _1kSats: create_0satsPattern(this, 'computed_stateful_address_cohorts_ge_amount/_1k_sats'), - _1mSats: create_0satsPattern(this, 'computed_stateful_address_cohorts_ge_amount/_1m_sats'), - _1sat: create_0satsPattern(this, 'computed_stateful_address_cohorts_ge_amount/_1sat') + _100btc: create_0satsPattern(this, 'addrs_above_100btc'), + _100kSats: create_0satsPattern(this, 'addrs_above_100k_sats'), + _100sats: create_0satsPattern(this, 'addrs_above_100sats'), + _10btc: create_0satsPattern(this, 'addrs_above_10btc'), + _10kBtc: create_0satsPattern(this, 'addrs_above_10k_btc'), + _10kSats: create_0satsPattern(this, 'addrs_above_10k_sats'), + _10mSats: create_0satsPattern(this, 'addrs_above_10m_sats'), + _10sats: 
create_0satsPattern(this, 'addrs_above_10sats'), + _1btc: create_0satsPattern(this, 'addrs_above_1btc'), + _1kBtc: create_0satsPattern(this, 'addrs_above_1k_btc'), + _1kSats: create_0satsPattern(this, 'addrs_above_1k_sats'), + _1mSats: create_0satsPattern(this, 'addrs_above_1m_sats'), + _1sat: create_0satsPattern(this, 'addrs_above_1sat') }, ltAmount: { - _100btc: create_0satsPattern(this, 'computed_stateful_address_cohorts_lt_amount/_100btc'), - _100kBtc: create_0satsPattern(this, 'computed_stateful_address_cohorts_lt_amount/_100k_btc'), - _100kSats: create_0satsPattern(this, 'computed_stateful_address_cohorts_lt_amount/_100k_sats'), - _100sats: create_0satsPattern(this, 'computed_stateful_address_cohorts_lt_amount/_100sats'), - _10btc: create_0satsPattern(this, 'computed_stateful_address_cohorts_lt_amount/_10btc'), - _10kBtc: create_0satsPattern(this, 'computed_stateful_address_cohorts_lt_amount/_10k_btc'), - _10kSats: create_0satsPattern(this, 'computed_stateful_address_cohorts_lt_amount/_10k_sats'), - _10mSats: create_0satsPattern(this, 'computed_stateful_address_cohorts_lt_amount/_10m_sats'), - _10sats: create_0satsPattern(this, 'computed_stateful_address_cohorts_lt_amount/_10sats'), - _1btc: create_0satsPattern(this, 'computed_stateful_address_cohorts_lt_amount/_1btc'), - _1kBtc: create_0satsPattern(this, 'computed_stateful_address_cohorts_lt_amount/_1k_btc'), - _1kSats: create_0satsPattern(this, 'computed_stateful_address_cohorts_lt_amount/_1k_sats'), - _1mSats: create_0satsPattern(this, 'computed_stateful_address_cohorts_lt_amount/_1m_sats') + _100btc: create_0satsPattern(this, 'addrs_under_100btc'), + _100kBtc: create_0satsPattern(this, 'addrs_under_100k_btc'), + _100kSats: create_0satsPattern(this, 'addrs_under_100k_sats'), + _100sats: create_0satsPattern(this, 'addrs_under_100sats'), + _10btc: create_0satsPattern(this, 'addrs_under_10btc'), + _10kBtc: create_0satsPattern(this, 'addrs_under_10k_btc'), + _10kSats: create_0satsPattern(this, 
'addrs_under_10k_sats'), + _10mSats: create_0satsPattern(this, 'addrs_under_10m_sats'), + _10sats: create_0satsPattern(this, 'addrs_under_10sats'), + _1btc: create_0satsPattern(this, 'addrs_under_1btc'), + _1kBtc: create_0satsPattern(this, 'addrs_under_1k_btc'), + _1kSats: create_0satsPattern(this, 'addrs_under_1k_sats'), + _1mSats: create_0satsPattern(this, 'addrs_under_1m_sats') } }, addressesData: { - empty: createIndexes29(this, '/emptyaddressdata'), - loaded: createIndexes30(this, '/loadedaddressdata') + empty: createMetricPattern46(this, 'emptyaddressdata'), + loaded: createMetricPattern45(this, 'loadedaddressdata') }, - addresstypeToHeightToAddrCount: createAddresstypeToHeightToAddrCountPattern(this, 'computed_stateful/addresstype_to_height_to_addr_count'), - addresstypeToHeightToEmptyAddrCount: createAddresstypeToHeightToAddrCountPattern(this, 'computed_stateful/addresstype_to_height_to_empty_addr_count'), - addresstypeToIndexesToAddrCount: createAddresstypeToHeightToAddrCountPattern(this, 'computed_stateful/addresstype_to_indexes_to_addr_count'), - addresstypeToIndexesToEmptyAddrCount: createAddresstypeToHeightToAddrCountPattern(this, 'computed_stateful/addresstype_to_indexes_to_empty_addr_count'), - anyAddressIndexes: createAddresstypeToHeightToAddrCountPattern(this, 'computed_stateful/any_address_indexes'), - chainState: createIndexes2(this, '/chain'), - emptyAddrCount: createIndexes3(this, '/empty_addr_count'), - emptyaddressindex: createIndexes29(this, '/emptyaddressindex'), - loadedaddressindex: createIndexes30(this, '/loadedaddressindex'), - marketCap: createIndexes26(this, '/market_cap'), - opreturnSupply: createSupplyPattern(this, 'computed_stateful/opreturn_supply'), - unspendableSupply: createSupplyPattern(this, 'computed_stateful/unspendable_supply'), + addresstypeToHeightToAddrCount: createAddresstypeToHeightToAddrCountPattern(this, ''), + addresstypeToHeightToEmptyAddrCount: createAddresstypeToHeightToAddrCountPattern(this, ''), + 
addresstypeToIndexesToAddrCount: createAddresstypeToHeightToAddrCountPattern(this, ''), + addresstypeToIndexesToEmptyAddrCount: createAddresstypeToHeightToAddrCountPattern(this, ''), + anyAddressIndexes: createAddresstypeToHeightToAddrCountPattern(this, 'anyaddressindex'), + chainState: createMetricPattern25(this, 'chain'), + emptyAddrCount: createMetricPattern1(this, 'empty_addr_count'), + emptyaddressindex: createMetricPattern46(this, 'emptyaddressindex'), + loadedaddressindex: createMetricPattern45(this, 'loadedaddressindex'), utxoCohorts: { ageRange: { - _10yTo12y: create_10yTo12yPattern(this, 'computed_stateful_utxo_cohorts_age_range/_10y_to_12y'), - _12yTo15y: create_10yTo12yPattern(this, 'computed_stateful_utxo_cohorts_age_range/_12y_to_15y'), - _1dTo1w: create_10yTo12yPattern(this, 'computed_stateful_utxo_cohorts_age_range/_1d_to_1w'), - _1mTo2m: create_10yTo12yPattern(this, 'computed_stateful_utxo_cohorts_age_range/_1m_to_2m'), - _1wTo1m: create_10yTo12yPattern(this, 'computed_stateful_utxo_cohorts_age_range/_1w_to_1m'), - _1yTo2y: create_10yTo12yPattern(this, 'computed_stateful_utxo_cohorts_age_range/_1y_to_2y'), - _2mTo3m: create_10yTo12yPattern(this, 'computed_stateful_utxo_cohorts_age_range/_2m_to_3m'), - _2yTo3y: create_10yTo12yPattern(this, 'computed_stateful_utxo_cohorts_age_range/_2y_to_3y'), - _3mTo4m: create_10yTo12yPattern(this, 'computed_stateful_utxo_cohorts_age_range/_3m_to_4m'), - _3yTo4y: create_10yTo12yPattern(this, 'computed_stateful_utxo_cohorts_age_range/_3y_to_4y'), - _4mTo5m: create_10yTo12yPattern(this, 'computed_stateful_utxo_cohorts_age_range/_4m_to_5m'), - _4yTo5y: create_10yTo12yPattern(this, 'computed_stateful_utxo_cohorts_age_range/_4y_to_5y'), - _5mTo6m: create_10yTo12yPattern(this, 'computed_stateful_utxo_cohorts_age_range/_5m_to_6m'), - _5yTo6y: create_10yTo12yPattern(this, 'computed_stateful_utxo_cohorts_age_range/_5y_to_6y'), - _6mTo1y: create_10yTo12yPattern(this, 'computed_stateful_utxo_cohorts_age_range/_6m_to_1y'), - 
_6yTo7y: create_10yTo12yPattern(this, 'computed_stateful_utxo_cohorts_age_range/_6y_to_7y'), - _7yTo8y: create_10yTo12yPattern(this, 'computed_stateful_utxo_cohorts_age_range/_7y_to_8y'), - _8yTo10y: create_10yTo12yPattern(this, 'computed_stateful_utxo_cohorts_age_range/_8y_to_10y'), - from15y: create_10yTo12yPattern(this, 'computed_stateful_utxo_cohorts_age_range/from_15y'), - upTo1d: createUpTo1dPattern(this, 'computed_stateful_utxo_cohorts_age_range/up_to_1d') + _10yTo12y: create_10yTo12yPattern(this, 'utxos_at_least_10y_up_to_12y_old'), + _12yTo15y: create_10yTo12yPattern(this, 'utxos_at_least_12y_up_to_15y_old'), + _1dTo1w: create_10yTo12yPattern(this, 'utxos_at_least_1d_up_to_1w_old'), + _1mTo2m: create_10yTo12yPattern(this, 'utxos_at_least_1m_up_to_2m_old'), + _1wTo1m: create_10yTo12yPattern(this, 'utxos_at_least_1w_up_to_1m_old'), + _1yTo2y: create_10yTo12yPattern(this, 'utxos_at_least_1y_up_to_2y_old'), + _2mTo3m: create_10yTo12yPattern(this, 'utxos_at_least_2m_up_to_3m_old'), + _2yTo3y: create_10yTo12yPattern(this, 'utxos_at_least_2y_up_to_3y_old'), + _3mTo4m: create_10yTo12yPattern(this, 'utxos_at_least_3m_up_to_4m_old'), + _3yTo4y: create_10yTo12yPattern(this, 'utxos_at_least_3y_up_to_4y_old'), + _4mTo5m: create_10yTo12yPattern(this, 'utxos_at_least_4m_up_to_5m_old'), + _4yTo5y: create_10yTo12yPattern(this, 'utxos_at_least_4y_up_to_5y_old'), + _5mTo6m: create_10yTo12yPattern(this, 'utxos_at_least_5m_up_to_6m_old'), + _5yTo6y: create_10yTo12yPattern(this, 'utxos_at_least_5y_up_to_6y_old'), + _6mTo1y: create_10yTo12yPattern(this, 'utxos_at_least_6m_up_to_1y_old'), + _6yTo7y: create_10yTo12yPattern(this, 'utxos_at_least_6y_up_to_7y_old'), + _7yTo8y: create_10yTo12yPattern(this, 'utxos_at_least_7y_up_to_8y_old'), + _8yTo10y: create_10yTo12yPattern(this, 'utxos_at_least_8y_up_to_10y_old'), + from15y: create_10yTo12yPattern(this, 'utxos_at_least_15y_old'), + upTo1d: createUpTo1dPattern(this, 'utxos_up_to_1d_old') }, all: { - activity: 
createActivityPattern(this, 'computed_stateful_utxo_cohorts_all/activity'), - pricePaid: createPricePaidPattern2(this, 'computed_stateful_utxo_cohorts_all/price_paid'), - realized: createRealizedPattern3(this, 'computed_stateful_utxo_cohorts_all/realized'), + activity: createActivityPattern2(this, ''), + costBasis: createCostBasisPattern2(this, ''), + realized: createRealizedPattern3(this, ''), relative: { - negUnrealizedLossRelToOwnTotalUnrealizedPnl: createIndexes27(this, '/neg_unrealized_loss_rel_to_own_total_unrealized_pnl'), - netUnrealizedPnlRelToOwnTotalUnrealizedPnl: createIndexes26(this, '/net_unrealized_pnl_rel_to_own_total_unrealized_pnl'), - supplyInLossRelToOwnSupply: createIndexes27(this, '/supply_in_loss_rel_to_own_supply'), - supplyInProfitRelToOwnSupply: createIndexes27(this, '/supply_in_profit_rel_to_own_supply'), - unrealizedLossRelToOwnTotalUnrealizedPnl: createIndexes27(this, '/unrealized_loss_rel_to_own_total_unrealized_pnl'), - unrealizedProfitRelToOwnTotalUnrealizedPnl: createIndexes27(this, '/unrealized_profit_rel_to_own_total_unrealized_pnl') + negUnrealizedLossRelToOwnTotalUnrealizedPnl: createMetricPattern5(this, 'neg_unrealized_loss_rel_to_own_total_unrealized_pnl'), + netUnrealizedPnlRelToOwnTotalUnrealizedPnl: createMetricPattern3(this, 'net_unrealized_pnl_rel_to_own_total_unrealized_pnl'), + supplyInLossRelToOwnSupply: createMetricPattern5(this, 'supply_in_loss_rel_to_own_supply'), + supplyInProfitRelToOwnSupply: createMetricPattern5(this, 'supply_in_profit_rel_to_own_supply'), + unrealizedLossRelToOwnTotalUnrealizedPnl: createMetricPattern5(this, 'unrealized_loss_rel_to_own_total_unrealized_pnl'), + unrealizedProfitRelToOwnTotalUnrealizedPnl: createMetricPattern5(this, 'unrealized_profit_rel_to_own_total_unrealized_pnl') }, - supply: createSupplyPattern2(this, 'computed_stateful_utxo_cohorts_all/supply'), - unrealized: createUnrealizedPattern(this, 'computed_stateful_utxo_cohorts_all/unrealized') + supply: createSupplyPattern3(this, 
''), + unrealized: createUnrealizedPattern(this, '') }, amountRange: { - _0sats: create_0satsPattern2(this, 'computed_stateful_utxo_cohorts_amount_range/_0sats'), - _100btcTo1kBtc: create_0satsPattern2(this, 'computed_stateful_utxo_cohorts_amount_range/_100btc_to_1k_btc'), - _100kBtcOrMore: create_0satsPattern2(this, 'computed_stateful_utxo_cohorts_amount_range/_100k_btc_or_more'), - _100kSatsTo1mSats: create_0satsPattern2(this, 'computed_stateful_utxo_cohorts_amount_range/_100k_sats_to_1m_sats'), - _100satsTo1kSats: create_0satsPattern2(this, 'computed_stateful_utxo_cohorts_amount_range/_100sats_to_1k_sats'), - _10btcTo100btc: create_0satsPattern2(this, 'computed_stateful_utxo_cohorts_amount_range/_10btc_to_100btc'), - _10kBtcTo100kBtc: create_0satsPattern2(this, 'computed_stateful_utxo_cohorts_amount_range/_10k_btc_to_100k_btc'), - _10kSatsTo100kSats: create_0satsPattern2(this, 'computed_stateful_utxo_cohorts_amount_range/_10k_sats_to_100k_sats'), - _10mSatsTo1btc: create_0satsPattern2(this, 'computed_stateful_utxo_cohorts_amount_range/_10m_sats_to_1btc'), - _10satsTo100sats: create_0satsPattern2(this, 'computed_stateful_utxo_cohorts_amount_range/_10sats_to_100sats'), - _1btcTo10btc: create_0satsPattern2(this, 'computed_stateful_utxo_cohorts_amount_range/_1btc_to_10btc'), - _1kBtcTo10kBtc: create_0satsPattern2(this, 'computed_stateful_utxo_cohorts_amount_range/_1k_btc_to_10k_btc'), - _1kSatsTo10kSats: create_0satsPattern2(this, 'computed_stateful_utxo_cohorts_amount_range/_1k_sats_to_10k_sats'), - _1mSatsTo10mSats: create_0satsPattern2(this, 'computed_stateful_utxo_cohorts_amount_range/_1m_sats_to_10m_sats'), - _1satTo10sats: create_0satsPattern2(this, 'computed_stateful_utxo_cohorts_amount_range/_1sat_to_10sats') + _0sats: create_0satsPattern2(this, 'utxos_with_0sats'), + _100btcTo1kBtc: create_0satsPattern2(this, 'utxos_above_100btc_under_1k_btc'), + _100kBtcOrMore: create_0satsPattern2(this, 'utxos_above_100k_btc'), + _100kSatsTo1mSats: 
create_0satsPattern2(this, 'utxos_above_100k_sats_under_1m_sats'), + _100satsTo1kSats: create_0satsPattern2(this, 'utxos_above_100sats_under_1k_sats'), + _10btcTo100btc: create_0satsPattern2(this, 'utxos_above_10btc_under_100btc'), + _10kBtcTo100kBtc: create_0satsPattern2(this, 'utxos_above_10k_btc_under_100k_btc'), + _10kSatsTo100kSats: create_0satsPattern2(this, 'utxos_above_10k_sats_under_100k_sats'), + _10mSatsTo1btc: create_0satsPattern2(this, 'utxos_above_10m_sats_under_1btc'), + _10satsTo100sats: create_0satsPattern2(this, 'utxos_above_10sats_under_100sats'), + _1btcTo10btc: create_0satsPattern2(this, 'utxos_above_1btc_under_10btc'), + _1kBtcTo10kBtc: create_0satsPattern2(this, 'utxos_above_1k_btc_under_10k_btc'), + _1kSatsTo10kSats: create_0satsPattern2(this, 'utxos_above_1k_sats_under_10k_sats'), + _1mSatsTo10mSats: create_0satsPattern2(this, 'utxos_above_1m_sats_under_10m_sats'), + _1satTo10sats: create_0satsPattern2(this, 'utxos_above_1sat_under_10sats') }, epoch: { - _0: create_10yTo12yPattern(this, 'epoch/_0'), - _1: create_10yTo12yPattern(this, 'epoch/_1'), - _2: create_10yTo12yPattern(this, 'epoch/_2'), - _3: create_10yTo12yPattern(this, 'epoch/_3'), - _4: create_10yTo12yPattern(this, 'epoch/_4') + _0: create_10yTo12yPattern(this, 'epoch_0'), + _1: create_10yTo12yPattern(this, 'epoch_1'), + _2: create_10yTo12yPattern(this, 'epoch_2'), + _3: create_10yTo12yPattern(this, 'epoch_3'), + _4: create_10yTo12yPattern(this, 'epoch_4') }, geAmount: { - _100btc: create_0satsPattern2(this, 'computed_stateful_utxo_cohorts_ge_amount/_100btc'), - _100kSats: create_0satsPattern2(this, 'computed_stateful_utxo_cohorts_ge_amount/_100k_sats'), - _100sats: create_0satsPattern2(this, 'computed_stateful_utxo_cohorts_ge_amount/_100sats'), - _10btc: create_0satsPattern2(this, 'computed_stateful_utxo_cohorts_ge_amount/_10btc'), - _10kBtc: create_0satsPattern2(this, 'computed_stateful_utxo_cohorts_ge_amount/_10k_btc'), - _10kSats: create_0satsPattern2(this, 
'computed_stateful_utxo_cohorts_ge_amount/_10k_sats'), - _10mSats: create_0satsPattern2(this, 'computed_stateful_utxo_cohorts_ge_amount/_10m_sats'), - _10sats: create_0satsPattern2(this, 'computed_stateful_utxo_cohorts_ge_amount/_10sats'), - _1btc: create_0satsPattern2(this, 'computed_stateful_utxo_cohorts_ge_amount/_1btc'), - _1kBtc: create_0satsPattern2(this, 'computed_stateful_utxo_cohorts_ge_amount/_1k_btc'), - _1kSats: create_0satsPattern2(this, 'computed_stateful_utxo_cohorts_ge_amount/_1k_sats'), - _1mSats: create_0satsPattern2(this, 'computed_stateful_utxo_cohorts_ge_amount/_1m_sats'), - _1sat: create_0satsPattern2(this, 'computed_stateful_utxo_cohorts_ge_amount/_1sat') + _100btc: create_0satsPattern2(this, 'utxos_above_100btc'), + _100kSats: create_0satsPattern2(this, 'utxos_above_100k_sats'), + _100sats: create_0satsPattern2(this, 'utxos_above_100sats'), + _10btc: create_0satsPattern2(this, 'utxos_above_10btc'), + _10kBtc: create_0satsPattern2(this, 'utxos_above_10k_btc'), + _10kSats: create_0satsPattern2(this, 'utxos_above_10k_sats'), + _10mSats: create_0satsPattern2(this, 'utxos_above_10m_sats'), + _10sats: create_0satsPattern2(this, 'utxos_above_10sats'), + _1btc: create_0satsPattern2(this, 'utxos_above_1btc'), + _1kBtc: create_0satsPattern2(this, 'utxos_above_1k_btc'), + _1kSats: create_0satsPattern2(this, 'utxos_above_1k_sats'), + _1mSats: create_0satsPattern2(this, 'utxos_above_1m_sats'), + _1sat: create_0satsPattern2(this, 'utxos_above_1sat') }, ltAmount: { - _100btc: create_0satsPattern2(this, 'computed_stateful_utxo_cohorts_lt_amount/_100btc'), - _100kBtc: create_0satsPattern2(this, 'computed_stateful_utxo_cohorts_lt_amount/_100k_btc'), - _100kSats: create_0satsPattern2(this, 'computed_stateful_utxo_cohorts_lt_amount/_100k_sats'), - _100sats: create_0satsPattern2(this, 'computed_stateful_utxo_cohorts_lt_amount/_100sats'), - _10btc: create_0satsPattern2(this, 'computed_stateful_utxo_cohorts_lt_amount/_10btc'), - _10kBtc: create_0satsPattern2(this, 
'computed_stateful_utxo_cohorts_lt_amount/_10k_btc'), - _10kSats: create_0satsPattern2(this, 'computed_stateful_utxo_cohorts_lt_amount/_10k_sats'), - _10mSats: create_0satsPattern2(this, 'computed_stateful_utxo_cohorts_lt_amount/_10m_sats'), - _10sats: create_0satsPattern2(this, 'computed_stateful_utxo_cohorts_lt_amount/_10sats'), - _1btc: create_0satsPattern2(this, 'computed_stateful_utxo_cohorts_lt_amount/_1btc'), - _1kBtc: create_0satsPattern2(this, 'computed_stateful_utxo_cohorts_lt_amount/_1k_btc'), - _1kSats: create_0satsPattern2(this, 'computed_stateful_utxo_cohorts_lt_amount/_1k_sats'), - _1mSats: create_0satsPattern2(this, 'computed_stateful_utxo_cohorts_lt_amount/_1m_sats') + _100btc: create_0satsPattern2(this, 'utxos_under_100btc'), + _100kBtc: create_0satsPattern2(this, 'utxos_under_100k_btc'), + _100kSats: create_0satsPattern2(this, 'utxos_under_100k_sats'), + _100sats: create_0satsPattern2(this, 'utxos_under_100sats'), + _10btc: create_0satsPattern2(this, 'utxos_under_10btc'), + _10kBtc: create_0satsPattern2(this, 'utxos_under_10k_btc'), + _10kSats: create_0satsPattern2(this, 'utxos_under_10k_sats'), + _10mSats: create_0satsPattern2(this, 'utxos_under_10m_sats'), + _10sats: create_0satsPattern2(this, 'utxos_under_10sats'), + _1btc: create_0satsPattern2(this, 'utxos_under_1btc'), + _1kBtc: create_0satsPattern2(this, 'utxos_under_1k_btc'), + _1kSats: create_0satsPattern2(this, 'utxos_under_1k_sats'), + _1mSats: create_0satsPattern2(this, 'utxos_under_1m_sats') }, maxAge: { - _10y: createUpTo1dPattern(this, 'computed_stateful_utxo_cohorts_max_age/_10y'), - _12y: createUpTo1dPattern(this, 'computed_stateful_utxo_cohorts_max_age/_12y'), - _15y: createUpTo1dPattern(this, 'computed_stateful_utxo_cohorts_max_age/_15y'), - _1m: createUpTo1dPattern(this, 'computed_stateful_utxo_cohorts_max_age/_1m'), - _1w: createUpTo1dPattern(this, 'computed_stateful_utxo_cohorts_max_age/_1w'), - _1y: createUpTo1dPattern(this, 'computed_stateful_utxo_cohorts_max_age/_1y'), - 
_2m: createUpTo1dPattern(this, 'computed_stateful_utxo_cohorts_max_age/_2m'), - _2y: createUpTo1dPattern(this, 'computed_stateful_utxo_cohorts_max_age/_2y'), - _3m: createUpTo1dPattern(this, 'computed_stateful_utxo_cohorts_max_age/_3m'), - _3y: createUpTo1dPattern(this, 'computed_stateful_utxo_cohorts_max_age/_3y'), - _4m: createUpTo1dPattern(this, 'computed_stateful_utxo_cohorts_max_age/_4m'), - _4y: createUpTo1dPattern(this, 'computed_stateful_utxo_cohorts_max_age/_4y'), - _5m: createUpTo1dPattern(this, 'computed_stateful_utxo_cohorts_max_age/_5m'), - _5y: createUpTo1dPattern(this, 'computed_stateful_utxo_cohorts_max_age/_5y'), - _6m: createUpTo1dPattern(this, 'computed_stateful_utxo_cohorts_max_age/_6m'), - _6y: createUpTo1dPattern(this, 'computed_stateful_utxo_cohorts_max_age/_6y'), - _7y: createUpTo1dPattern(this, 'computed_stateful_utxo_cohorts_max_age/_7y'), - _8y: createUpTo1dPattern(this, 'computed_stateful_utxo_cohorts_max_age/_8y') + _10y: createUpTo1dPattern(this, 'utxos_up_to_10y_old'), + _12y: createUpTo1dPattern(this, 'utxos_up_to_12y_old'), + _15y: createUpTo1dPattern(this, 'utxos_up_to_15y_old'), + _1m: createUpTo1dPattern(this, 'utxos_up_to_1m_old'), + _1w: createUpTo1dPattern(this, 'utxos_up_to_1w_old'), + _1y: createUpTo1dPattern(this, 'utxos_up_to_1y_old'), + _2m: createUpTo1dPattern(this, 'utxos_up_to_2m_old'), + _2y: createUpTo1dPattern(this, 'utxos_up_to_2y_old'), + _3m: createUpTo1dPattern(this, 'utxos_up_to_3m_old'), + _3y: createUpTo1dPattern(this, 'utxos_up_to_3y_old'), + _4m: createUpTo1dPattern(this, 'utxos_up_to_4m_old'), + _4y: createUpTo1dPattern(this, 'utxos_up_to_4y_old'), + _5m: createUpTo1dPattern(this, 'utxos_up_to_5m_old'), + _5y: createUpTo1dPattern(this, 'utxos_up_to_5y_old'), + _6m: createUpTo1dPattern(this, 'utxos_up_to_6m_old'), + _6y: createUpTo1dPattern(this, 'utxos_up_to_6y_old'), + _7y: createUpTo1dPattern(this, 'utxos_up_to_7y_old'), + _8y: createUpTo1dPattern(this, 'utxos_up_to_8y_old') }, minAge: { - _10y: 
create_10yTo12yPattern(this, 'computed_stateful_utxo_cohorts_min_age/_10y'), - _12y: create_10yTo12yPattern(this, 'computed_stateful_utxo_cohorts_min_age/_12y'), - _1d: create_10yTo12yPattern(this, 'computed_stateful_utxo_cohorts_min_age/_1d'), - _1m: create_10yTo12yPattern(this, 'computed_stateful_utxo_cohorts_min_age/_1m'), - _1w: create_10yTo12yPattern(this, 'computed_stateful_utxo_cohorts_min_age/_1w'), - _1y: create_10yTo12yPattern(this, 'computed_stateful_utxo_cohorts_min_age/_1y'), - _2m: create_10yTo12yPattern(this, 'computed_stateful_utxo_cohorts_min_age/_2m'), - _2y: create_10yTo12yPattern(this, 'computed_stateful_utxo_cohorts_min_age/_2y'), - _3m: create_10yTo12yPattern(this, 'computed_stateful_utxo_cohorts_min_age/_3m'), - _3y: create_10yTo12yPattern(this, 'computed_stateful_utxo_cohorts_min_age/_3y'), - _4m: create_10yTo12yPattern(this, 'computed_stateful_utxo_cohorts_min_age/_4m'), - _4y: create_10yTo12yPattern(this, 'computed_stateful_utxo_cohorts_min_age/_4y'), - _5m: create_10yTo12yPattern(this, 'computed_stateful_utxo_cohorts_min_age/_5m'), - _5y: create_10yTo12yPattern(this, 'computed_stateful_utxo_cohorts_min_age/_5y'), - _6m: create_10yTo12yPattern(this, 'computed_stateful_utxo_cohorts_min_age/_6m'), - _6y: create_10yTo12yPattern(this, 'computed_stateful_utxo_cohorts_min_age/_6y'), - _7y: create_10yTo12yPattern(this, 'computed_stateful_utxo_cohorts_min_age/_7y'), - _8y: create_10yTo12yPattern(this, 'computed_stateful_utxo_cohorts_min_age/_8y') + _10y: create_10yTo12yPattern(this, 'utxos_at_least_10y_old'), + _12y: create_10yTo12yPattern(this, 'utxos_at_least_12y_old'), + _1d: create_10yTo12yPattern(this, 'utxos_at_least_1d_old'), + _1m: create_10yTo12yPattern(this, 'utxos_at_least_1m_old'), + _1w: create_10yTo12yPattern(this, 'utxos_at_least_1w_old'), + _1y: create_10yTo12yPattern(this, 'utxos_at_least_1y_old'), + _2m: create_10yTo12yPattern(this, 'utxos_at_least_2m_old'), + _2y: create_10yTo12yPattern(this, 'utxos_at_least_2y_old'), + _3m: 
create_10yTo12yPattern(this, 'utxos_at_least_3m_old'), + _3y: create_10yTo12yPattern(this, 'utxos_at_least_3y_old'), + _4m: create_10yTo12yPattern(this, 'utxos_at_least_4m_old'), + _4y: create_10yTo12yPattern(this, 'utxos_at_least_4y_old'), + _5m: create_10yTo12yPattern(this, 'utxos_at_least_5m_old'), + _5y: create_10yTo12yPattern(this, 'utxos_at_least_5y_old'), + _6m: create_10yTo12yPattern(this, 'utxos_at_least_6m_old'), + _6y: create_10yTo12yPattern(this, 'utxos_at_least_6y_old'), + _7y: create_10yTo12yPattern(this, 'utxos_at_least_7y_old'), + _8y: create_10yTo12yPattern(this, 'utxos_at_least_8y_old') }, term: { - long: createUpTo1dPattern(this, 'computed_stateful_utxo_cohorts_term/long'), - short: createUpTo1dPattern(this, 'computed_stateful_utxo_cohorts_term/short') + long: createUpTo1dPattern(this, 'lth'), + short: createUpTo1dPattern(this, 'sth') }, type: { - empty: create_0satsPattern2(this, 'computed_stateful_utxo_cohorts_type_/empty'), - p2a: create_0satsPattern2(this, 'computed_stateful_utxo_cohorts_type_/p2a'), - p2ms: create_0satsPattern2(this, 'computed_stateful_utxo_cohorts_type_/p2ms'), - p2pk33: create_0satsPattern2(this, 'computed_stateful_utxo_cohorts_type_/p2pk33'), - p2pk65: create_0satsPattern2(this, 'computed_stateful_utxo_cohorts_type_/p2pk65'), - p2pkh: create_0satsPattern2(this, 'computed_stateful_utxo_cohorts_type_/p2pkh'), - p2sh: create_0satsPattern2(this, 'computed_stateful_utxo_cohorts_type_/p2sh'), - p2tr: create_0satsPattern2(this, 'computed_stateful_utxo_cohorts_type_/p2tr'), - p2wpkh: create_0satsPattern2(this, 'computed_stateful_utxo_cohorts_type_/p2wpkh'), - p2wsh: create_0satsPattern2(this, 'computed_stateful_utxo_cohorts_type_/p2wsh'), - unknown: create_0satsPattern2(this, 'computed_stateful_utxo_cohorts_type_/unknown') + empty: create_0satsPattern2(this, 'empty_outputs'), + p2a: create_0satsPattern2(this, 'p2a'), + p2ms: create_0satsPattern2(this, 'p2ms'), + p2pk33: create_0satsPattern2(this, 'p2pk33'), + p2pk65: 
create_0satsPattern2(this, 'p2pk65'), + p2pkh: create_0satsPattern2(this, 'p2pkh'), + p2sh: create_0satsPattern2(this, 'p2sh'), + p2tr: create_0satsPattern2(this, 'p2tr'), + p2wpkh: create_0satsPattern2(this, 'p2wpkh'), + p2wsh: create_0satsPattern2(this, 'p2wsh'), + unknown: create_0satsPattern2(this, 'unknown_outputs') }, year: { - _2009: create_10yTo12yPattern(this, 'year/_2009'), - _2010: create_10yTo12yPattern(this, 'year/_2010'), - _2011: create_10yTo12yPattern(this, 'year/_2011'), - _2012: create_10yTo12yPattern(this, 'year/_2012'), - _2013: create_10yTo12yPattern(this, 'year/_2013'), - _2014: create_10yTo12yPattern(this, 'year/_2014'), - _2015: create_10yTo12yPattern(this, 'year/_2015'), - _2016: create_10yTo12yPattern(this, 'year/_2016'), - _2017: create_10yTo12yPattern(this, 'year/_2017'), - _2018: create_10yTo12yPattern(this, 'year/_2018'), - _2019: create_10yTo12yPattern(this, 'year/_2019'), - _2020: create_10yTo12yPattern(this, 'year/_2020'), - _2021: create_10yTo12yPattern(this, 'year/_2021'), - _2022: create_10yTo12yPattern(this, 'year/_2022'), - _2023: create_10yTo12yPattern(this, 'year/_2023'), - _2024: create_10yTo12yPattern(this, 'year/_2024'), - _2025: create_10yTo12yPattern(this, 'year/_2025'), - _2026: create_10yTo12yPattern(this, 'year/_2026') + _2009: create_10yTo12yPattern(this, 'year_2009'), + _2010: create_10yTo12yPattern(this, 'year_2010'), + _2011: create_10yTo12yPattern(this, 'year_2011'), + _2012: create_10yTo12yPattern(this, 'year_2012'), + _2013: create_10yTo12yPattern(this, 'year_2013'), + _2014: create_10yTo12yPattern(this, 'year_2014'), + _2015: create_10yTo12yPattern(this, 'year_2015'), + _2016: create_10yTo12yPattern(this, 'year_2016'), + _2017: create_10yTo12yPattern(this, 'year_2017'), + _2018: create_10yTo12yPattern(this, 'year_2018'), + _2019: create_10yTo12yPattern(this, 'year_2019'), + _2020: create_10yTo12yPattern(this, 'year_2020'), + _2021: create_10yTo12yPattern(this, 'year_2021'), + _2022: 
create_10yTo12yPattern(this, 'year_2022'), + _2023: create_10yTo12yPattern(this, 'year_2023'), + _2024: create_10yTo12yPattern(this, 'year_2024'), + _2025: create_10yTo12yPattern(this, 'year_2025'), + _2026: create_10yTo12yPattern(this, 'year_2026') } } }, - txins: { - txoutindex: createIndexes24(this, '/txoutindex'), - value: createIndexes24(this, '/value') + indexes: { + address: { + emptyoutputindex: createMetricPattern24(this, 'emptyoutputindex'), + opreturnindex: createMetricPattern28(this, 'opreturnindex'), + p2aaddressindex: createMetricPattern30(this, 'p2aaddressindex'), + p2msoutputindex: createMetricPattern31(this, 'p2msoutputindex'), + p2pk33addressindex: createMetricPattern32(this, 'p2pk33addressindex'), + p2pk65addressindex: createMetricPattern33(this, 'p2pk65addressindex'), + p2pkhaddressindex: createMetricPattern34(this, 'p2pkhaddressindex'), + p2shaddressindex: createMetricPattern35(this, 'p2shaddressindex'), + p2traddressindex: createMetricPattern36(this, 'p2traddressindex'), + p2wpkhaddressindex: createMetricPattern37(this, 'p2wpkhaddressindex'), + p2wshaddressindex: createMetricPattern38(this, 'p2wshaddressindex'), + unknownoutputindex: createMetricPattern42(this, 'unknownoutputindex') + }, + block: { + dateindex: createMetricPattern25(this, 'dateindex'), + difficultyepoch: createMetricPattern14(this, 'difficultyepoch'), + firstHeight: createMetricPattern13(this, 'first_height'), + halvingepoch: createMetricPattern15(this, 'halvingepoch'), + height: createMetricPattern25(this, 'height'), + heightCount: createMetricPattern23(this, 'height_count'), + txindexCount: createMetricPattern25(this, 'txindex_count') + }, + time: { + date: createMetricPattern21(this, 'date'), + dateindex: createMetricPattern21(this, 'dateindex'), + dateindexCount: createMetricPattern19(this, 'dateindex_count'), + decadeindex: createMetricPattern12(this, 'decadeindex'), + firstDateindex: createMetricPattern19(this, 'first_dateindex'), + firstHeight: 
createMetricPattern21(this, 'first_height'), + firstMonthindex: createMetricPattern8(this, 'first_monthindex'), + firstYearindex: createMetricPattern22(this, 'first_yearindex'), + heightCount: createMetricPattern21(this, 'height_count'), + monthindex: createMetricPattern10(this, 'monthindex'), + monthindexCount: createMetricPattern8(this, 'monthindex_count'), + quarterindex: createMetricPattern17(this, 'quarterindex'), + semesterindex: createMetricPattern18(this, 'semesterindex'), + weekindex: createMetricPattern11(this, 'weekindex'), + yearindex: createMetricPattern20(this, 'yearindex'), + yearindexCount: createMetricPattern22(this, 'yearindex_count') + }, + transaction: { + inputCount: createMetricPattern41(this, 'input_count'), + outputCount: createMetricPattern41(this, 'output_count'), + txindex: createMetricPattern41(this, 'txindex'), + txinindex: createMetricPattern26(this, 'txinindex'), + txoutindex: createMetricPattern29(this, 'txoutindex') + } }, - txouts: { - txinindex: createIndexes25(this, '/txinindex') + inputs: { + count: { + count: createBlockSizePattern(this, 'input_count') + }, + spent: { + txoutindex: createMetricPattern26(this, 'txoutindex'), + value: createMetricPattern26(this, 'value') + } + }, + market: { + ath: { + daysSincePriceAth: createMetricPattern4(this, 'days_since_price_ath'), + maxDaysBetweenPriceAths: createMetricPattern4(this, 'max_days_between_price_aths'), + maxYearsBetweenPriceAths: createMetricPattern4(this, 'max_years_between_price_aths'), + priceAth: createMetricPattern3(this, 'price_ath'), + priceDrawdown: createMetricPattern3(this, 'price_drawdown') + }, + dca: { + _10yDcaAvgPrice: createMetricPattern4(this, '10y_dca_avg_price'), + _10yDcaCagr: createMetricPattern4(this, '10y_dca_cagr'), + _10yDcaReturns: createMetricPattern4(this, '10y_dca_returns'), + _10yDcaStack: createMetricPattern4(this, '10y_dca_stack'), + _1mDcaAvgPrice: createMetricPattern4(this, '1m_dca_avg_price'), + _1mDcaReturns: createMetricPattern4(this, 
'1m_dca_returns'), + _1mDcaStack: createMetricPattern4(this, '1m_dca_stack'), + _1wDcaAvgPrice: createMetricPattern4(this, '1w_dca_avg_price'), + _1wDcaReturns: createMetricPattern4(this, '1w_dca_returns'), + _1wDcaStack: createMetricPattern4(this, '1w_dca_stack'), + _1yDcaAvgPrice: createMetricPattern4(this, '1y_dca_avg_price'), + _1yDcaReturns: createMetricPattern4(this, '1y_dca_returns'), + _1yDcaStack: createMetricPattern4(this, '1y_dca_stack'), + _2yDcaAvgPrice: createMetricPattern4(this, '2y_dca_avg_price'), + _2yDcaCagr: createMetricPattern4(this, '2y_dca_cagr'), + _2yDcaReturns: createMetricPattern4(this, '2y_dca_returns'), + _2yDcaStack: createMetricPattern4(this, '2y_dca_stack'), + _3mDcaAvgPrice: createMetricPattern4(this, '3m_dca_avg_price'), + _3mDcaReturns: createMetricPattern4(this, '3m_dca_returns'), + _3mDcaStack: createMetricPattern4(this, '3m_dca_stack'), + _3yDcaAvgPrice: createMetricPattern4(this, '3y_dca_avg_price'), + _3yDcaCagr: createMetricPattern4(this, '3y_dca_cagr'), + _3yDcaReturns: createMetricPattern4(this, '3y_dca_returns'), + _3yDcaStack: createMetricPattern4(this, '3y_dca_stack'), + _4yDcaAvgPrice: createMetricPattern4(this, '4y_dca_avg_price'), + _4yDcaCagr: createMetricPattern4(this, '4y_dca_cagr'), + _4yDcaReturns: createMetricPattern4(this, '4y_dca_returns'), + _4yDcaStack: createMetricPattern4(this, '4y_dca_stack'), + _5yDcaAvgPrice: createMetricPattern4(this, '5y_dca_avg_price'), + _5yDcaCagr: createMetricPattern4(this, '5y_dca_cagr'), + _5yDcaReturns: createMetricPattern4(this, '5y_dca_returns'), + _5yDcaStack: createMetricPattern4(this, '5y_dca_stack'), + _6mDcaAvgPrice: createMetricPattern4(this, '6m_dca_avg_price'), + _6mDcaReturns: createMetricPattern4(this, '6m_dca_returns'), + _6mDcaStack: createMetricPattern4(this, '6m_dca_stack'), + _6yDcaAvgPrice: createMetricPattern4(this, '6y_dca_avg_price'), + _6yDcaCagr: createMetricPattern4(this, '6y_dca_cagr'), + _6yDcaReturns: createMetricPattern4(this, '6y_dca_returns'), + 
_6yDcaStack: createMetricPattern4(this, '6y_dca_stack'), + _8yDcaAvgPrice: createMetricPattern4(this, '8y_dca_avg_price'), + _8yDcaCagr: createMetricPattern4(this, '8y_dca_cagr'), + _8yDcaReturns: createMetricPattern4(this, '8y_dca_returns'), + _8yDcaStack: createMetricPattern4(this, '8y_dca_stack'), + dcaClass2015AvgPrice: createMetricPattern4(this, 'dca_class_2015_avg_price'), + dcaClass2015Returns: createMetricPattern4(this, 'dca_class_2015_returns'), + dcaClass2015Stack: createMetricPattern4(this, 'dca_class_2015_stack'), + dcaClass2016AvgPrice: createMetricPattern4(this, 'dca_class_2016_avg_price'), + dcaClass2016Returns: createMetricPattern4(this, 'dca_class_2016_returns'), + dcaClass2016Stack: createMetricPattern4(this, 'dca_class_2016_stack'), + dcaClass2017AvgPrice: createMetricPattern4(this, 'dca_class_2017_avg_price'), + dcaClass2017Returns: createMetricPattern4(this, 'dca_class_2017_returns'), + dcaClass2017Stack: createMetricPattern4(this, 'dca_class_2017_stack'), + dcaClass2018AvgPrice: createMetricPattern4(this, 'dca_class_2018_avg_price'), + dcaClass2018Returns: createMetricPattern4(this, 'dca_class_2018_returns'), + dcaClass2018Stack: createMetricPattern4(this, 'dca_class_2018_stack'), + dcaClass2019AvgPrice: createMetricPattern4(this, 'dca_class_2019_avg_price'), + dcaClass2019Returns: createMetricPattern4(this, 'dca_class_2019_returns'), + dcaClass2019Stack: createMetricPattern4(this, 'dca_class_2019_stack'), + dcaClass2020AvgPrice: createMetricPattern4(this, 'dca_class_2020_avg_price'), + dcaClass2020Returns: createMetricPattern4(this, 'dca_class_2020_returns'), + dcaClass2020Stack: createMetricPattern4(this, 'dca_class_2020_stack'), + dcaClass2021AvgPrice: createMetricPattern4(this, 'dca_class_2021_avg_price'), + dcaClass2021Returns: createMetricPattern4(this, 'dca_class_2021_returns'), + dcaClass2021Stack: createMetricPattern4(this, 'dca_class_2021_stack'), + dcaClass2022AvgPrice: createMetricPattern4(this, 'dca_class_2022_avg_price'), + 
dcaClass2022Returns: createMetricPattern4(this, 'dca_class_2022_returns'), + dcaClass2022Stack: createMetricPattern4(this, 'dca_class_2022_stack'), + dcaClass2023AvgPrice: createMetricPattern4(this, 'dca_class_2023_avg_price'), + dcaClass2023Returns: createMetricPattern4(this, 'dca_class_2023_returns'), + dcaClass2023Stack: createMetricPattern4(this, 'dca_class_2023_stack'), + dcaClass2024AvgPrice: createMetricPattern4(this, 'dca_class_2024_avg_price'), + dcaClass2024Returns: createMetricPattern4(this, 'dca_class_2024_returns'), + dcaClass2024Stack: createMetricPattern4(this, 'dca_class_2024_stack'), + dcaClass2025AvgPrice: createMetricPattern4(this, 'dca_class_2025_avg_price'), + dcaClass2025Returns: createMetricPattern4(this, 'dca_class_2025_returns'), + dcaClass2025Stack: createMetricPattern4(this, 'dca_class_2025_stack') + }, + indicators: { + gini: createMetricPattern21(this, 'gini'), + macdHistogram: createMetricPattern21(this, 'macd_histogram'), + macdLine: createMetricPattern21(this, 'macd_line'), + macdSignal: createMetricPattern21(this, 'macd_signal'), + nvt: createMetricPattern21(this, 'nvt'), + piCycle: createMetricPattern21(this, 'pi_cycle'), + puellMultiple: createMetricPattern4(this, 'puell_multiple'), + rsi14d: createMetricPattern21(this, 'rsi_14d'), + rsi14dMax: createMetricPattern21(this, 'rsi_14d_max'), + rsi14dMin: createMetricPattern21(this, 'rsi_14d_min'), + rsiAvgGain14d: createMetricPattern21(this, 'rsi_avg_gain_14d'), + rsiAvgLoss14d: createMetricPattern21(this, 'rsi_avg_loss_14d'), + rsiGains: createMetricPattern21(this, 'rsi_gains'), + rsiLosses: createMetricPattern21(this, 'rsi_losses'), + stochD: createMetricPattern21(this, 'stoch_d'), + stochK: createMetricPattern21(this, 'stoch_k'), + stochRsi: createMetricPattern21(this, 'stoch_rsi'), + stochRsiD: createMetricPattern21(this, 'stoch_rsi_d'), + stochRsiK: createMetricPattern21(this, 'stoch_rsi_k') + }, + lookback: { + price10yAgo: createMetricPattern4(this, 'price_10y_ago'), + 
price1dAgo: createMetricPattern4(this, 'price_1d_ago'), + price1mAgo: createMetricPattern4(this, 'price_1m_ago'), + price1wAgo: createMetricPattern4(this, 'price_1w_ago'), + price1yAgo: createMetricPattern4(this, 'price_1y_ago'), + price2yAgo: createMetricPattern4(this, 'price_2y_ago'), + price3mAgo: createMetricPattern4(this, 'price_3m_ago'), + price3yAgo: createMetricPattern4(this, 'price_3y_ago'), + price4yAgo: createMetricPattern4(this, 'price_4y_ago'), + price5yAgo: createMetricPattern4(this, 'price_5y_ago'), + price6mAgo: createMetricPattern4(this, 'price_6m_ago'), + price6yAgo: createMetricPattern4(this, 'price_6y_ago'), + price8yAgo: createMetricPattern4(this, 'price_8y_ago') + }, + movingAverage: { + price111dSma: createPrice111dSmaPattern(this, 'price_111d_sma'), + price12dEma: createPrice111dSmaPattern(this, 'price_12d_ema'), + price13dEma: createPrice111dSmaPattern(this, 'price_13d_ema'), + price13dSma: createPrice111dSmaPattern(this, 'price_13d_sma'), + price144dEma: createPrice111dSmaPattern(this, 'price_144d_ema'), + price144dSma: createPrice111dSmaPattern(this, 'price_144d_sma'), + price1mEma: createPrice111dSmaPattern(this, 'price_1m_ema'), + price1mSma: createPrice111dSmaPattern(this, 'price_1m_sma'), + price1wEma: createPrice111dSmaPattern(this, 'price_1w_ema'), + price1wSma: createPrice111dSmaPattern(this, 'price_1w_sma'), + price1yEma: createPrice111dSmaPattern(this, 'price_1y_ema'), + price1ySma: createPrice111dSmaPattern(this, 'price_1y_sma'), + price200dEma: createPrice111dSmaPattern(this, 'price_200d_ema'), + price200dSma: createPrice111dSmaPattern(this, 'price_200d_sma'), + price200dSmaX08: createMetricPattern4(this, 'price_200d_sma_x0_8'), + price200dSmaX24: createMetricPattern4(this, 'price_200d_sma_x2_4'), + price200wEma: createPrice111dSmaPattern(this, 'price_200w_ema'), + price200wSma: createPrice111dSmaPattern(this, 'price_200w_sma'), + price21dEma: createPrice111dSmaPattern(this, 'price_21d_ema'), + price21dSma: 
createPrice111dSmaPattern(this, 'price_21d_sma'), + price26dEma: createPrice111dSmaPattern(this, 'price_26d_ema'), + price2yEma: createPrice111dSmaPattern(this, 'price_2y_ema'), + price2ySma: createPrice111dSmaPattern(this, 'price_2y_sma'), + price34dEma: createPrice111dSmaPattern(this, 'price_34d_ema'), + price34dSma: createPrice111dSmaPattern(this, 'price_34d_sma'), + price350dSma: createPrice111dSmaPattern(this, 'price_350d_sma'), + price350dSmaX2: createMetricPattern4(this, 'price_350d_sma_x2'), + price4yEma: createPrice111dSmaPattern(this, 'price_4y_ema'), + price4ySma: createPrice111dSmaPattern(this, 'price_4y_sma'), + price55dEma: createPrice111dSmaPattern(this, 'price_55d_ema'), + price55dSma: createPrice111dSmaPattern(this, 'price_55d_sma'), + price89dEma: createPrice111dSmaPattern(this, 'price_89d_ema'), + price89dSma: createPrice111dSmaPattern(this, 'price_89d_sma'), + price8dEma: createPrice111dSmaPattern(this, 'price_8d_ema'), + price8dSma: createPrice111dSmaPattern(this, 'price_8d_sma') + }, + range: { + price1mMax: createMetricPattern4(this, 'price_1m_max'), + price1mMin: createMetricPattern4(this, 'price_1m_min'), + price1wMax: createMetricPattern4(this, 'price_1w_max'), + price1wMin: createMetricPattern4(this, 'price_1w_min'), + price1yMax: createMetricPattern4(this, 'price_1y_max'), + price1yMin: createMetricPattern4(this, 'price_1y_min'), + price2wChoppinessIndex: createMetricPattern4(this, 'price_2w_choppiness_index'), + price2wMax: createMetricPattern4(this, 'price_2w_max'), + price2wMin: createMetricPattern4(this, 'price_2w_min'), + priceTrueRange: createMetricPattern21(this, 'price_true_range'), + priceTrueRange2wSum: createMetricPattern21(this, 'price_true_range_2w_sum') + }, + returns: { + _1dReturns1mSd: create_1dReturns1mSdPattern(this, '1d_returns_1m_sd'), + _1dReturns1wSd: create_1dReturns1mSdPattern(this, '1d_returns_1w_sd'), + _1dReturns1ySd: create_1dReturns1mSdPattern(this, '1d_returns_1y_sd'), + _10yCagr: createMetricPattern4(this, 
'10y_cagr'), + _10yPriceReturns: createMetricPattern4(this, '10y_price_returns'), + _1dPriceReturns: createMetricPattern4(this, '1d_price_returns'), + _1mPriceReturns: createMetricPattern4(this, '1m_price_returns'), + _1wPriceReturns: createMetricPattern4(this, '1w_price_returns'), + _1yPriceReturns: createMetricPattern4(this, '1y_price_returns'), + _2yCagr: createMetricPattern4(this, '2y_cagr'), + _2yPriceReturns: createMetricPattern4(this, '2y_price_returns'), + _3mPriceReturns: createMetricPattern4(this, '3m_price_returns'), + _3yCagr: createMetricPattern4(this, '3y_cagr'), + _3yPriceReturns: createMetricPattern4(this, '3y_price_returns'), + _4yCagr: createMetricPattern4(this, '4y_cagr'), + _4yPriceReturns: createMetricPattern4(this, '4y_price_returns'), + _5yCagr: createMetricPattern4(this, '5y_cagr'), + _5yPriceReturns: createMetricPattern4(this, '5y_price_returns'), + _6mPriceReturns: createMetricPattern4(this, '6m_price_returns'), + _6yCagr: createMetricPattern4(this, '6y_cagr'), + _6yPriceReturns: createMetricPattern4(this, '6y_price_returns'), + _8yCagr: createMetricPattern4(this, '8y_cagr'), + _8yPriceReturns: createMetricPattern4(this, '8y_price_returns'), + downside1mSd: create_1dReturns1mSdPattern(this, 'downside_1m_sd'), + downside1wSd: create_1dReturns1mSdPattern(this, 'downside_1w_sd'), + downside1ySd: create_1dReturns1mSdPattern(this, 'downside_1y_sd'), + downsideReturns: createMetricPattern21(this, 'downside_returns') + }, + volatility: { + price1mVolatility: createMetricPattern4(this, 'price_1m_volatility'), + price1wVolatility: createMetricPattern4(this, 'price_1w_volatility'), + price1yVolatility: createMetricPattern4(this, 'price_1y_volatility'), + sharpe1m: createMetricPattern21(this, 'sharpe_1m'), + sharpe1w: createMetricPattern21(this, 'sharpe_1w'), + sharpe1y: createMetricPattern21(this, 'sharpe_1y'), + sortino1m: createMetricPattern21(this, 'sortino_1m'), + sortino1w: createMetricPattern21(this, 'sortino_1w'), + sortino1y: 
createMetricPattern21(this, 'sortino_1y') + } + }, + outputs: { + count: { + count: createBlockSizePattern(this, 'output_count'), + utxoCount: createBitcoinPattern(this, 'exact_utxo_count') + }, + spent: { + txinindex: createMetricPattern29(this, 'txinindex') + } + }, + pools: { + pool: createMetricPattern25(this, 'pool'), + vecs: { + aXbt: createAXbtPattern(this, 'axbt'), + aaoPool: createAXbtPattern(this, 'aaopool'), + antPool: createAXbtPattern(this, 'antpool'), + arkPool: createAXbtPattern(this, 'arkpool'), + asicMiner: createAXbtPattern(this, 'asicminer'), + batPool: createAXbtPattern(this, 'batpool'), + bcMonster: createAXbtPattern(this, 'bcmonster'), + bcpoolIo: createAXbtPattern(this, 'bcpoolio'), + binancePool: createAXbtPattern(this, 'binancepool'), + bitClub: createAXbtPattern(this, 'bitclub'), + bitFuFuPool: createAXbtPattern(this, 'bitfufupool'), + bitFury: createAXbtPattern(this, 'bitfury'), + bitMinter: createAXbtPattern(this, 'bitminter'), + bitalo: createAXbtPattern(this, 'bitalo'), + bitcoinAffiliateNetwork: createAXbtPattern(this, 'bitcoinaffiliatenetwork'), + bitcoinCom: createAXbtPattern(this, 'bitcoincom'), + bitcoinIndia: createAXbtPattern(this, 'bitcoinindia'), + bitcoinRussia: createAXbtPattern(this, 'bitcoinrussia'), + bitcoinUkraine: createAXbtPattern(this, 'bitcoinukraine'), + bitfarms: createAXbtPattern(this, 'bitfarms'), + bitparking: createAXbtPattern(this, 'bitparking'), + bitsolo: createAXbtPattern(this, 'bitsolo'), + bixin: createAXbtPattern(this, 'bixin'), + blockFills: createAXbtPattern(this, 'blockfills'), + braiinsPool: createAXbtPattern(this, 'braiinspool'), + bravoMining: createAXbtPattern(this, 'bravomining'), + btPool: createAXbtPattern(this, 'btpool'), + btcCom: createAXbtPattern(this, 'btccom'), + btcDig: createAXbtPattern(this, 'btcdig'), + btcGuild: createAXbtPattern(this, 'btcguild'), + btcLab: createAXbtPattern(this, 'btclab'), + btcMp: createAXbtPattern(this, 'btcmp'), + btcNuggets: createAXbtPattern(this, 
'btcnuggets'), + btcPoolParty: createAXbtPattern(this, 'btcpoolparty'), + btcServ: createAXbtPattern(this, 'btcserv'), + btcTop: createAXbtPattern(this, 'btctop'), + btcc: createAXbtPattern(this, 'btcc'), + bwPool: createAXbtPattern(this, 'bwpool'), + bytePool: createAXbtPattern(this, 'bytepool'), + canoe: createAXbtPattern(this, 'canoe'), + canoePool: createAXbtPattern(this, 'canoepool'), + carbonNegative: createAXbtPattern(this, 'carbonnegative'), + ckPool: createAXbtPattern(this, 'ckpool'), + cloudHashing: createAXbtPattern(this, 'cloudhashing'), + coinLab: createAXbtPattern(this, 'coinlab'), + cointerra: createAXbtPattern(this, 'cointerra'), + connectBtc: createAXbtPattern(this, 'connectbtc'), + dPool: createAXbtPattern(this, 'dpool'), + dcExploration: createAXbtPattern(this, 'dcexploration'), + dcex: createAXbtPattern(this, 'dcex'), + digitalBtc: createAXbtPattern(this, 'digitalbtc'), + digitalXMintsy: createAXbtPattern(this, 'digitalxmintsy'), + eclipseMc: createAXbtPattern(this, 'eclipsemc'), + eightBaochi: createAXbtPattern(this, 'eightbaochi'), + ekanemBtc: createAXbtPattern(this, 'ekanembtc'), + eligius: createAXbtPattern(this, 'eligius'), + emcdPool: createAXbtPattern(this, 'emcdpool'), + entrustCharityPool: createAXbtPattern(this, 'entrustcharitypool'), + eobot: createAXbtPattern(this, 'eobot'), + exxBw: createAXbtPattern(this, 'exxbw'), + f2Pool: createAXbtPattern(this, 'f2pool'), + fiftyEightCoin: createAXbtPattern(this, 'fiftyeightcoin'), + foundryUsa: createAXbtPattern(this, 'foundryusa'), + futureBitApolloSolo: createAXbtPattern(this, 'futurebitapollosolo'), + gbMiners: createAXbtPattern(this, 'gbminers'), + ghashIo: createAXbtPattern(this, 'ghashio'), + giveMeCoins: createAXbtPattern(this, 'givemecoins'), + goGreenLight: createAXbtPattern(this, 'gogreenlight'), + haoZhuZhu: createAXbtPattern(this, 'haozhuzhu'), + haominer: createAXbtPattern(this, 'haominer'), + hashBx: createAXbtPattern(this, 'hashbx'), + hashPool: createAXbtPattern(this, 
'hashpool'), + helix: createAXbtPattern(this, 'helix'), + hhtt: createAXbtPattern(this, 'hhtt'), + hotPool: createAXbtPattern(this, 'hotpool'), + hummerpool: createAXbtPattern(this, 'hummerpool'), + huobiPool: createAXbtPattern(this, 'huobipool'), + innopolisTech: createAXbtPattern(this, 'innopolistech'), + kanoPool: createAXbtPattern(this, 'kanopool'), + kncMiner: createAXbtPattern(this, 'kncminer'), + kuCoinPool: createAXbtPattern(this, 'kucoinpool'), + lubianCom: createAXbtPattern(this, 'lubiancom'), + luckyPool: createAXbtPattern(this, 'luckypool'), + luxor: createAXbtPattern(this, 'luxor'), + maraPool: createAXbtPattern(this, 'marapool'), + maxBtc: createAXbtPattern(this, 'maxbtc'), + maxiPool: createAXbtPattern(this, 'maxipool'), + megaBigPower: createAXbtPattern(this, 'megabigpower'), + minerium: createAXbtPattern(this, 'minerium'), + miningCity: createAXbtPattern(this, 'miningcity'), + miningDutch: createAXbtPattern(this, 'miningdutch'), + miningKings: createAXbtPattern(this, 'miningkings'), + miningSquared: createAXbtPattern(this, 'miningsquared'), + mmpool: createAXbtPattern(this, 'mmpool'), + mtRed: createAXbtPattern(this, 'mtred'), + multiCoinCo: createAXbtPattern(this, 'multicoinco'), + multipool: createAXbtPattern(this, 'multipool'), + myBtcCoinPool: createAXbtPattern(this, 'mybtccoinpool'), + neopool: createAXbtPattern(this, 'neopool'), + nexious: createAXbtPattern(this, 'nexious'), + niceHash: createAXbtPattern(this, 'nicehash'), + nmcBit: createAXbtPattern(this, 'nmcbit'), + novaBlock: createAXbtPattern(this, 'novablock'), + ocean: createAXbtPattern(this, 'ocean'), + okExPool: createAXbtPattern(this, 'okexpool'), + okMiner: createAXbtPattern(this, 'okminer'), + okkong: createAXbtPattern(this, 'okkong'), + okpoolTop: createAXbtPattern(this, 'okpooltop'), + oneHash: createAXbtPattern(this, 'onehash'), + oneM1x: createAXbtPattern(this, 'onem1x'), + oneThash: createAXbtPattern(this, 'onethash'), + ozCoin: createAXbtPattern(this, 'ozcoin'), + pHashIo: 
createAXbtPattern(this, 'phashio'), + parasite: createAXbtPattern(this, 'parasite'), + patels: createAXbtPattern(this, 'patels'), + pegaPool: createAXbtPattern(this, 'pegapool'), + phoenix: createAXbtPattern(this, 'phoenix'), + polmine: createAXbtPattern(this, 'polmine'), + pool175btc: createAXbtPattern(this, 'pool175btc'), + pool50btc: createAXbtPattern(this, 'pool50btc'), + poolin: createAXbtPattern(this, 'poolin'), + portlandHodl: createAXbtPattern(this, 'portlandhodl'), + publicPool: createAXbtPattern(this, 'publicpool'), + pureBtcCom: createAXbtPattern(this, 'purebtccom'), + rawpool: createAXbtPattern(this, 'rawpool'), + rigPool: createAXbtPattern(this, 'rigpool'), + sbiCrypto: createAXbtPattern(this, 'sbicrypto'), + secPool: createAXbtPattern(this, 'secpool'), + secretSuperstar: createAXbtPattern(this, 'secretsuperstar'), + sevenPool: createAXbtPattern(this, 'sevenpool'), + shawnP0wers: createAXbtPattern(this, 'shawnp0wers'), + sigmapoolCom: createAXbtPattern(this, 'sigmapoolcom'), + simplecoinUs: createAXbtPattern(this, 'simplecoinus'), + soloCk: createAXbtPattern(this, 'solock'), + spiderPool: createAXbtPattern(this, 'spiderpool'), + stMiningCorp: createAXbtPattern(this, 'stminingcorp'), + tangpool: createAXbtPattern(this, 'tangpool'), + tatmasPool: createAXbtPattern(this, 'tatmaspool'), + tbDice: createAXbtPattern(this, 'tbdice'), + telco214: createAXbtPattern(this, 'telco214'), + terraPool: createAXbtPattern(this, 'terrapool'), + tiger: createAXbtPattern(this, 'tiger'), + tigerpoolNet: createAXbtPattern(this, 'tigerpoolnet'), + titan: createAXbtPattern(this, 'titan'), + transactionCoinMining: createAXbtPattern(this, 'transactioncoinmining'), + trickysBtcPool: createAXbtPattern(this, 'trickysbtcpool'), + tripleMining: createAXbtPattern(this, 'triplemining'), + twentyOneInc: createAXbtPattern(this, 'twentyoneinc'), + ultimusPool: createAXbtPattern(this, 'ultimuspool'), + unknown: createAXbtPattern(this, 'unknown'), + unomp: createAXbtPattern(this, 'unomp'), 
+ viaBtc: createAXbtPattern(this, 'viabtc'), + waterhole: createAXbtPattern(this, 'waterhole'), + wayiCn: createAXbtPattern(this, 'wayicn'), + whitePool: createAXbtPattern(this, 'whitepool'), + wk057: createAXbtPattern(this, 'wk057'), + yourbtcNet: createAXbtPattern(this, 'yourbtcnet'), + zulupool: createAXbtPattern(this, 'zulupool') + } + }, + positions: { + position: createMetricPattern16(this, 'position') + }, + price: { + ohlc: { + ohlcInCents: createMetricPattern9(this, 'ohlc_in_cents') + }, + sats: { + priceCloseInSats: createMetricPattern1(this, 'price_close_in_sats'), + priceHighInSats: createMetricPattern1(this, 'price_high_in_sats'), + priceLowInSats: createMetricPattern1(this, 'price_low_in_sats'), + priceOhlcInSats: createMetricPattern1(this, 'price_ohlc_in_sats'), + priceOpenInSats: createMetricPattern1(this, 'price_open_in_sats') + }, + usd: { + priceClose: createMetricPattern1(this, 'price_close'), + priceCloseInCents: createMetricPattern9(this, 'price_close_in_cents'), + priceHigh: createMetricPattern1(this, 'price_high'), + priceHighInCents: createMetricPattern9(this, 'price_high_in_cents'), + priceLow: createMetricPattern1(this, 'price_low'), + priceLowInCents: createMetricPattern9(this, 'price_low_in_cents'), + priceOhlc: createMetricPattern1(this, 'price_ohlc'), + priceOpen: createMetricPattern1(this, 'price_open'), + priceOpenInCents: createMetricPattern9(this, 'price_open_in_cents') + } + }, + scripts: { + count: { + emptyoutputCount: createBitcoinPattern(this, 'emptyoutput_count'), + opreturnCount: createBitcoinPattern(this, 'opreturn_count'), + p2aCount: createBitcoinPattern(this, 'p2a_count'), + p2msCount: createBitcoinPattern(this, 'p2ms_count'), + p2pk33Count: createBitcoinPattern(this, 'p2pk33_count'), + p2pk65Count: createBitcoinPattern(this, 'p2pk65_count'), + p2pkhCount: createBitcoinPattern(this, 'p2pkh_count'), + p2shCount: createBitcoinPattern(this, 'p2sh_count'), + p2trCount: createBitcoinPattern(this, 'p2tr_count'), + 
p2wpkhCount: createBitcoinPattern(this, 'p2wpkh_count'), + p2wshCount: createBitcoinPattern(this, 'p2wsh_count'), + segwitAdoption: createSegwitAdoptionPattern(this, 'segwit_adoption'), + segwitCount: createBitcoinPattern(this, 'segwit_count'), + taprootAdoption: createSegwitAdoptionPattern(this, 'taproot_adoption'), + unknownoutputCount: createBitcoinPattern(this, 'unknownoutput_count') + }, + value: { + opreturnValue: { + base: createMetricPattern25(this, 'opreturn_value'), + bitcoin: createSegwitAdoptionPattern(this, 'opreturn_value_btc'), + dollars: createSegwitAdoptionPattern(this, 'opreturn_value_usd'), + sats: { + average: createMetricPattern2(this, 'opreturn_value_avg'), + cumulative: createMetricPattern1(this, 'opreturn_value_cumulative'), + max: createMetricPattern2(this, 'opreturn_value_max'), + min: createMetricPattern2(this, 'opreturn_value_min'), + sum: createMetricPattern2(this, 'opreturn_value_sum') + } + } + } + }, + supply: { + burned: { + opreturn: createOpreturnPattern(this, 'opreturn_supply'), + unspendable: createOpreturnPattern(this, 'unspendable_supply') + }, + circulating: { + btc: createMetricPattern25(this, 'circulating_btc'), + indexes: createActiveSupplyPattern(this, 'circulating'), + sats: createMetricPattern25(this, 'circulating_sats'), + usd: createMetricPattern25(this, 'circulating_usd') + }, + inflation: { + indexes: createMetricPattern4(this, 'inflation_rate') + }, + marketCap: { + height: createMetricPattern25(this, 'market_cap'), + indexes: createMetricPattern4(this, 'market_cap') + }, + velocity: { + btc: createMetricPattern4(this, 'btc_velocity'), + usd: createMetricPattern4(this, 'usd_velocity') + } + }, + transactions: { + count: { + isCoinbase: createMetricPattern41(this, 'is_coinbase'), + txCount: createBitcoinPattern(this, 'tx_count') + }, + fees: { + fee: { + base: createMetricPattern41(this, 'fee'), + bitcoin: createBlockSizePattern(this, 'fee_btc'), + bitcoinTxindex: createMetricPattern41(this, 'fee_btc'), + dollars: 
createBlockSizePattern(this, 'fee_usd'), + dollarsTxindex: createMetricPattern41(this, 'fee_usd'), + sats: createBlockSizePattern(this, 'fee') + }, + feeRate: { + average: createMetricPattern1(this, 'fee_rate_avg'), + base: createMetricPattern41(this, 'fee_rate'), + max: createMetricPattern1(this, 'fee_rate_max'), + median: createMetricPattern25(this, 'fee_rate_median'), + min: createMetricPattern1(this, 'fee_rate_min'), + pct10: createMetricPattern25(this, 'fee_rate_pct10'), + pct25: createMetricPattern25(this, 'fee_rate_pct25'), + pct75: createMetricPattern25(this, 'fee_rate_pct75'), + pct90: createMetricPattern25(this, 'fee_rate_pct90') + }, + inputValue: createMetricPattern41(this, 'input_value'), + outputValue: createMetricPattern41(this, 'output_value') + }, + size: { + txVsize: createBlockIntervalPattern(this, 'tx_vsize'), + txWeight: createBlockIntervalPattern(this, 'tx_weight'), + vsize: createMetricPattern41(this, 'vsize'), + weight: createMetricPattern41(this, 'weight') + }, + versions: { + txV1: createBlockCountPattern(this, 'tx_v1'), + txV2: createBlockCountPattern(this, 'tx_v2'), + txV3: createBlockCountPattern(this, 'tx_v3') + }, + volume: { + annualizedVolume: createMetricPattern4(this, 'annualized_volume'), + annualizedVolumeBtc: createMetricPattern4(this, 'annualized_volume_btc'), + annualizedVolumeUsd: createMetricPattern4(this, 'annualized_volume_usd'), + inputsPerSec: createMetricPattern4(this, 'inputs_per_sec'), + outputsPerSec: createMetricPattern4(this, 'outputs_per_sec'), + sentSum: { + bitcoin: createTotalRealizedPnlPattern(this, 'sent_sum_btc'), + dollars: createMetricPattern1(this, 'sent_sum_usd'), + sats: createMetricPattern1(this, 'sent_sum') + }, + txPerSec: createMetricPattern4(this, 'tx_per_sec') + } } }, indexed: { address: { - firstP2aaddressindex: createIndexes2(this, '/first_p2aaddressindex'), - firstP2pk33addressindex: createIndexes2(this, '/first_p2pk33addressindex'), - firstP2pk65addressindex: createIndexes2(this, 
'/first_p2pk65addressindex'), - firstP2pkhaddressindex: createIndexes2(this, '/first_p2pkhaddressindex'), - firstP2shaddressindex: createIndexes2(this, '/first_p2shaddressindex'), - firstP2traddressindex: createIndexes2(this, '/first_p2traddressindex'), - firstP2wpkhaddressindex: createIndexes2(this, '/first_p2wpkhaddressindex'), - firstP2wshaddressindex: createIndexes2(this, '/first_p2wshaddressindex'), - p2abytes: createIndexes16(this, '/p2abytes'), - p2pk33bytes: createIndexes17(this, '/p2pk33bytes'), - p2pk65bytes: createIndexes18(this, '/p2pk65bytes'), - p2pkhbytes: createIndexes19(this, '/p2pkhbytes'), - p2shbytes: createIndexes20(this, '/p2shbytes'), - p2trbytes: createIndexes21(this, '/p2trbytes'), - p2wpkhbytes: createIndexes22(this, '/p2wpkhbytes'), - p2wshbytes: createIndexes23(this, '/p2wshbytes') + firstP2aaddressindex: createMetricPattern25(this, 'first_p2aaddressindex'), + firstP2pk33addressindex: createMetricPattern25(this, 'first_p2pk33addressindex'), + firstP2pk65addressindex: createMetricPattern25(this, 'first_p2pk65addressindex'), + firstP2pkhaddressindex: createMetricPattern25(this, 'first_p2pkhaddressindex'), + firstP2shaddressindex: createMetricPattern25(this, 'first_p2shaddressindex'), + firstP2traddressindex: createMetricPattern25(this, 'first_p2traddressindex'), + firstP2wpkhaddressindex: createMetricPattern25(this, 'first_p2wpkhaddressindex'), + firstP2wshaddressindex: createMetricPattern25(this, 'first_p2wshaddressindex'), + p2abytes: createMetricPattern30(this, 'p2abytes'), + p2pk33bytes: createMetricPattern32(this, 'p2pk33bytes'), + p2pk65bytes: createMetricPattern33(this, 'p2pk65bytes'), + p2pkhbytes: createMetricPattern34(this, 'p2pkhbytes'), + p2shbytes: createMetricPattern35(this, 'p2shbytes'), + p2trbytes: createMetricPattern36(this, 'p2trbytes'), + p2wpkhbytes: createMetricPattern37(this, 'p2wpkhbytes'), + p2wshbytes: createMetricPattern38(this, 'p2wshbytes') }, block: { - blockhash: createIndexes2(this, '/blockhash'), - 
difficulty: createIndexes2(this, '/difficulty'), - timestamp: createIndexes2(this, '/timestamp'), - totalSize: createIndexes2(this, '/total_size'), - weight: createIndexes2(this, '/weight') + blockhash: createMetricPattern25(this, 'blockhash'), + difficulty: createMetricPattern25(this, 'difficulty'), + timestamp: createMetricPattern25(this, 'timestamp'), + totalSize: createMetricPattern25(this, 'total_size'), + weight: createMetricPattern25(this, 'weight') }, output: { - firstEmptyoutputindex: createIndexes2(this, '/first_emptyoutputindex'), - firstOpreturnindex: createIndexes2(this, '/first_opreturnindex'), - firstP2msoutputindex: createIndexes2(this, '/first_p2msoutputindex'), - firstUnknownoutputindex: createIndexes2(this, '/first_unknownoutputindex'), - txindex: new MetricNode(this, '/txindex') + firstEmptyoutputindex: createMetricPattern25(this, 'first_emptyoutputindex'), + firstOpreturnindex: createMetricPattern25(this, 'first_opreturnindex'), + firstP2msoutputindex: createMetricPattern25(this, 'first_p2msoutputindex'), + firstUnknownoutputindex: createMetricPattern25(this, 'first_unknownoutputindex'), + txindex: createMetricPattern7(this, 'txindex') }, tx: { - baseSize: createIndexes6(this, '/base_size'), - firstTxindex: createIndexes2(this, '/first_txindex'), - firstTxinindex: createIndexes6(this, '/first_txinindex'), - firstTxoutindex: createIndexes6(this, '/first_txoutindex'), - height: createIndexes6(this, '/height'), - isExplicitlyRbf: createIndexes6(this, '/is_explicitly_rbf'), - rawlocktime: createIndexes6(this, '/rawlocktime'), - totalSize: createIndexes6(this, '/total_size'), - txid: createIndexes6(this, '/txid'), - txversion: createIndexes6(this, '/txversion') + baseSize: createMetricPattern41(this, 'base_size'), + firstTxindex: createMetricPattern25(this, 'first_txindex'), + firstTxinindex: createMetricPattern41(this, 'first_txinindex'), + firstTxoutindex: createMetricPattern41(this, 'first_txoutindex'), + height: createMetricPattern41(this, 
'height'), + isExplicitlyRbf: createMetricPattern41(this, 'is_explicitly_rbf'), + rawlocktime: createMetricPattern41(this, 'rawlocktime'), + totalSize: createMetricPattern41(this, 'total_size'), + txid: createMetricPattern41(this, 'txid'), + txversion: createMetricPattern41(this, 'txversion') }, txin: { - firstTxinindex: createIndexes2(this, '/first_txinindex'), - outpoint: createIndexes24(this, '/outpoint'), - outputtype: createIndexes24(this, '/outputtype'), - txindex: createIndexes24(this, '/txindex'), - typeindex: createIndexes24(this, '/typeindex') + firstTxinindex: createMetricPattern25(this, 'first_txinindex'), + outpoint: createMetricPattern26(this, 'outpoint'), + outputtype: createMetricPattern26(this, 'outputtype'), + txindex: createMetricPattern26(this, 'txindex'), + typeindex: createMetricPattern26(this, 'typeindex') }, txout: { - firstTxoutindex: createIndexes2(this, '/first_txoutindex'), - outputtype: createIndexes25(this, '/outputtype'), - txindex: createIndexes25(this, '/txindex'), - typeindex: createIndexes25(this, '/typeindex'), - value: createIndexes25(this, '/value') + firstTxoutindex: createMetricPattern25(this, 'first_txoutindex'), + outputtype: createMetricPattern29(this, 'outputtype'), + txindex: createMetricPattern29(this, 'txindex'), + typeindex: createMetricPattern29(this, 'typeindex'), + value: createMetricPattern29(this, 'value') } } }; @@ -5510,22 +6539,22 @@ class BrkClient extends BrkClientBase { /** * Address information * @description Retrieve comprehensive information about a Bitcoin address including balance, transaction history, UTXOs, and estimated investment metrics. Supports all standard Bitcoin address types (P2PKH, P2SH, P2WPKH, P2WSH, P2TR, etc.). 
- * @param {string} address + * @param {Address} address * @returns {Promise} */ - async getApiAddressByAddress(address) { + async getAddress(address) { return this.get(`/api/address/${address}`); } /** * Address transaction IDs * @description Get transaction IDs for an address, newest first. Use after_txid for pagination. - * @param {string} address + * @param {Address} address * @param {string=} [after_txid] Txid to paginate from (return transactions before this one) - * @param {string=} [limit] Maximum number of results to return. Defaults to 25 if not specified. + * @param {number=} [limit] Maximum number of results to return. Defaults to 25 if not specified. * @returns {Promise} */ - async getApiAddressByAddressTxs(address, after_txid, limit) { + async getAddressTxs(address, after_txid, limit) { const params = new URLSearchParams(); if (after_txid !== undefined) params.set('after_txid', String(after_txid)); if (limit !== undefined) params.set('limit', String(limit)); @@ -5536,12 +6565,12 @@ class BrkClient extends BrkClientBase { /** * Address confirmed transactions * @description Get confirmed transaction IDs for an address, 25 per page. Use ?after_txid= for pagination. - * @param {string} address + * @param {Address} address * @param {string=} [after_txid] Txid to paginate from (return transactions before this one) - * @param {string=} [limit] Maximum number of results to return. Defaults to 25 if not specified. + * @param {number=} [limit] Maximum number of results to return. Defaults to 25 if not specified. 
* @returns {Promise} */ - async getApiAddressByAddressTxsChain(address, after_txid, limit) { + async getAddressTxsChain(address, after_txid, limit) { const params = new URLSearchParams(); if (after_txid !== undefined) params.set('after_txid', String(after_txid)); if (limit !== undefined) params.set('limit', String(limit)); @@ -5552,92 +6581,92 @@ class BrkClient extends BrkClientBase { /** * Address mempool transactions * @description Get unconfirmed transaction IDs for an address from the mempool (up to 50). - * @param {string} address + * @param {Address} address * @returns {Promise} */ - async getApiAddressByAddressTxsMempool(address) { + async getAddressTxsMempool(address) { return this.get(`/api/address/${address}/txs/mempool`); } /** * Address UTXOs * @description Get unspent transaction outputs for an address. - * @param {string} address + * @param {Address} address * @returns {Promise} */ - async getApiAddressByAddressUtxo(address) { + async getAddressUtxo(address) { return this.get(`/api/address/${address}/utxo`); } /** * Block by height * @description Retrieve block information by block height. Returns block metadata including hash, timestamp, difficulty, size, weight, and transaction count. - * @param {string} height + * @param {Height} height * @returns {Promise} */ - async getApiBlockHeightByHeight(height) { + async getBlockHeight(height) { return this.get(`/api/block-height/${height}`); } /** * Block information * @description Retrieve block information by block hash. Returns block metadata including height, timestamp, difficulty, size, weight, and transaction count. - * @param {string} hash + * @param {BlockHash} hash * @returns {Promise} */ - async getApiBlockByHash(hash) { + async getBlockByHash(hash) { return this.get(`/api/block/${hash}`); } /** * Raw block * @description Returns the raw block data in binary format. 
- * @param {string} hash + * @param {BlockHash} hash * @returns {Promise} */ - async getApiBlockByHashRaw(hash) { + async getBlockByHashRaw(hash) { return this.get(`/api/block/${hash}/raw`); } /** * Block status * @description Retrieve the status of a block. Returns whether the block is in the best chain and, if so, its height and the hash of the next block. - * @param {string} hash + * @param {BlockHash} hash * @returns {Promise} */ - async getApiBlockByHashStatus(hash) { + async getBlockByHashStatus(hash) { return this.get(`/api/block/${hash}/status`); } /** * Transaction ID at index * @description Retrieve a single transaction ID at a specific index within a block. Returns plain text txid. - * @param {string} hash Bitcoin block hash - * @param {string} index Transaction index within the block (0-based) + * @param {BlockHash} hash Bitcoin block hash + * @param {TxIndex} index Transaction index within the block (0-based) * @returns {Promise} */ - async getApiBlockByHashTxidByIndex(hash, index) { + async getBlockByHashTxidByIndex(hash, index) { return this.get(`/api/block/${hash}/txid/${index}`); } /** * Block transaction IDs * @description Retrieve all transaction IDs in a block by block hash. - * @param {string} hash + * @param {BlockHash} hash * @returns {Promise} */ - async getApiBlockByHashTxids(hash) { + async getBlockByHashTxids(hash) { return this.get(`/api/block/${hash}/txids`); } /** * Block transactions (paginated) * @description Retrieve transactions in a block by block hash, starting from the specified index. Returns up to 25 transactions at a time. 
- * @param {string} hash Bitcoin block hash - * @param {string} start_index Starting transaction index within the block (0-based) + * @param {BlockHash} hash Bitcoin block hash + * @param {TxIndex} start_index Starting transaction index within the block (0-based) * @returns {Promise} */ - async getApiBlockByHashTxsByStartIndex(hash, start_index) { + async getBlockByHashTxsByStartIndex(hash, start_index) { return this.get(`/api/block/${hash}/txs/${start_index}`); } @@ -5646,17 +6675,17 @@ class BrkClient extends BrkClientBase { * @description Retrieve the last 10 blocks. Returns block metadata for each block. * @returns {Promise} */ - async getApiBlocks() { + async getBlocks() { return this.get(`/api/blocks`); } /** * Blocks from height * @description Retrieve up to 10 blocks going backwards from the given height. For example, height=100 returns blocks 100, 99, 98, ..., 91. Height=0 returns only block 0. - * @param {string} height + * @param {Height} height * @returns {Promise} */ - async getApiBlocksByHeight(height) { + async getBlocksByHeight(height) { return this.get(`/api/blocks/${height}`); } @@ -5665,7 +6694,7 @@ class BrkClient extends BrkClientBase { * @description Get current mempool statistics including transaction count, total vsize, and total fees. * @returns {Promise} */ - async getApiMempoolInfo() { + async getMempoolInfo() { return this.get(`/api/mempool/info`); } @@ -5674,32 +6703,32 @@ class BrkClient extends BrkClientBase { * @description Get all transaction IDs currently in the mempool. * @returns {Promise} */ - async getApiMempoolTxids() { + async getMempoolTxids() { return this.get(`/api/mempool/txids`); } /** * Get supported indexes for a metric * @description Returns the list of indexes are supported by the specified metric. For example, `realized_price` might be available on dateindex, weekindex, and monthindex. 
- * @param {string} metric + * @param {Metric} metric * @returns {Promise} */ - async getApiMetricByMetric(metric) { + async getMetric(metric) { return this.get(`/api/metric/${metric}`); } /** * Get metric data * @description Fetch data for a specific metric at the given index. Use query parameters to filter by date range and format (json/csv). - * @param {string} metric Metric name - * @param {string} index Aggregation index - * @param {string=} [from] Inclusive starting index, if negative counts from end - * @param {string=} [to] Exclusive ending index, if negative counts from end - * @param {string=} [count] Number of values to return (ignored if `to` is set) - * @param {string=} [format] Format of the output + * @param {Metric} metric Metric name + * @param {Index} index Aggregation index + * @param {*=} [from] Inclusive starting index, if negative counts from end + * @param {*=} [to] Exclusive ending index, if negative counts from end + * @param {*=} [count] Number of values to return (ignored if `to` is set) + * @param {Format=} [format] Format of the output * @returns {Promise} */ - async getApiMetricByMetricByIndex(metric, index, from, to, count, format) { + async getMetricByIndex(metric, index, from, to, count, format) { const params = new URLSearchParams(); if (from !== undefined) params.set('from', String(from)); if (to !== undefined) params.set('to', String(to)); @@ -5712,15 +6741,15 @@ class BrkClient extends BrkClientBase { /** * Bulk metric data * @description Fetch multiple metrics in a single request. Supports filtering by index and date range. Returns an array of MetricData objects. 
- * @param {string} [metrics] Requested metrics - * @param {string} [index] Index to query - * @param {string=} [from] Inclusive starting index, if negative counts from end - * @param {string=} [to] Exclusive ending index, if negative counts from end - * @param {string=} [count] Number of values to return (ignored if `to` is set) - * @param {string=} [format] Format of the output + * @param {Metrics} [metrics] Requested metrics + * @param {Index} [index] Index to query + * @param {*=} [from] Inclusive starting index, if negative counts from end + * @param {*=} [to] Exclusive ending index, if negative counts from end + * @param {*=} [count] Number of values to return (ignored if `to` is set) + * @param {Format=} [format] Format of the output * @returns {Promise} */ - async getApiMetricsBulk(metrics, index, from, to, count, format) { + async getMetricsBulk(metrics, index, from, to, count, format) { const params = new URLSearchParams(); params.set('metrics', String(metrics)); params.set('index', String(index)); @@ -5737,7 +6766,7 @@ class BrkClient extends BrkClientBase { * @description Returns the complete hierarchical catalog of available metrics organized as a tree structure. Metrics are grouped by categories and subcategories. Best viewed in an interactive JSON viewer (e.g., Firefox's built-in JSON viewer) for easy navigation of the nested structure. * @returns {Promise} */ - async getApiMetricsCatalog() { + async getMetricsCatalog() { return this.get(`/api/metrics/catalog`); } @@ -5746,7 +6775,7 @@ class BrkClient extends BrkClientBase { * @description Current metric count * @returns {Promise} */ - async getApiMetricsCount() { + async getMetricsCount() { return this.get(`/api/metrics/count`); } @@ -5755,17 +6784,17 @@ class BrkClient extends BrkClientBase { * @description Returns all available indexes with their accepted query aliases. Use any alias when querying metrics. 
* @returns {Promise} */ - async getApiMetricsIndexes() { + async getMetricsIndexes() { return this.get(`/api/metrics/indexes`); } /** * Metrics list * @description Paginated list of available metrics - * @param {string=} [page] Pagination index + * @param {*=} [page] Pagination index * @returns {Promise} */ - async getApiMetricsList(page) { + async getMetricsList(page) { const params = new URLSearchParams(); if (page !== undefined) params.set('page', String(page)); const query = params.toString(); @@ -5775,11 +6804,11 @@ class BrkClient extends BrkClientBase { /** * Search metrics * @description Fuzzy search for metrics by name. Supports partial matches and typos. - * @param {string} metric - * @param {string=} [limit] + * @param {Metric} metric + * @param {Limit=} [limit] * @returns {Promise} */ - async getApiMetricsSearchByMetric(metric, limit) { + async getMetricsSearchByMetric(metric, limit) { const params = new URLSearchParams(); if (limit !== undefined) params.set('limit', String(limit)); const query = params.toString(); @@ -5789,51 +6818,51 @@ class BrkClient extends BrkClientBase { /** * Transaction information * @description Retrieve complete transaction data by transaction ID (txid). Returns the full transaction details including inputs, outputs, and metadata. The transaction data is read directly from the blockchain data files. - * @param {string} txid + * @param {Txid} txid * @returns {Promise} */ - async getApiTxByTxid(txid) { + async getTxByTxid(txid) { return this.get(`/api/tx/${txid}`); } /** * Transaction hex * @description Retrieve the raw transaction as a hex-encoded string. Returns the serialized transaction in hexadecimal format. - * @param {string} txid + * @param {Txid} txid * @returns {Promise} */ - async getApiTxByTxidHex(txid) { + async getTxByTxidHex(txid) { return this.get(`/api/tx/${txid}/hex`); } /** * Output spend status * @description Get the spending status of a transaction output. 
Returns whether the output has been spent and, if so, the spending transaction details. - * @param {string} txid Transaction ID - * @param {string} vout Output index + * @param {Txid} txid Transaction ID + * @param {Vout} vout Output index * @returns {Promise} */ - async getApiTxByTxidOutspendByVout(txid, vout) { + async getTxByTxidOutspendByVout(txid, vout) { return this.get(`/api/tx/${txid}/outspend/${vout}`); } /** * All output spend statuses * @description Get the spending status of all outputs in a transaction. Returns an array with the spend status for each output. - * @param {string} txid + * @param {Txid} txid * @returns {Promise} */ - async getApiTxByTxidOutspends(txid) { + async getTxByTxidOutspends(txid) { return this.get(`/api/tx/${txid}/outspends`); } /** * Transaction status * @description Retrieve the confirmation status of a transaction. Returns whether the transaction is confirmed and, if so, the block height, hash, and timestamp. - * @param {string} txid + * @param {Txid} txid * @returns {Promise} */ - async getApiTxByTxidStatus(txid) { + async getTxByTxidStatus(txid) { return this.get(`/api/tx/${txid}/status`); } @@ -5842,7 +6871,7 @@ class BrkClient extends BrkClientBase { * @description Get current difficulty adjustment information including progress through the current epoch, estimated retarget date, and difficulty change prediction. * @returns {Promise} */ - async getApiV1DifficultyAdjustment() { + async getV1DifficultyAdjustment() { return this.get(`/api/v1/difficulty-adjustment`); } @@ -5851,7 +6880,7 @@ class BrkClient extends BrkClientBase { * @description Get projected blocks from the mempool for fee estimation. Each block contains statistics about transactions that would be included if a block were mined now. 
* @returns {Promise} */ - async getApiV1FeesMempoolBlocks() { + async getV1FeesMempoolBlocks() { return this.get(`/api/v1/fees/mempool-blocks`); } @@ -5860,47 +6889,47 @@ class BrkClient extends BrkClientBase { * @description Get recommended fee rates for different confirmation targets based on current mempool state. * @returns {Promise} */ - async getApiV1FeesRecommended() { + async getV1FeesRecommended() { return this.get(`/api/v1/fees/recommended`); } /** * Block fees * @description Get average block fees for a time period. Valid periods: 24h, 3d, 1w, 1m, 3m, 6m, 1y, 2y, 3y - * @param {string} time_period + * @param {TimePeriod} time_period * @returns {Promise} */ - async getApiV1MiningBlocksFeesByTimePeriod(time_period) { + async getV1MiningBlocksFeesByTimePeriod(time_period) { return this.get(`/api/v1/mining/blocks/fees/${time_period}`); } /** * Block rewards * @description Get average block rewards (coinbase = subsidy + fees) for a time period. Valid periods: 24h, 3d, 1w, 1m, 3m, 6m, 1y, 2y, 3y - * @param {string} time_period + * @param {TimePeriod} time_period * @returns {Promise} */ - async getApiV1MiningBlocksRewardsByTimePeriod(time_period) { + async getV1MiningBlocksRewardsByTimePeriod(time_period) { return this.get(`/api/v1/mining/blocks/rewards/${time_period}`); } /** * Block sizes and weights * @description Get average block sizes and weights for a time period. Valid periods: 24h, 3d, 1w, 1m, 3m, 6m, 1y, 2y, 3y - * @param {string} time_period + * @param {TimePeriod} time_period * @returns {Promise} */ - async getApiV1MiningBlocksSizesWeightsByTimePeriod(time_period) { + async getV1MiningBlocksSizesWeightsByTimePeriod(time_period) { return this.get(`/api/v1/mining/blocks/sizes-weights/${time_period}`); } /** * Block by timestamp * @description Find the block closest to a given UNIX timestamp. 
- * @param {string} timestamp + * @param {Timestamp} timestamp * @returns {Promise} */ - async getApiV1MiningBlocksTimestampByTimestamp(timestamp) { + async getV1MiningBlocksTimestamp(timestamp) { return this.get(`/api/v1/mining/blocks/timestamp/${timestamp}`); } @@ -5909,17 +6938,17 @@ class BrkClient extends BrkClientBase { * @description Get historical difficulty adjustments. Returns array of [timestamp, height, difficulty, change_percent]. * @returns {Promise} */ - async getApiV1MiningDifficultyAdjustments() { + async getV1MiningDifficultyAdjustments() { return this.get(`/api/v1/mining/difficulty-adjustments`); } /** * Difficulty adjustments * @description Get historical difficulty adjustments for a time period. Valid periods: 24h, 3d, 1w, 1m, 3m, 6m, 1y, 2y, 3y. Returns array of [timestamp, height, difficulty, change_percent]. - * @param {string} time_period + * @param {TimePeriod} time_period * @returns {Promise} */ - async getApiV1MiningDifficultyAdjustmentsByTimePeriod(time_period) { + async getV1MiningDifficultyAdjustmentsByTimePeriod(time_period) { return this.get(`/api/v1/mining/difficulty-adjustments/${time_period}`); } @@ -5928,27 +6957,27 @@ class BrkClient extends BrkClientBase { * @description Get network hashrate and difficulty data for all time. * @returns {Promise} */ - async getApiV1MiningHashrate() { + async getV1MiningHashrate() { return this.get(`/api/v1/mining/hashrate`); } /** * Network hashrate * @description Get network hashrate and difficulty data for a time period. Valid periods: 24h, 3d, 1w, 1m, 3m, 6m, 1y, 2y, 3y - * @param {string} time_period + * @param {TimePeriod} time_period * @returns {Promise} */ - async getApiV1MiningHashrateByTimePeriod(time_period) { + async getV1MiningHashrateByTimePeriod(time_period) { return this.get(`/api/v1/mining/hashrate/${time_period}`); } /** * Mining pool details * @description Get detailed information about a specific mining pool including block counts and shares for different time periods. 
- * @param {string} slug + * @param {PoolSlug} slug * @returns {Promise} */ - async getApiV1MiningPoolBySlug(slug) { + async getV1MiningPoolBySlug(slug) { return this.get(`/api/v1/mining/pool/${slug}`); } @@ -5957,27 +6986,27 @@ class BrkClient extends BrkClientBase { * @description Get list of all known mining pools with their identifiers. * @returns {Promise} */ - async getApiV1MiningPools() { + async getV1MiningPools() { return this.get(`/api/v1/mining/pools`); } /** * Mining pool statistics * @description Get mining pool statistics for a time period. Valid periods: 24h, 3d, 1w, 1m, 3m, 6m, 1y, 2y, 3y - * @param {string} time_period + * @param {TimePeriod} time_period * @returns {Promise} */ - async getApiV1MiningPoolsByTimePeriod(time_period) { + async getV1MiningPoolsByTimePeriod(time_period) { return this.get(`/api/v1/mining/pools/${time_period}`); } /** * Mining reward statistics * @description Get mining reward statistics for the last N blocks including total rewards, fees, and transaction count. 
- * @param {string} block_count Number of recent blocks to include + * @param {number} block_count Number of recent blocks to include * @returns {Promise} */ - async getApiV1MiningRewardStatsByBlockCount(block_count) { + async getV1MiningRewardStatsByBlockCount(block_count) { return this.get(`/api/v1/mining/reward-stats/${block_count}`); } @@ -5987,7 +7016,7 @@ class BrkClient extends BrkClientBase { * @param {string} address Bitcoin address to validate (can be any string) * @returns {Promise} */ - async getApiV1ValidateAddressByAddress(address) { + async getV1ValidateAddress(address) { return this.get(`/api/v1/validate-address/${address}`); } @@ -6011,4 +7040,4 @@ class BrkClient extends BrkClientBase { } -export { BrkClient, BrkClientBase, BrkError, MetricNode }; +export { BrkClient, BrkClientBase, BrkError }; diff --git a/modules/brk-client/package.json b/modules/brk-client/package.json new file mode 100644 index 000000000..0ec7acec2 --- /dev/null +++ b/modules/brk-client/package.json @@ -0,0 +1,33 @@ +{ + "name": "@bitcoinresearchkit/client", + "version": "0.1.0-alpha.1", + "description": "BRK JavaScript client", + "type": "module", + "main": "index.js", + "exports": { + ".": "./index.js" + }, + "files": [ + "index.js", + "generated" + ], + "keywords": [ + "brk", + "bitcoin", + "blockchain", + "research" + ], + "license": "MIT", + "repository": { + "type": "git", + "url": "git+https://github.com/bitcoinresearchkit/brk.git", + "directory": "modules/brk-client" + }, + "homepage": "https://github.com/bitcoinresearchkit/brk/tree/main/modules/brk-client", + "bugs": { + "url": "https://github.com/bitcoinresearchkit/brk/issues" + }, + "engines": { + "node": ">=18" + } +} diff --git a/modules/brk-resources/jsconfig.json b/modules/brk-resources/jsconfig.json deleted file mode 100644 index cffb39bbd..000000000 --- a/modules/brk-resources/jsconfig.json +++ /dev/null @@ -1,10 +0,0 @@ -{ - "compilerOptions": { - "checkJs": true, - "strict": true, - "target": "ESNext", - 
"module": "ESNext", - "skipLibCheck": true - }, - "exclude": ["dist"] -} diff --git a/modules/brk-resources/tsconfig.json b/modules/brk-resources/tsconfig.json deleted file mode 100644 index cfe38122e..000000000 --- a/modules/brk-resources/tsconfig.json +++ /dev/null @@ -1,13 +0,0 @@ -{ - "compilerOptions": { - "allowJs": true, - "checkJs": true, - "strict": true, - "target": "ESNext", - "module": "ESNext", - "outDir": "/tmp/brk", - "lib": ["DOM", "DOM.Iterable", "ESNext", "WebWorker"], - "skipLibCheck": true - }, - "exclude": ["dist"] -} diff --git a/modules/brk-signals/jsconfig.json b/modules/brk-signals/jsconfig.json deleted file mode 100644 index cffb39bbd..000000000 --- a/modules/brk-signals/jsconfig.json +++ /dev/null @@ -1,10 +0,0 @@ -{ - "compilerOptions": { - "checkJs": true, - "strict": true, - "target": "ESNext", - "module": "ESNext", - "skipLibCheck": true - }, - "exclude": ["dist"] -} diff --git a/modules/brk-signals/tsconfig.json b/modules/brk-signals/tsconfig.json deleted file mode 100644 index cfe38122e..000000000 --- a/modules/brk-signals/tsconfig.json +++ /dev/null @@ -1,13 +0,0 @@ -{ - "compilerOptions": { - "allowJs": true, - "checkJs": true, - "strict": true, - "target": "ESNext", - "module": "ESNext", - "outDir": "/tmp/brk", - "lib": ["DOM", "DOM.Iterable", "ESNext", "WebWorker"], - "skipLibCheck": true - }, - "exclude": ["dist"] -} diff --git a/packages/brk_client/.python-version b/packages/brk_client/.python-version new file mode 100644 index 000000000..bd28b9c5c --- /dev/null +++ b/packages/brk_client/.python-version @@ -0,0 +1 @@ +3.9 diff --git a/packages/brk_client/README.md b/packages/brk_client/README.md new file mode 100644 index 000000000..c2f717481 --- /dev/null +++ b/packages/brk_client/README.md @@ -0,0 +1,13 @@ +# brk_client + +Python client for the [Bitcoin Research Kit](https://bitcoinresearchkit.org) - a suite of tools to extract, compute and display data stored on a Bitcoin Core node. 
+ +## Installation + +```bash +pip install brk-client +``` + +## License + +MIT diff --git a/packages/brk_client/__init__.py b/packages/brk_client/__init__.py deleted file mode 100644 index 82df7f0e7..000000000 --- a/packages/brk_client/__init__.py +++ /dev/null @@ -1,3499 +0,0 @@ -# Auto-generated BRK Python client -# Do not edit manually - -from __future__ import annotations -from typing import TypeVar, Generic, Any, Optional, List, Literal, TypedDict, Final -import httpx - -T = TypeVar('T') - -# Constants - -VERSION: Final[str] = "v0.1.0-alpha.1" - -INDEXES: Final[tuple[str, ...]] = ( - "dateindex", - "decadeindex", - "difficultyepoch", - "emptyoutputindex", - "halvingepoch", - "height", - "txinindex", - "monthindex", - "opreturnindex", - "txoutindex", - "p2aaddressindex", - "p2msoutputindex", - "p2pk33addressindex", - "p2pk65addressindex", - "p2pkhaddressindex", - "p2shaddressindex", - "p2traddressindex", - "p2wpkhaddressindex", - "p2wshaddressindex", - "quarterindex", - "semesterindex", - "txindex", - "unknownoutputindex", - "weekindex", - "yearindex", - "loadedaddressindex", - "emptyaddressindex", -) - -POOL_ID_TO_POOL_NAME: Final[dict[str, str]] = { - "pool175btc": "175btc", - "onehash": "1Hash", - "onem1x": "1M1X", - "onethash": "1THash", - "twentyoneinc": "21 Inc.", - "pool50btc": "50BTC", - "fiftyeightcoin": "58COIN", - "sevenpool": "7pool", - "eightbaochi": "8baochi", - "axbt": "A-XBT", - "aaopool": "AAO Pool", - "antpool": "AntPool", - "arkpool": "ArkPool", - "asicminer": "ASICMiner", - "batpool": "BATPOOL", - "bcmonster": "BCMonster", - "bcpoolio": "bcpool.io", - "binancepool": "Binance Pool", - "bitalo": "Bitalo", - "bitclub": "BitClub", - "bitcoinaffiliatenetwork": "Bitcoin Affiliate Network", - "bitcoinindia": "Bitcoin India", - "bitcoinukraine": "Bitcoin-Ukraine", - "bitcoincom": "Bitcoin.com", - "bitcoinrussia": "BitcoinRussia", - "bitfarms": "Bitfarms", - "bitfufupool": "BitFuFuPool", - "bitfury": "BitFury", - "bitminter": "BitMinter", - 
"bitparking": "Bitparking", - "bitsolo": "Bitsolo", - "bixin": "Bixin", - "blockfills": "BlockFills", - "braiinspool": "Braiins Pool", - "bravomining": "Bravo Mining", - "btcguild": "BTC Guild", - "btcnuggets": "BTC Nuggets", - "btcpoolparty": "BTC Pool Party", - "btccom": "BTC.com", - "btctop": "BTC.TOP", - "btcc": "BTCC", - "btcdig": "BTCDig", - "btclab": "BTCLab", - "btcmp": "BTCMP", - "btcserv": "BTCServ", - "btpool": "BTPOOL", - "bwpool": "BWPool", - "bytepool": "BytePool", - "canoe": "CANOE", - "canoepool": "CanoePool", - "carbonnegative": "Carbon Negative", - "ckpool": "CKPool", - "cloudhashing": "CloudHashing", - "coinlab": "CoinLab", - "cointerra": "Cointerra", - "connectbtc": "ConnectBTC", - "dcex": "DCEX", - "dcexploration": "DCExploration", - "digitalbtc": "digitalBTC", - "digitalxmintsy": "digitalX Mintsy", - "dpool": "DPOOL", - "eclipsemc": "EclipseMC", - "ekanembtc": "EkanemBTC", - "eligius": "Eligius", - "emcdpool": "EMCDPool", - "entrustcharitypool": "Entrust Charity Pool", - "eobot": "Eobot", - "exxbw": "EXX&BW", - "f2pool": "F2Pool", - "foundryusa": "Foundry USA", - "futurebitapollosolo": "FutureBit Apollo Solo", - "gbminers": "GBMiners", - "ghashio": "GHash.IO", - "givemecoins": "Give Me Coins", - "gogreenlight": "GoGreenLight", - "haominer": "haominer", - "haozhuzhu": "HAOZHUZHU", - "hashbx": "HashBX", - "hashpool": "HASHPOOL", - "helix": "Helix", - "hhtt": "HHTT", - "hotpool": "HotPool", - "hummerpool": "Hummerpool", - "huobipool": "Huobi.pool", - "innopolistech": "Innopolis Tech", - "kanopool": "KanoPool", - "kncminer": "KnCMiner", - "kucoinpool": "KuCoinPool", - "lubiancom": "Lubian.com", - "luckypool": "luckyPool", - "luxor": "Luxor", - "marapool": "MARA Pool", - "maxbtc": "MaxBTC", - "maxipool": "MaxiPool", - "megabigpower": "MegaBigPower", - "minerium": "Minerium", - "miningsquared": "Mining Squared", - "miningdutch": "Mining-Dutch", - "miningcity": "MiningCity", - "miningkings": "MiningKings", - "mmpool": "mmpool", - "mtred": "Mt Red", - 
"multicoinco": "MultiCoin.co", - "multipool": "Multipool", - "mybtccoinpool": "myBTCcoin Pool", - "neopool": "Neopool", - "nexious": "Nexious", - "nicehash": "NiceHash", - "nmcbit": "NMCbit", - "novablock": "NovaBlock", - "ocean": "OCEAN", - "okexpool": "OKExPool", - "okkong": "OKKONG", - "okminer": "OKMINER", - "okpooltop": "okpool.top", - "ozcoin": "OzCoin", - "parasite": "Parasite", - "patels": "Patels", - "pegapool": "PEGA Pool", - "phashio": "PHash.IO", - "phoenix": "Phoenix", - "polmine": "Polmine", - "poolin": "Poolin", - "portlandhodl": "Portland.HODL", - "publicpool": "Public Pool", - "purebtccom": "PureBTC.COM", - "rawpool": "Rawpool", - "rigpool": "RigPool", - "sbicrypto": "SBI Crypto", - "secpool": "SECPOOL", - "secretsuperstar": "SecretSuperstar", - "shawnp0wers": "shawnp0wers", - "sigmapoolcom": "Sigmapool.com", - "simplecoinus": "simplecoin.us", - "solock": "Solo CK", - "spiderpool": "SpiderPool", - "stminingcorp": "ST Mining Corp", - "tangpool": "Tangpool", - "tatmaspool": "TATMAS Pool", - "tbdice": "TBDice", - "telco214": "Telco 214", - "terrapool": "Terra Pool", - "tiger": "tiger", - "tigerpoolnet": "tigerpool.net", - "titan": "Titan", - "transactioncoinmining": "transactioncoinmining", - "trickysbtcpool": "Tricky's BTC Pool", - "triplemining": "TripleMining", - "ultimuspool": "ULTIMUSPOOL", - "unknown": "Unknown", - "unomp": "UNOMP", - "viabtc": "ViaBTC", - "waterhole": "Waterhole", - "wayicn": "WAYI.CN", - "whitepool": "WhitePool", - "wk057": "wk057", - "yourbtcnet": "Yourbtc.net", - "zulupool": "Zulupool", -} - -# Cohort names - -TERM_NAMES: Final = { - "short": { - "id": "sth", - "short": "STH", - "long": "Short Term Holders" - }, - "long": { - "id": "lth", - "short": "LTH", - "long": "Long Term Holders" - } -} - -EPOCH_NAMES: Final = { - "_0": { - "id": "epoch_0", - "short": "Epoch 0", - "long": "Epoch 0" - }, - "_1": { - "id": "epoch_1", - "short": "Epoch 1", - "long": "Epoch 1" - }, - "_2": { - "id": "epoch_2", - "short": "Epoch 2", - 
"long": "Epoch 2" - }, - "_3": { - "id": "epoch_3", - "short": "Epoch 3", - "long": "Epoch 3" - }, - "_4": { - "id": "epoch_4", - "short": "Epoch 4", - "long": "Epoch 4" - } -} - -YEAR_NAMES: Final = { - "_2009": { - "id": "year_2009", - "short": "2009", - "long": "Year 2009" - }, - "_2010": { - "id": "year_2010", - "short": "2010", - "long": "Year 2010" - }, - "_2011": { - "id": "year_2011", - "short": "2011", - "long": "Year 2011" - }, - "_2012": { - "id": "year_2012", - "short": "2012", - "long": "Year 2012" - }, - "_2013": { - "id": "year_2013", - "short": "2013", - "long": "Year 2013" - }, - "_2014": { - "id": "year_2014", - "short": "2014", - "long": "Year 2014" - }, - "_2015": { - "id": "year_2015", - "short": "2015", - "long": "Year 2015" - }, - "_2016": { - "id": "year_2016", - "short": "2016", - "long": "Year 2016" - }, - "_2017": { - "id": "year_2017", - "short": "2017", - "long": "Year 2017" - }, - "_2018": { - "id": "year_2018", - "short": "2018", - "long": "Year 2018" - }, - "_2019": { - "id": "year_2019", - "short": "2019", - "long": "Year 2019" - }, - "_2020": { - "id": "year_2020", - "short": "2020", - "long": "Year 2020" - }, - "_2021": { - "id": "year_2021", - "short": "2021", - "long": "Year 2021" - }, - "_2022": { - "id": "year_2022", - "short": "2022", - "long": "Year 2022" - }, - "_2023": { - "id": "year_2023", - "short": "2023", - "long": "Year 2023" - }, - "_2024": { - "id": "year_2024", - "short": "2024", - "long": "Year 2024" - }, - "_2025": { - "id": "year_2025", - "short": "2025", - "long": "Year 2025" - }, - "_2026": { - "id": "year_2026", - "short": "2026", - "long": "Year 2026" - } -} - -SPENDABLE_TYPE_NAMES: Final = { - "p2pk65": { - "id": "p2pk65", - "short": "P2PK65", - "long": "Pay to Public Key (65 bytes)" - }, - "p2pk33": { - "id": "p2pk33", - "short": "P2PK33", - "long": "Pay to Public Key (33 bytes)" - }, - "p2pkh": { - "id": "p2pkh", - "short": "P2PKH", - "long": "Pay to Public Key Hash" - }, - "p2ms": { - "id": "p2ms", - 
"short": "P2MS", - "long": "Pay to Multisig" - }, - "p2sh": { - "id": "p2sh", - "short": "P2SH", - "long": "Pay to Script Hash" - }, - "p2wpkh": { - "id": "p2wpkh", - "short": "P2WPKH", - "long": "Pay to Witness Public Key Hash" - }, - "p2wsh": { - "id": "p2wsh", - "short": "P2WSH", - "long": "Pay to Witness Script Hash" - }, - "p2tr": { - "id": "p2tr", - "short": "P2TR", - "long": "Pay to Taproot" - }, - "p2a": { - "id": "p2a", - "short": "P2A", - "long": "Pay to Anchor" - }, - "unknown": { - "id": "unknown_outputs", - "short": "Unknown", - "long": "Unknown Output Type" - }, - "empty": { - "id": "empty_outputs", - "short": "Empty", - "long": "Empty Output" - } -} - -AGE_RANGE_NAMES: Final = { - "up_to_1d": { - "id": "up_to_1d_old", - "short": "<1d", - "long": "Up to 1 Day Old" - }, - "_1d_to_1w": { - "id": "at_least_1d_up_to_1w_old", - "short": "1d-1w", - "long": "1 Day to 1 Week Old" - }, - "_1w_to_1m": { - "id": "at_least_1w_up_to_1m_old", - "short": "1w-1m", - "long": "1 Week to 1 Month Old" - }, - "_1m_to_2m": { - "id": "at_least_1m_up_to_2m_old", - "short": "1m-2m", - "long": "1 to 2 Months Old" - }, - "_2m_to_3m": { - "id": "at_least_2m_up_to_3m_old", - "short": "2m-3m", - "long": "2 to 3 Months Old" - }, - "_3m_to_4m": { - "id": "at_least_3m_up_to_4m_old", - "short": "3m-4m", - "long": "3 to 4 Months Old" - }, - "_4m_to_5m": { - "id": "at_least_4m_up_to_5m_old", - "short": "4m-5m", - "long": "4 to 5 Months Old" - }, - "_5m_to_6m": { - "id": "at_least_5m_up_to_6m_old", - "short": "5m-6m", - "long": "5 to 6 Months Old" - }, - "_6m_to_1y": { - "id": "at_least_6m_up_to_1y_old", - "short": "6m-1y", - "long": "6 Months to 1 Year Old" - }, - "_1y_to_2y": { - "id": "at_least_1y_up_to_2y_old", - "short": "1y-2y", - "long": "1 to 2 Years Old" - }, - "_2y_to_3y": { - "id": "at_least_2y_up_to_3y_old", - "short": "2y-3y", - "long": "2 to 3 Years Old" - }, - "_3y_to_4y": { - "id": "at_least_3y_up_to_4y_old", - "short": "3y-4y", - "long": "3 to 4 Years Old" - }, - 
"_4y_to_5y": { - "id": "at_least_4y_up_to_5y_old", - "short": "4y-5y", - "long": "4 to 5 Years Old" - }, - "_5y_to_6y": { - "id": "at_least_5y_up_to_6y_old", - "short": "5y-6y", - "long": "5 to 6 Years Old" - }, - "_6y_to_7y": { - "id": "at_least_6y_up_to_7y_old", - "short": "6y-7y", - "long": "6 to 7 Years Old" - }, - "_7y_to_8y": { - "id": "at_least_7y_up_to_8y_old", - "short": "7y-8y", - "long": "7 to 8 Years Old" - }, - "_8y_to_10y": { - "id": "at_least_8y_up_to_10y_old", - "short": "8y-10y", - "long": "8 to 10 Years Old" - }, - "_10y_to_12y": { - "id": "at_least_10y_up_to_12y_old", - "short": "10y-12y", - "long": "10 to 12 Years Old" - }, - "_12y_to_15y": { - "id": "at_least_12y_up_to_15y_old", - "short": "12y-15y", - "long": "12 to 15 Years Old" - }, - "from_15y": { - "id": "at_least_15y_old", - "short": "15y+", - "long": "15+ Years Old" - } -} - -MAX_AGE_NAMES: Final = { - "_1w": { - "id": "up_to_1w_old", - "short": "<1w", - "long": "Up to 1 Week Old" - }, - "_1m": { - "id": "up_to_1m_old", - "short": "<1m", - "long": "Up to 1 Month Old" - }, - "_2m": { - "id": "up_to_2m_old", - "short": "<2m", - "long": "Up to 2 Months Old" - }, - "_3m": { - "id": "up_to_3m_old", - "short": "<3m", - "long": "Up to 3 Months Old" - }, - "_4m": { - "id": "up_to_4m_old", - "short": "<4m", - "long": "Up to 4 Months Old" - }, - "_5m": { - "id": "up_to_5m_old", - "short": "<5m", - "long": "Up to 5 Months Old" - }, - "_6m": { - "id": "up_to_6m_old", - "short": "<6m", - "long": "Up to 6 Months Old" - }, - "_1y": { - "id": "up_to_1y_old", - "short": "<1y", - "long": "Up to 1 Year Old" - }, - "_2y": { - "id": "up_to_2y_old", - "short": "<2y", - "long": "Up to 2 Years Old" - }, - "_3y": { - "id": "up_to_3y_old", - "short": "<3y", - "long": "Up to 3 Years Old" - }, - "_4y": { - "id": "up_to_4y_old", - "short": "<4y", - "long": "Up to 4 Years Old" - }, - "_5y": { - "id": "up_to_5y_old", - "short": "<5y", - "long": "Up to 5 Years Old" - }, - "_6y": { - "id": "up_to_6y_old", - "short": 
"<6y", - "long": "Up to 6 Years Old" - }, - "_7y": { - "id": "up_to_7y_old", - "short": "<7y", - "long": "Up to 7 Years Old" - }, - "_8y": { - "id": "up_to_8y_old", - "short": "<8y", - "long": "Up to 8 Years Old" - }, - "_10y": { - "id": "up_to_10y_old", - "short": "<10y", - "long": "Up to 10 Years Old" - }, - "_12y": { - "id": "up_to_12y_old", - "short": "<12y", - "long": "Up to 12 Years Old" - }, - "_15y": { - "id": "up_to_15y_old", - "short": "<15y", - "long": "Up to 15 Years Old" - } -} - -MIN_AGE_NAMES: Final = { - "_1d": { - "id": "at_least_1d_old", - "short": "1d+", - "long": "At Least 1 Day Old" - }, - "_1w": { - "id": "at_least_1w_old", - "short": "1w+", - "long": "At Least 1 Week Old" - }, - "_1m": { - "id": "at_least_1m_old", - "short": "1m+", - "long": "At Least 1 Month Old" - }, - "_2m": { - "id": "at_least_2m_old", - "short": "2m+", - "long": "At Least 2 Months Old" - }, - "_3m": { - "id": "at_least_3m_old", - "short": "3m+", - "long": "At Least 3 Months Old" - }, - "_4m": { - "id": "at_least_4m_old", - "short": "4m+", - "long": "At Least 4 Months Old" - }, - "_5m": { - "id": "at_least_5m_old", - "short": "5m+", - "long": "At Least 5 Months Old" - }, - "_6m": { - "id": "at_least_6m_old", - "short": "6m+", - "long": "At Least 6 Months Old" - }, - "_1y": { - "id": "at_least_1y_old", - "short": "1y+", - "long": "At Least 1 Year Old" - }, - "_2y": { - "id": "at_least_2y_old", - "short": "2y+", - "long": "At Least 2 Years Old" - }, - "_3y": { - "id": "at_least_3y_old", - "short": "3y+", - "long": "At Least 3 Years Old" - }, - "_4y": { - "id": "at_least_4y_old", - "short": "4y+", - "long": "At Least 4 Years Old" - }, - "_5y": { - "id": "at_least_5y_old", - "short": "5y+", - "long": "At Least 5 Years Old" - }, - "_6y": { - "id": "at_least_6y_old", - "short": "6y+", - "long": "At Least 6 Years Old" - }, - "_7y": { - "id": "at_least_7y_old", - "short": "7y+", - "long": "At Least 7 Years Old" - }, - "_8y": { - "id": "at_least_8y_old", - "short": "8y+", - 
"long": "At Least 8 Years Old" - }, - "_10y": { - "id": "at_least_10y_old", - "short": "10y+", - "long": "At Least 10 Years Old" - }, - "_12y": { - "id": "at_least_12y_old", - "short": "12y+", - "long": "At Least 12 Years Old" - } -} - -AMOUNT_RANGE_NAMES: Final = { - "_0sats": { - "id": "with_0sats", - "short": "0 sats", - "long": "0 Sats" - }, - "_1sat_to_10sats": { - "id": "above_1sat_under_10sats", - "short": "1-10 sats", - "long": "1 to 10 Sats" - }, - "_10sats_to_100sats": { - "id": "above_10sats_under_100sats", - "short": "10-100 sats", - "long": "10 to 100 Sats" - }, - "_100sats_to_1k_sats": { - "id": "above_100sats_under_1k_sats", - "short": "100-1k sats", - "long": "100 to 1K Sats" - }, - "_1k_sats_to_10k_sats": { - "id": "above_1k_sats_under_10k_sats", - "short": "1k-10k sats", - "long": "1K to 10K Sats" - }, - "_10k_sats_to_100k_sats": { - "id": "above_10k_sats_under_100k_sats", - "short": "10k-100k sats", - "long": "10K to 100K Sats" - }, - "_100k_sats_to_1m_sats": { - "id": "above_100k_sats_under_1m_sats", - "short": "100k-1M sats", - "long": "100K to 1M Sats" - }, - "_1m_sats_to_10m_sats": { - "id": "above_1m_sats_under_10m_sats", - "short": "1M-10M sats", - "long": "1M to 10M Sats" - }, - "_10m_sats_to_1btc": { - "id": "above_10m_sats_under_1btc", - "short": "0.1-1 BTC", - "long": "0.1 to 1 BTC" - }, - "_1btc_to_10btc": { - "id": "above_1btc_under_10btc", - "short": "1-10 BTC", - "long": "1 to 10 BTC" - }, - "_10btc_to_100btc": { - "id": "above_10btc_under_100btc", - "short": "10-100 BTC", - "long": "10 to 100 BTC" - }, - "_100btc_to_1k_btc": { - "id": "above_100btc_under_1k_btc", - "short": "100-1k BTC", - "long": "100 to 1K BTC" - }, - "_1k_btc_to_10k_btc": { - "id": "above_1k_btc_under_10k_btc", - "short": "1k-10k BTC", - "long": "1K to 10K BTC" - }, - "_10k_btc_to_100k_btc": { - "id": "above_10k_btc_under_100k_btc", - "short": "10k-100k BTC", - "long": "10K to 100K BTC" - }, - "_100k_btc_or_more": { - "id": "above_100k_btc", - "short": "100k+ 
BTC", - "long": "100K+ BTC" - } -} - -GE_AMOUNT_NAMES: Final = { - "_1sat": { - "id": "above_1sat", - "short": "1+ sats", - "long": "Above 1 Sat" - }, - "_10sats": { - "id": "above_10sats", - "short": "10+ sats", - "long": "Above 10 Sats" - }, - "_100sats": { - "id": "above_100sats", - "short": "100+ sats", - "long": "Above 100 Sats" - }, - "_1k_sats": { - "id": "above_1k_sats", - "short": "1k+ sats", - "long": "Above 1K Sats" - }, - "_10k_sats": { - "id": "above_10k_sats", - "short": "10k+ sats", - "long": "Above 10K Sats" - }, - "_100k_sats": { - "id": "above_100k_sats", - "short": "100k+ sats", - "long": "Above 100K Sats" - }, - "_1m_sats": { - "id": "above_1m_sats", - "short": "1M+ sats", - "long": "Above 1M Sats" - }, - "_10m_sats": { - "id": "above_10m_sats", - "short": "0.1+ BTC", - "long": "Above 0.1 BTC" - }, - "_1btc": { - "id": "above_1btc", - "short": "1+ BTC", - "long": "Above 1 BTC" - }, - "_10btc": { - "id": "above_10btc", - "short": "10+ BTC", - "long": "Above 10 BTC" - }, - "_100btc": { - "id": "above_100btc", - "short": "100+ BTC", - "long": "Above 100 BTC" - }, - "_1k_btc": { - "id": "above_1k_btc", - "short": "1k+ BTC", - "long": "Above 1K BTC" - }, - "_10k_btc": { - "id": "above_10k_btc", - "short": "10k+ BTC", - "long": "Above 10K BTC" - } -} - -LT_AMOUNT_NAMES: Final = { - "_10sats": { - "id": "under_10sats", - "short": "<10 sats", - "long": "Under 10 Sats" - }, - "_100sats": { - "id": "under_100sats", - "short": "<100 sats", - "long": "Under 100 Sats" - }, - "_1k_sats": { - "id": "under_1k_sats", - "short": "<1k sats", - "long": "Under 1K Sats" - }, - "_10k_sats": { - "id": "under_10k_sats", - "short": "<10k sats", - "long": "Under 10K Sats" - }, - "_100k_sats": { - "id": "under_100k_sats", - "short": "<100k sats", - "long": "Under 100K Sats" - }, - "_1m_sats": { - "id": "under_1m_sats", - "short": "<1M sats", - "long": "Under 1M Sats" - }, - "_10m_sats": { - "id": "under_10m_sats", - "short": "<0.1 BTC", - "long": "Under 0.1 BTC" - }, - 
"_1btc": { - "id": "under_1btc", - "short": "<1 BTC", - "long": "Under 1 BTC" - }, - "_10btc": { - "id": "under_10btc", - "short": "<10 BTC", - "long": "Under 10 BTC" - }, - "_100btc": { - "id": "under_100btc", - "short": "<100 BTC", - "long": "Under 100 BTC" - }, - "_1k_btc": { - "id": "under_1k_btc", - "short": "<1k BTC", - "long": "Under 1K BTC" - }, - "_10k_btc": { - "id": "under_10k_btc", - "short": "<10k BTC", - "long": "Under 10K BTC" - }, - "_100k_btc": { - "id": "under_100k_btc", - "short": "<100k BTC", - "long": "Under 100K BTC" - } -} - -# Type definitions - -Address = str -Sats = int -TypeIndex = int -class AddressChainStats(TypedDict): - funded_txo_count: int - funded_txo_sum: Sats - spent_txo_count: int - spent_txo_sum: Sats - tx_count: int - type_index: TypeIndex - -class AddressMempoolStats(TypedDict): - funded_txo_count: int - funded_txo_sum: Sats - spent_txo_count: int - spent_txo_sum: Sats - tx_count: int - -class AddressParam(TypedDict): - address: Address - -class AddressStats(TypedDict): - address: Address - chain_stats: AddressChainStats - mempool_stats: AddressMempoolStats | None - -Txid = str -class AddressTxidsParam(TypedDict): - after_txid: Txid | None - limit: int - -class AddressValidation(TypedDict): - isvalid: bool - address: Optional[str] - scriptPubKey: Optional[str] - isscript: Optional[bool] - iswitness: Optional[bool] - witness_version: Optional[int] - witness_program: Optional[str] - -AnyAddressIndex = TypeIndex -Bitcoin = float -BlkPosition = int -class BlockCountParam(TypedDict): - block_count: int - -Height = int -Timestamp = int -class BlockFeesEntry(TypedDict): - avgHeight: Height - timestamp: Timestamp - avgFees: Sats - -BlockHash = str -class BlockHashParam(TypedDict): - hash: BlockHash - -TxIndex = int -class BlockHashStartIndex(TypedDict): - hash: BlockHash - start_index: TxIndex - -class BlockHashTxIndex(TypedDict): - hash: BlockHash - index: TxIndex - -Weight = int -class BlockInfo(TypedDict): - id: BlockHash - 
height: Height - tx_count: int - size: int - weight: Weight - timestamp: Timestamp - difficulty: float - -class BlockRewardsEntry(TypedDict): - avgHeight: int - timestamp: int - avgRewards: int - -class BlockSizeEntry(TypedDict): - avgHeight: int - timestamp: int - avgSize: int - -class BlockWeightEntry(TypedDict): - avgHeight: int - timestamp: int - avgWeight: int - -class BlockSizesWeights(TypedDict): - sizes: List[BlockSizeEntry] - weights: List[BlockWeightEntry] - -class BlockStatus(TypedDict): - in_best_chain: bool - height: Height | None - next_best: BlockHash | None - -class BlockTimestamp(TypedDict): - height: Height - hash: BlockHash - timestamp: str - -Cents = int -Close = Cents -Format = Literal["json", "csv"] -class DataRangeFormat(TypedDict): - from_: Optional[int] - to: Optional[int] - count: Optional[int] - format: Format - -Date = int -DateIndex = int -DecadeIndex = int -class DifficultyAdjustment(TypedDict): - progressPercent: float - difficultyChange: float - estimatedRetargetDate: int - remainingBlocks: int - remainingTime: int - previousRetarget: float - nextRetargetHeight: Height - timeAvg: int - adjustedTimeAvg: int - timeOffset: int - -class DifficultyAdjustmentEntry(TypedDict): - timestamp: Timestamp - height: Height - difficulty: float - change_percent: float - -class DifficultyEntry(TypedDict): - timestamp: Timestamp - difficulty: float - height: Height - -DifficultyEpoch = int -Dollars = float -class EmptyAddressData(TypedDict): - tx_count: int - funded_txo_count: int - transfered: Sats - -EmptyAddressIndex = TypeIndex -EmptyOutputIndex = TypeIndex -FeeRate = float -HalvingEpoch = int -class HashrateEntry(TypedDict): - timestamp: Timestamp - avgHashrate: int - -class HashrateSummary(TypedDict): - hashrates: List[HashrateEntry] - difficulty: List[DifficultyEntry] - currentHashrate: int - currentDifficulty: float - -class Health(TypedDict): - status: str - service: str - timestamp: str - -class HeightParam(TypedDict): - height: Height - 
-Hex = str -High = Cents -class IndexInfo(TypedDict): - index: Index - aliases: List[str] - -Limit = int -class LimitParam(TypedDict): - limit: Limit - -class LoadedAddressData(TypedDict): - tx_count: int - funded_txo_count: int - spent_txo_count: int - received: Sats - sent: Sats - realized_cap: Dollars - -LoadedAddressIndex = TypeIndex -Low = Cents -class MempoolBlock(TypedDict): - blockSize: int - blockVSize: float - nTx: int - totalFees: Sats - medianFee: FeeRate - feeRange: List[FeeRate] - -VSize = int -class MempoolInfo(TypedDict): - count: int - vsize: VSize - total_fee: Sats - -Metric = str -class MetricCount(TypedDict): - distinct_metrics: int - total_endpoints: int - lazy_endpoints: int - stored_endpoints: int - -class MetricData(TypedDict): - total: int - from_: int - to: int - data: List[Any] - -class MetricParam(TypedDict): - metric: Metric - -Metrics = str -class MetricSelection(TypedDict): - metrics: Metrics - index: Index - from_: Optional[int] - to: Optional[int] - count: Optional[int] - format: Format - -class MetricSelectionLegacy(TypedDict): - index: Index - ids: Metrics - from_: Optional[int] - to: Optional[int] - count: Optional[int] - format: Format - -class MetricWithIndex(TypedDict): - metric: Metric - index: Index - -MonthIndex = int -Open = Cents -class OHLCCents(TypedDict): - open: Open - high: High - low: Low - close: Close - -class OHLCDollars(TypedDict): - open: Open - high: High - low: Low - close: Close - -class OHLCSats(TypedDict): - open: Open - high: High - low: Low - close: Close - -OpReturnIndex = TypeIndex -OutPoint = int -OutputType = Literal["p2pk65", "p2pk33", "p2pkh", "p2ms", "p2sh", "opreturn", "p2wpkh", "p2wsh", "p2tr", "p2a", "empty", "unknown"] -P2AAddressIndex = TypeIndex -U8x2 = List[int] -P2ABytes = U8x2 -P2MSOutputIndex = TypeIndex -P2PK33AddressIndex = TypeIndex -U8x33 = str -P2PK33Bytes = U8x33 -P2PK65AddressIndex = TypeIndex -U8x65 = str -P2PK65Bytes = U8x65 -P2PKHAddressIndex = TypeIndex -U8x20 = List[int] 
-P2PKHBytes = U8x20 -P2SHAddressIndex = TypeIndex -P2SHBytes = U8x20 -P2TRAddressIndex = TypeIndex -U8x32 = List[int] -P2TRBytes = U8x32 -P2WPKHAddressIndex = TypeIndex -P2WPKHBytes = U8x20 -P2WSHAddressIndex = TypeIndex -P2WSHBytes = U8x32 -class PaginatedMetrics(TypedDict): - current_page: int - max_page: int - metrics: List[str] - -class Pagination(TypedDict): - page: Optional[int] - -class PoolBlockCounts(TypedDict): - all: int - _24h: int - _1w: int - -class PoolBlockShares(TypedDict): - all: float - _24h: float - _1w: float - -PoolSlug = Literal["unknown", "blockfills", "ultimuspool", "terrapool", "luxor", "onethash", "btccom", "bitfarms", "huobipool", "wayicn", "canoepool", "btctop", "bitcoincom", "pool175btc", "gbminers", "axbt", "asicminer", "bitminter", "bitcoinrussia", "btcserv", "simplecoinus", "btcguild", "eligius", "ozcoin", "eclipsemc", "maxbtc", "triplemining", "coinlab", "pool50btc", "ghashio", "stminingcorp", "bitparking", "mmpool", "polmine", "kncminer", "bitalo", "f2pool", "hhtt", "megabigpower", "mtred", "nmcbit", "yourbtcnet", "givemecoins", "braiinspool", "antpool", "multicoinco", "bcpoolio", "cointerra", "kanopool", "solock", "ckpool", "nicehash", "bitclub", "bitcoinaffiliatenetwork", "btcc", "bwpool", "exxbw", "bitsolo", "bitfury", "twentyoneinc", "digitalbtc", "eightbaochi", "mybtccoinpool", "tbdice", "hashpool", "nexious", "bravomining", "hotpool", "okexpool", "bcmonster", "onehash", "bixin", "tatmaspool", "viabtc", "connectbtc", "batpool", "waterhole", "dcexploration", "dcex", "btpool", "fiftyeightcoin", "bitcoinindia", "shawnp0wers", "phashio", "rigpool", "haozhuzhu", "sevenpool", "miningkings", "hashbx", "dpool", "rawpool", "haominer", "helix", "bitcoinukraine", "poolin", "secretsuperstar", "tigerpoolnet", "sigmapoolcom", "okpooltop", "hummerpool", "tangpool", "bytepool", "spiderpool", "novablock", "miningcity", "binancepool", "minerium", "lubiancom", "okkong", "aaopool", "emcdpool", "foundryusa", "sbicrypto", "arkpool", "purebtccom", 
"marapool", "kucoinpool", "entrustcharitypool", "okminer", "titan", "pegapool", "btcnuggets", "cloudhashing", "digitalxmintsy", "telco214", "btcpoolparty", "multipool", "transactioncoinmining", "btcdig", "trickysbtcpool", "btcmp", "eobot", "unomp", "patels", "gogreenlight", "ekanembtc", "canoe", "tiger", "onem1x", "zulupool", "secpool", "ocean", "whitepool", "wk057", "futurebitapollosolo", "carbonnegative", "portlandhodl", "phoenix", "neopool", "maxipool", "bitfufupool", "luckypool", "miningdutch", "publicpool", "miningsquared", "innopolistech", "btclab", "parasite"] -class PoolDetailInfo(TypedDict): - id: int - name: str - link: str - addresses: List[str] - regexes: List[str] - slug: PoolSlug - -class PoolDetail(TypedDict): - pool: PoolDetailInfo - blockCount: PoolBlockCounts - blockShare: PoolBlockShares - estimatedHashrate: int - reportedHashrate: Optional[int] - -class PoolInfo(TypedDict): - name: str - slug: PoolSlug - unique_id: int - -class PoolSlugParam(TypedDict): - slug: PoolSlug - -class PoolStats(TypedDict): - poolId: int - name: str - link: str - blockCount: int - rank: int - emptyBlocks: int - slug: PoolSlug - share: float - -class PoolsSummary(TypedDict): - pools: List[PoolStats] - blockCount: int - lastEstimatedHashrate: int - -QuarterIndex = int -RawLockTime = int -class RecommendedFees(TypedDict): - fastestFee: FeeRate - halfHourFee: FeeRate - hourFee: FeeRate - economyFee: FeeRate - minimumFee: FeeRate - -class RewardStats(TypedDict): - startBlock: Height - endBlock: Height - totalReward: Sats - totalFee: Sats - totalTx: int - -SemesterIndex = int -StoredBool = int -StoredF32 = float -StoredF64 = float -StoredI16 = int -StoredU16 = int -StoredU32 = int -StoredU64 = int -class SupplyState(TypedDict): - utxo_count: int - value: Sats - -TimePeriod = Literal["24h", "3d", "1w", "1m", "3m", "6m", "1y", "2y", "3y"] -class TimePeriodParam(TypedDict): - time_period: TimePeriod - -class TimestampParam(TypedDict): - timestamp: Timestamp - -class 
TxOut(TypedDict): - scriptpubkey: str - value: Sats - -Vout = int -class TxIn(TypedDict): - txid: Txid - vout: Vout - prevout: TxOut | None - scriptsig: str - scriptsig_asm: str - is_coinbase: bool - sequence: int - inner_redeemscript_asm: Optional[str] - -class TxStatus(TypedDict): - confirmed: bool - block_height: Height | None - block_hash: BlockHash | None - block_time: Timestamp | None - -TxVersion = int -class Transaction(TypedDict): - index: TxIndex | None - txid: Txid - version: TxVersion - locktime: RawLockTime - size: int - weight: Weight - sigops: int - fee: Sats - vin: List[TxIn] - vout: List[TxOut] - status: TxStatus - -TxInIndex = int -TxOutIndex = int -Vin = int -class TxOutspend(TypedDict): - spent: bool - txid: Txid | None - vin: Vin | None - status: TxStatus | None - -class TxidParam(TypedDict): - txid: Txid - -class TxidVout(TypedDict): - txid: Txid - vout: Vout - -UnknownOutputIndex = TypeIndex -class Utxo(TypedDict): - txid: Txid - vout: Vout - status: TxStatus - value: Sats - -class ValidateAddressParam(TypedDict): - address: str - -WeekIndex = int -YearIndex = int -Index = Literal["dateindex", "decadeindex", "difficultyepoch", "emptyoutputindex", "halvingepoch", "height", "txinindex", "monthindex", "opreturnindex", "txoutindex", "p2aaddressindex", "p2msoutputindex", "p2pk33addressindex", "p2pk65addressindex", "p2pkhaddressindex", "p2shaddressindex", "p2traddressindex", "p2wpkhaddressindex", "p2wshaddressindex", "quarterindex", "semesterindex", "txindex", "unknownoutputindex", "weekindex", "yearindex", "loadedaddressindex", "emptyaddressindex"] -class MetricLeafWithSchema(TypedDict): - name: str - value_type: str - indexes: List[Index] - -TreeNode = dict[str, "TreeNode"] | MetricLeafWithSchema - -class BrkError(Exception): - """Custom error class for BRK client errors.""" - - def __init__(self, message: str, status: Optional[int] = None): - super().__init__(message) - self.status = status - - -class BrkClientBase: - """Base HTTP client for 
making requests.""" - - def __init__(self, base_url: str, timeout: float = 30.0): - self.base_url = base_url - self.timeout = timeout - self._client = httpx.Client(timeout=timeout) - - def get(self, path: str) -> Any: - """Make a GET request.""" - try: - response = self._client.get(f"{self.base_url}{path}") - response.raise_for_status() - return response.json() - except httpx.HTTPStatusError as e: - raise BrkError(f"HTTP error: {e.response.status_code}", e.response.status_code) - except httpx.RequestError as e: - raise BrkError(str(e)) - - def close(self): - """Close the HTTP client.""" - self._client.close() - - def __enter__(self): - return self - - def __exit__(self, exc_type, exc_val, exc_tb): - self.close() - - -class MetricNode(Generic[T]): - """A metric node that can fetch data for different indexes.""" - - def __init__(self, client: BrkClientBase, path: str): - self._client = client - self._path = path - - def get(self) -> List[T]: - """Fetch all data points for this metric.""" - return self._client.get(self._path) - - def get_range(self, from_val: Optional[str] = None, to_val: Optional[str] = None) -> List[T]: - """Fetch data points within a range.""" - params = [] - if from_val is not None: - params.append(f"from={from_val}") - if to_val is not None: - params.append(f"to={to_val}") - query = "&".join(params) - return self._client.get(f"{self._path}?{query}" if query else self._path) - - -# Index accessor classes - -class Indexes3(Generic[T]): - """Index accessor for metrics with 9 indexes.""" - - def __init__(self, client: BrkClientBase, base_path: str): - self.by_dateindex: MetricNode[T] = MetricNode(client, f'{base_path}/dateindex') - self.by_decadeindex: MetricNode[T] = MetricNode(client, f'{base_path}/decadeindex') - self.by_difficultyepoch: MetricNode[T] = MetricNode(client, f'{base_path}/difficultyepoch') - self.by_height: MetricNode[T] = MetricNode(client, f'{base_path}/height') - self.by_monthindex: MetricNode[T] = MetricNode(client, 
f'{base_path}/monthindex') - self.by_quarterindex: MetricNode[T] = MetricNode(client, f'{base_path}/quarterindex') - self.by_semesterindex: MetricNode[T] = MetricNode(client, f'{base_path}/semesterindex') - self.by_weekindex: MetricNode[T] = MetricNode(client, f'{base_path}/weekindex') - self.by_yearindex: MetricNode[T] = MetricNode(client, f'{base_path}/yearindex') - -class Indexes4(Generic[T]): - """Index accessor for metrics with 8 indexes.""" - - def __init__(self, client: BrkClientBase, base_path: str): - self.by_dateindex: MetricNode[T] = MetricNode(client, f'{base_path}/dateindex') - self.by_decadeindex: MetricNode[T] = MetricNode(client, f'{base_path}/decadeindex') - self.by_difficultyepoch: MetricNode[T] = MetricNode(client, f'{base_path}/difficultyepoch') - self.by_monthindex: MetricNode[T] = MetricNode(client, f'{base_path}/monthindex') - self.by_quarterindex: MetricNode[T] = MetricNode(client, f'{base_path}/quarterindex') - self.by_semesterindex: MetricNode[T] = MetricNode(client, f'{base_path}/semesterindex') - self.by_weekindex: MetricNode[T] = MetricNode(client, f'{base_path}/weekindex') - self.by_yearindex: MetricNode[T] = MetricNode(client, f'{base_path}/yearindex') - -class Indexes26(Generic[T]): - """Index accessor for metrics with 8 indexes.""" - - def __init__(self, client: BrkClientBase, base_path: str): - self.by_dateindex: MetricNode[T] = MetricNode(client, f'{base_path}/dateindex') - self.by_decadeindex: MetricNode[T] = MetricNode(client, f'{base_path}/decadeindex') - self.by_height: MetricNode[T] = MetricNode(client, f'{base_path}/height') - self.by_monthindex: MetricNode[T] = MetricNode(client, f'{base_path}/monthindex') - self.by_quarterindex: MetricNode[T] = MetricNode(client, f'{base_path}/quarterindex') - self.by_semesterindex: MetricNode[T] = MetricNode(client, f'{base_path}/semesterindex') - self.by_weekindex: MetricNode[T] = MetricNode(client, f'{base_path}/weekindex') - self.by_yearindex: MetricNode[T] = MetricNode(client, 
f'{base_path}/yearindex') - -class Indexes(Generic[T]): - """Index accessor for metrics with 7 indexes.""" - - def __init__(self, client: BrkClientBase, base_path: str): - self.by_dateindex: MetricNode[T] = MetricNode(client, f'{base_path}/dateindex') - self.by_decadeindex: MetricNode[T] = MetricNode(client, f'{base_path}/decadeindex') - self.by_monthindex: MetricNode[T] = MetricNode(client, f'{base_path}/monthindex') - self.by_quarterindex: MetricNode[T] = MetricNode(client, f'{base_path}/quarterindex') - self.by_semesterindex: MetricNode[T] = MetricNode(client, f'{base_path}/semesterindex') - self.by_weekindex: MetricNode[T] = MetricNode(client, f'{base_path}/weekindex') - self.by_yearindex: MetricNode[T] = MetricNode(client, f'{base_path}/yearindex') - -class Indexes27(Generic[T]): - """Index accessor for metrics with 7 indexes.""" - - def __init__(self, client: BrkClientBase, base_path: str): - self.by_decadeindex: MetricNode[T] = MetricNode(client, f'{base_path}/decadeindex') - self.by_height: MetricNode[T] = MetricNode(client, f'{base_path}/height') - self.by_monthindex: MetricNode[T] = MetricNode(client, f'{base_path}/monthindex') - self.by_quarterindex: MetricNode[T] = MetricNode(client, f'{base_path}/quarterindex') - self.by_semesterindex: MetricNode[T] = MetricNode(client, f'{base_path}/semesterindex') - self.by_weekindex: MetricNode[T] = MetricNode(client, f'{base_path}/weekindex') - self.by_yearindex: MetricNode[T] = MetricNode(client, f'{base_path}/yearindex') - -class Indexes28(Generic[T]): - """Index accessor for metrics with 6 indexes.""" - - def __init__(self, client: BrkClientBase, base_path: str): - self.by_decadeindex: MetricNode[T] = MetricNode(client, f'{base_path}/decadeindex') - self.by_monthindex: MetricNode[T] = MetricNode(client, f'{base_path}/monthindex') - self.by_quarterindex: MetricNode[T] = MetricNode(client, f'{base_path}/quarterindex') - self.by_semesterindex: MetricNode[T] = MetricNode(client, f'{base_path}/semesterindex') - 
self.by_weekindex: MetricNode[T] = MetricNode(client, f'{base_path}/weekindex') - self.by_yearindex: MetricNode[T] = MetricNode(client, f'{base_path}/yearindex') - -class Indexes15(Generic[T]): - """Index accessor for metrics with 3 indexes.""" - - def __init__(self, client: BrkClientBase, base_path: str): - self.by_quarterindex: MetricNode[T] = MetricNode(client, f'{base_path}/quarterindex') - self.by_semesterindex: MetricNode[T] = MetricNode(client, f'{base_path}/semesterindex') - self.by_yearindex: MetricNode[T] = MetricNode(client, f'{base_path}/yearindex') - -class Indexes13(Generic[T]): - """Index accessor for metrics with 2 indexes.""" - - def __init__(self, client: BrkClientBase, base_path: str): - self.by_dateindex: MetricNode[T] = MetricNode(client, f'{base_path}/dateindex') - self.by_height: MetricNode[T] = MetricNode(client, f'{base_path}/height') - -class Indexes14(Generic[T]): - """Index accessor for metrics with 2 indexes.""" - - def __init__(self, client: BrkClientBase, base_path: str): - self.by_monthindex: MetricNode[T] = MetricNode(client, f'{base_path}/monthindex') - self.by_weekindex: MetricNode[T] = MetricNode(client, f'{base_path}/weekindex') - -class Indexes2(Generic[T]): - """Index accessor for metrics with 1 indexes.""" - - def __init__(self, client: BrkClientBase, base_path: str): - self.by_height: MetricNode[T] = MetricNode(client, f'{base_path}/height') - -class Indexes5(Generic[T]): - """Index accessor for metrics with 1 indexes.""" - - def __init__(self, client: BrkClientBase, base_path: str): - self.by_dateindex: MetricNode[T] = MetricNode(client, f'{base_path}/dateindex') - -class Indexes6(Generic[T]): - """Index accessor for metrics with 1 indexes.""" - - def __init__(self, client: BrkClientBase, base_path: str): - self.by_txindex: MetricNode[T] = MetricNode(client, f'{base_path}/txindex') - -class Indexes7(Generic[T]): - """Index accessor for metrics with 1 indexes.""" - - def __init__(self, client: BrkClientBase, base_path: str): 
- self.by_decadeindex: MetricNode[T] = MetricNode(client, f'{base_path}/decadeindex') - -class Indexes8(Generic[T]): - """Index accessor for metrics with 1 indexes.""" - - def __init__(self, client: BrkClientBase, base_path: str): - self.by_monthindex: MetricNode[T] = MetricNode(client, f'{base_path}/monthindex') - -class Indexes9(Generic[T]): - """Index accessor for metrics with 1 indexes.""" - - def __init__(self, client: BrkClientBase, base_path: str): - self.by_quarterindex: MetricNode[T] = MetricNode(client, f'{base_path}/quarterindex') - -class Indexes10(Generic[T]): - """Index accessor for metrics with 1 indexes.""" - - def __init__(self, client: BrkClientBase, base_path: str): - self.by_semesterindex: MetricNode[T] = MetricNode(client, f'{base_path}/semesterindex') - -class Indexes11(Generic[T]): - """Index accessor for metrics with 1 indexes.""" - - def __init__(self, client: BrkClientBase, base_path: str): - self.by_weekindex: MetricNode[T] = MetricNode(client, f'{base_path}/weekindex') - -class Indexes12(Generic[T]): - """Index accessor for metrics with 1 indexes.""" - - def __init__(self, client: BrkClientBase, base_path: str): - self.by_yearindex: MetricNode[T] = MetricNode(client, f'{base_path}/yearindex') - -class Indexes16(Generic[T]): - """Index accessor for metrics with 1 indexes.""" - - def __init__(self, client: BrkClientBase, base_path: str): - self.by_p2aaddressindex: MetricNode[T] = MetricNode(client, f'{base_path}/p2aaddressindex') - -class Indexes17(Generic[T]): - """Index accessor for metrics with 1 indexes.""" - - def __init__(self, client: BrkClientBase, base_path: str): - self.by_p2pk33addressindex: MetricNode[T] = MetricNode(client, f'{base_path}/p2pk33addressindex') - -class Indexes18(Generic[T]): - """Index accessor for metrics with 1 indexes.""" - - def __init__(self, client: BrkClientBase, base_path: str): - self.by_p2pk65addressindex: MetricNode[T] = MetricNode(client, f'{base_path}/p2pk65addressindex') - -class 
Indexes19(Generic[T]): - """Index accessor for metrics with 1 indexes.""" - - def __init__(self, client: BrkClientBase, base_path: str): - self.by_p2pkhaddressindex: MetricNode[T] = MetricNode(client, f'{base_path}/p2pkhaddressindex') - -class Indexes20(Generic[T]): - """Index accessor for metrics with 1 indexes.""" - - def __init__(self, client: BrkClientBase, base_path: str): - self.by_p2shaddressindex: MetricNode[T] = MetricNode(client, f'{base_path}/p2shaddressindex') - -class Indexes21(Generic[T]): - """Index accessor for metrics with 1 indexes.""" - - def __init__(self, client: BrkClientBase, base_path: str): - self.by_p2traddressindex: MetricNode[T] = MetricNode(client, f'{base_path}/p2traddressindex') - -class Indexes22(Generic[T]): - """Index accessor for metrics with 1 indexes.""" - - def __init__(self, client: BrkClientBase, base_path: str): - self.by_p2wpkhaddressindex: MetricNode[T] = MetricNode(client, f'{base_path}/p2wpkhaddressindex') - -class Indexes23(Generic[T]): - """Index accessor for metrics with 1 indexes.""" - - def __init__(self, client: BrkClientBase, base_path: str): - self.by_p2wshaddressindex: MetricNode[T] = MetricNode(client, f'{base_path}/p2wshaddressindex') - -class Indexes24(Generic[T]): - """Index accessor for metrics with 1 indexes.""" - - def __init__(self, client: BrkClientBase, base_path: str): - self.by_txinindex: MetricNode[T] = MetricNode(client, f'{base_path}/txinindex') - -class Indexes25(Generic[T]): - """Index accessor for metrics with 1 indexes.""" - - def __init__(self, client: BrkClientBase, base_path: str): - self.by_txoutindex: MetricNode[T] = MetricNode(client, f'{base_path}/txoutindex') - -class Indexes29(Generic[T]): - """Index accessor for metrics with 1 indexes.""" - - def __init__(self, client: BrkClientBase, base_path: str): - self.by_emptyaddressindex: MetricNode[T] = MetricNode(client, f'{base_path}/emptyaddressindex') - -class Indexes30(Generic[T]): - """Index accessor for metrics with 1 indexes.""" - - 
def __init__(self, client: BrkClientBase, base_path: str): - self.by_loadedaddressindex: MetricNode[T] = MetricNode(client, f'{base_path}/loadedaddressindex') - -# Reusable structural pattern classes - -class RealizedPattern3: - """Pattern struct for repeated tree structure.""" - - def __init__(self, client: BrkClientBase, base_path: str): - self.adjusted_sopr: Indexes5[StoredF64] = Indexes5(client, f'{base_path}/adjusted_sopr') - self.adjusted_sopr_30d_ema: Indexes5[StoredF64] = Indexes5(client, f'{base_path}/adjusted_sopr_30d_ema') - self.adjusted_sopr_7d_ema: Indexes5[StoredF64] = Indexes5(client, f'{base_path}/adjusted_sopr_7d_ema') - self.adjusted_value_created: Indexes3[Dollars] = Indexes3(client, f'{base_path}/adjusted_value_created') - self.adjusted_value_destroyed: Indexes3[Dollars] = Indexes3(client, f'{base_path}/adjusted_value_destroyed') - self.neg_realized_loss: BlockCountPattern[Dollars] = BlockCountPattern(client, f'{base_path}/neg_realized_loss') - self.net_realized_pnl: BlockCountPattern[Dollars] = BlockCountPattern(client, f'{base_path}/net_realized_pnl') - self.net_realized_pnl_cumulative_30d_delta: Indexes[Dollars] = Indexes(client, f'{base_path}/net_realized_pnl_cumulative_30d_delta') - self.net_realized_pnl_cumulative_30d_delta_rel_to_market_cap: Indexes[StoredF32] = Indexes(client, f'{base_path}/net_realized_pnl_cumulative_30d_delta_rel_to_market_cap') - self.net_realized_pnl_cumulative_30d_delta_rel_to_realized_cap: Indexes[StoredF32] = Indexes(client, f'{base_path}/net_realized_pnl_cumulative_30d_delta_rel_to_realized_cap') - self.net_realized_pnl_rel_to_realized_cap: Indexes2[StoredF32] = Indexes2(client, f'{base_path}/net_realized_pnl_rel_to_realized_cap') - self.realized_cap: Indexes3[Dollars] = Indexes3(client, f'{base_path}/realized_cap') - self.realized_cap_30d_delta: Indexes[Dollars] = Indexes(client, f'{base_path}/realized_cap_30d_delta') - self.realized_cap_rel_to_own_market_cap: Indexes3[StoredF32] = Indexes3(client, 
f'{base_path}/realized_cap_rel_to_own_market_cap') - self.realized_loss: BlockCountPattern[Dollars] = BlockCountPattern(client, f'{base_path}/realized_loss') - self.realized_loss_rel_to_realized_cap: Indexes2[StoredF32] = Indexes2(client, f'{base_path}/realized_loss_rel_to_realized_cap') - self.realized_price: Indexes3[Dollars] = Indexes3(client, f'{base_path}/realized_price') - self.realized_price_extra: ActivePriceRatioPattern = ActivePriceRatioPattern(client, f'{base_path}/realized_price_extra') - self.realized_profit: BlockCountPattern[Dollars] = BlockCountPattern(client, f'{base_path}/realized_profit') - self.realized_profit_rel_to_realized_cap: Indexes2[StoredF32] = Indexes2(client, f'{base_path}/realized_profit_rel_to_realized_cap') - self.realized_profit_to_loss_ratio: Indexes5[StoredF64] = Indexes5(client, f'{base_path}/realized_profit_to_loss_ratio') - self.realized_value: Indexes3[Dollars] = Indexes3(client, f'{base_path}/realized_value') - self.sell_side_risk_ratio: Indexes5[StoredF32] = Indexes5(client, f'{base_path}/sell_side_risk_ratio') - self.sell_side_risk_ratio_30d_ema: Indexes5[StoredF32] = Indexes5(client, f'{base_path}/sell_side_risk_ratio_30d_ema') - self.sell_side_risk_ratio_7d_ema: Indexes5[StoredF32] = Indexes5(client, f'{base_path}/sell_side_risk_ratio_7d_ema') - self.sopr: Indexes5[StoredF64] = Indexes5(client, f'{base_path}/sopr') - self.sopr_30d_ema: Indexes5[StoredF64] = Indexes5(client, f'{base_path}/sopr_30d_ema') - self.sopr_7d_ema: Indexes5[StoredF64] = Indexes5(client, f'{base_path}/sopr_7d_ema') - self.total_realized_pnl: BitcoinPattern2[Dollars] = BitcoinPattern2(client, f'{base_path}/total_realized_pnl') - self.value_created: Indexes3[Dollars] = Indexes3(client, f'{base_path}/value_created') - self.value_destroyed: Indexes3[Dollars] = Indexes3(client, f'{base_path}/value_destroyed') - -class Ratio1ySdPattern2: - """Pattern struct for repeated tree structure.""" - - def __init__(self, client: BrkClientBase, base_path: str): - 
self._0sd_usd: Indexes[Dollars] = Indexes(client, f'{base_path}/_0sd_usd') - self.m0_5sd: Indexes[StoredF32] = Indexes(client, f'{base_path}/m0_5sd') - self.m0_5sd_usd: Indexes[Dollars] = Indexes(client, f'{base_path}/m0_5sd_usd') - self.m1_5sd: Indexes[StoredF32] = Indexes(client, f'{base_path}/m1_5sd') - self.m1_5sd_usd: Indexes[Dollars] = Indexes(client, f'{base_path}/m1_5sd_usd') - self.m1sd: Indexes[StoredF32] = Indexes(client, f'{base_path}/m1sd') - self.m1sd_usd: Indexes[Dollars] = Indexes(client, f'{base_path}/m1sd_usd') - self.m2_5sd: Indexes[StoredF32] = Indexes(client, f'{base_path}/m2_5sd') - self.m2_5sd_usd: Indexes[Dollars] = Indexes(client, f'{base_path}/m2_5sd_usd') - self.m2sd: Indexes[StoredF32] = Indexes(client, f'{base_path}/m2sd') - self.m2sd_usd: Indexes[Dollars] = Indexes(client, f'{base_path}/m2sd_usd') - self.m3sd: Indexes[StoredF32] = Indexes(client, f'{base_path}/m3sd') - self.m3sd_usd: Indexes[Dollars] = Indexes(client, f'{base_path}/m3sd_usd') - self.p0_5sd: Indexes[StoredF32] = Indexes(client, f'{base_path}/p0_5sd') - self.p0_5sd_usd: Indexes[Dollars] = Indexes(client, f'{base_path}/p0_5sd_usd') - self.p1_5sd: Indexes[StoredF32] = Indexes(client, f'{base_path}/p1_5sd') - self.p1_5sd_usd: Indexes[Dollars] = Indexes(client, f'{base_path}/p1_5sd_usd') - self.p1sd: Indexes[StoredF32] = Indexes(client, f'{base_path}/p1sd') - self.p1sd_usd: Indexes[Dollars] = Indexes(client, f'{base_path}/p1sd_usd') - self.p2_5sd: Indexes[StoredF32] = Indexes(client, f'{base_path}/p2_5sd') - self.p2_5sd_usd: Indexes[Dollars] = Indexes(client, f'{base_path}/p2_5sd_usd') - self.p2sd: Indexes[StoredF32] = Indexes(client, f'{base_path}/p2sd') - self.p2sd_usd: Indexes[Dollars] = Indexes(client, f'{base_path}/p2sd_usd') - self.p3sd: Indexes[StoredF32] = Indexes(client, f'{base_path}/p3sd') - self.p3sd_usd: Indexes[Dollars] = Indexes(client, f'{base_path}/p3sd_usd') - self.sd: Indexes[StoredF32] = Indexes(client, f'{base_path}/sd') - self.sma: Indexes[StoredF32] = 
Indexes(client, f'{base_path}/sma') - self.zscore: Indexes[StoredF32] = Indexes(client, f'{base_path}/zscore') - -class RealizedPattern2: - """Pattern struct for repeated tree structure.""" - - def __init__(self, client: BrkClientBase, base_path: str): - self.neg_realized_loss: BlockCountPattern[Dollars] = BlockCountPattern(client, f'{base_path}/neg_realized_loss') - self.net_realized_pnl: BlockCountPattern[Dollars] = BlockCountPattern(client, f'{base_path}/net_realized_pnl') - self.net_realized_pnl_cumulative_30d_delta: Indexes[Dollars] = Indexes(client, f'{base_path}/net_realized_pnl_cumulative_30d_delta') - self.net_realized_pnl_cumulative_30d_delta_rel_to_market_cap: Indexes[StoredF32] = Indexes(client, f'{base_path}/net_realized_pnl_cumulative_30d_delta_rel_to_market_cap') - self.net_realized_pnl_cumulative_30d_delta_rel_to_realized_cap: Indexes[StoredF32] = Indexes(client, f'{base_path}/net_realized_pnl_cumulative_30d_delta_rel_to_realized_cap') - self.net_realized_pnl_rel_to_realized_cap: Indexes2[StoredF32] = Indexes2(client, f'{base_path}/net_realized_pnl_rel_to_realized_cap') - self.realized_cap: Indexes3[Dollars] = Indexes3(client, f'{base_path}/realized_cap') - self.realized_cap_30d_delta: Indexes[Dollars] = Indexes(client, f'{base_path}/realized_cap_30d_delta') - self.realized_cap_rel_to_own_market_cap: Indexes3[StoredF32] = Indexes3(client, f'{base_path}/realized_cap_rel_to_own_market_cap') - self.realized_loss: BlockCountPattern[Dollars] = BlockCountPattern(client, f'{base_path}/realized_loss') - self.realized_loss_rel_to_realized_cap: Indexes2[StoredF32] = Indexes2(client, f'{base_path}/realized_loss_rel_to_realized_cap') - self.realized_price: Indexes3[Dollars] = Indexes3(client, f'{base_path}/realized_price') - self.realized_price_extra: ActivePriceRatioPattern = ActivePriceRatioPattern(client, f'{base_path}/realized_price_extra') - self.realized_profit: BlockCountPattern[Dollars] = BlockCountPattern(client, f'{base_path}/realized_profit') - 
self.realized_profit_rel_to_realized_cap: Indexes2[StoredF32] = Indexes2(client, f'{base_path}/realized_profit_rel_to_realized_cap') - self.realized_profit_to_loss_ratio: Indexes5[StoredF64] = Indexes5(client, f'{base_path}/realized_profit_to_loss_ratio') - self.realized_value: Indexes3[Dollars] = Indexes3(client, f'{base_path}/realized_value') - self.sell_side_risk_ratio: Indexes5[StoredF32] = Indexes5(client, f'{base_path}/sell_side_risk_ratio') - self.sell_side_risk_ratio_30d_ema: Indexes5[StoredF32] = Indexes5(client, f'{base_path}/sell_side_risk_ratio_30d_ema') - self.sell_side_risk_ratio_7d_ema: Indexes5[StoredF32] = Indexes5(client, f'{base_path}/sell_side_risk_ratio_7d_ema') - self.sopr: Indexes5[StoredF64] = Indexes5(client, f'{base_path}/sopr') - self.sopr_30d_ema: Indexes5[StoredF64] = Indexes5(client, f'{base_path}/sopr_30d_ema') - self.sopr_7d_ema: Indexes5[StoredF64] = Indexes5(client, f'{base_path}/sopr_7d_ema') - self.total_realized_pnl: BitcoinPattern2[Dollars] = BitcoinPattern2(client, f'{base_path}/total_realized_pnl') - self.value_created: Indexes3[Dollars] = Indexes3(client, f'{base_path}/value_created') - self.value_destroyed: Indexes3[Dollars] = Indexes3(client, f'{base_path}/value_destroyed') - -class RealizedPattern: - """Pattern struct for repeated tree structure.""" - - def __init__(self, client: BrkClientBase, base_path: str): - self.neg_realized_loss: BlockCountPattern[Dollars] = BlockCountPattern(client, f'{base_path}/neg_realized_loss') - self.net_realized_pnl: BlockCountPattern[Dollars] = BlockCountPattern(client, f'{base_path}/net_realized_pnl') - self.net_realized_pnl_cumulative_30d_delta: Indexes[Dollars] = Indexes(client, f'{base_path}/net_realized_pnl_cumulative_30d_delta') - self.net_realized_pnl_cumulative_30d_delta_rel_to_market_cap: Indexes[StoredF32] = Indexes(client, f'{base_path}/net_realized_pnl_cumulative_30d_delta_rel_to_market_cap') - self.net_realized_pnl_cumulative_30d_delta_rel_to_realized_cap: Indexes[StoredF32] = 
Indexes(client, f'{base_path}/net_realized_pnl_cumulative_30d_delta_rel_to_realized_cap') - self.net_realized_pnl_rel_to_realized_cap: Indexes2[StoredF32] = Indexes2(client, f'{base_path}/net_realized_pnl_rel_to_realized_cap') - self.realized_cap: Indexes3[Dollars] = Indexes3(client, f'{base_path}/realized_cap') - self.realized_cap_30d_delta: Indexes[Dollars] = Indexes(client, f'{base_path}/realized_cap_30d_delta') - self.realized_loss: BlockCountPattern[Dollars] = BlockCountPattern(client, f'{base_path}/realized_loss') - self.realized_loss_rel_to_realized_cap: Indexes2[StoredF32] = Indexes2(client, f'{base_path}/realized_loss_rel_to_realized_cap') - self.realized_price: Indexes3[Dollars] = Indexes3(client, f'{base_path}/realized_price') - self.realized_price_extra: RealizedPriceExtraPattern = RealizedPriceExtraPattern(client, f'{base_path}/realized_price_extra') - self.realized_profit: BlockCountPattern[Dollars] = BlockCountPattern(client, f'{base_path}/realized_profit') - self.realized_profit_rel_to_realized_cap: Indexes2[StoredF32] = Indexes2(client, f'{base_path}/realized_profit_rel_to_realized_cap') - self.realized_value: Indexes3[Dollars] = Indexes3(client, f'{base_path}/realized_value') - self.sell_side_risk_ratio: Indexes5[StoredF32] = Indexes5(client, f'{base_path}/sell_side_risk_ratio') - self.sell_side_risk_ratio_30d_ema: Indexes5[StoredF32] = Indexes5(client, f'{base_path}/sell_side_risk_ratio_30d_ema') - self.sell_side_risk_ratio_7d_ema: Indexes5[StoredF32] = Indexes5(client, f'{base_path}/sell_side_risk_ratio_7d_ema') - self.sopr: Indexes5[StoredF64] = Indexes5(client, f'{base_path}/sopr') - self.sopr_30d_ema: Indexes5[StoredF64] = Indexes5(client, f'{base_path}/sopr_30d_ema') - self.sopr_7d_ema: Indexes5[StoredF64] = Indexes5(client, f'{base_path}/sopr_7d_ema') - self.total_realized_pnl: BitcoinPattern2[Dollars] = BitcoinPattern2(client, f'{base_path}/total_realized_pnl') - self.value_created: Indexes3[Dollars] = Indexes3(client, 
f'{base_path}/value_created') - self.value_destroyed: Indexes3[Dollars] = Indexes3(client, f'{base_path}/value_destroyed') - -class Price13dEmaPattern: - """Pattern struct for repeated tree structure.""" - - def __init__(self, client: BrkClientBase, acc: str): - """Create pattern node with accumulated metric name.""" - self.price: Indexes[Dollars] = Indexes(client, f'/{acc}') - self.ratio: Indexes[StoredF32] = Indexes(client, f'/{acc}_ratio') - self.ratio_1m_sma: Indexes[StoredF32] = Indexes(client, f'/{acc}_ratio_1m_sma') - self.ratio_1w_sma: Indexes[StoredF32] = Indexes(client, f'/{acc}_ratio_1w_sma') - self.ratio_1y_sd: Ratio1ySdPattern2 = Ratio1ySdPattern2(client, f'{acc}_ratio_1y_sd') - self.ratio_2y_sd: Ratio1ySdPattern2 = Ratio1ySdPattern2(client, f'{acc}_ratio_2y_sd') - self.ratio_4y_sd: Ratio1ySdPattern2 = Ratio1ySdPattern2(client, f'{acc}_ratio_4y_sd') - self.ratio_pct1: Indexes[StoredF32] = Indexes(client, f'/{acc}_ratio_pct1') - self.ratio_pct1_usd: Indexes[Dollars] = Indexes(client, f'/{acc}_ratio_pct1_usd') - self.ratio_pct2: Indexes[StoredF32] = Indexes(client, f'/{acc}_ratio_pct2') - self.ratio_pct2_usd: Indexes[Dollars] = Indexes(client, f'/{acc}_ratio_pct2_usd') - self.ratio_pct5: Indexes[StoredF32] = Indexes(client, f'/{acc}_ratio_pct5') - self.ratio_pct5_usd: Indexes[Dollars] = Indexes(client, f'/{acc}_ratio_pct5_usd') - self.ratio_pct95: Indexes[StoredF32] = Indexes(client, f'/{acc}_ratio_pct95') - self.ratio_pct95_usd: Indexes[Dollars] = Indexes(client, f'/{acc}_ratio_pct95_usd') - self.ratio_pct98: Indexes[StoredF32] = Indexes(client, f'/{acc}_ratio_pct98') - self.ratio_pct98_usd: Indexes[Dollars] = Indexes(client, f'/{acc}_ratio_pct98_usd') - self.ratio_pct99: Indexes[StoredF32] = Indexes(client, f'/{acc}_ratio_pct99') - self.ratio_pct99_usd: Indexes[Dollars] = Indexes(client, f'/{acc}_ratio_pct99_usd') - self.ratio_sd: Ratio1ySdPattern2 = Ratio1ySdPattern2(client, f'{acc}_ratio_sd') - -class PricePercentilesPattern: - """Pattern struct for 
repeated tree structure.""" - - def __init__(self, client: BrkClientBase, base_path: str): - self.pct05: Indexes[Dollars] = Indexes(client, f'{base_path}/pct05') - self.pct10: Indexes[Dollars] = Indexes(client, f'{base_path}/pct10') - self.pct15: Indexes[Dollars] = Indexes(client, f'{base_path}/pct15') - self.pct20: Indexes[Dollars] = Indexes(client, f'{base_path}/pct20') - self.pct25: Indexes[Dollars] = Indexes(client, f'{base_path}/pct25') - self.pct30: Indexes[Dollars] = Indexes(client, f'{base_path}/pct30') - self.pct35: Indexes[Dollars] = Indexes(client, f'{base_path}/pct35') - self.pct40: Indexes[Dollars] = Indexes(client, f'{base_path}/pct40') - self.pct45: Indexes[Dollars] = Indexes(client, f'{base_path}/pct45') - self.pct50: Indexes[Dollars] = Indexes(client, f'{base_path}/pct50') - self.pct55: Indexes[Dollars] = Indexes(client, f'{base_path}/pct55') - self.pct60: Indexes[Dollars] = Indexes(client, f'{base_path}/pct60') - self.pct65: Indexes[Dollars] = Indexes(client, f'{base_path}/pct65') - self.pct70: Indexes[Dollars] = Indexes(client, f'{base_path}/pct70') - self.pct75: Indexes[Dollars] = Indexes(client, f'{base_path}/pct75') - self.pct80: Indexes[Dollars] = Indexes(client, f'{base_path}/pct80') - self.pct85: Indexes[Dollars] = Indexes(client, f'{base_path}/pct85') - self.pct90: Indexes[Dollars] = Indexes(client, f'{base_path}/pct90') - self.pct95: Indexes[Dollars] = Indexes(client, f'{base_path}/pct95') - -class RelativePattern2: - """Pattern struct for repeated tree structure.""" - - def __init__(self, client: BrkClientBase, base_path: str): - self.neg_unrealized_loss_rel_to_market_cap: Indexes27[StoredF32] = Indexes27(client, f'{base_path}/neg_unrealized_loss_rel_to_market_cap') - self.neg_unrealized_loss_rel_to_own_market_cap: Indexes27[StoredF32] = Indexes27(client, f'{base_path}/neg_unrealized_loss_rel_to_own_market_cap') - self.neg_unrealized_loss_rel_to_own_total_unrealized_pnl: Indexes27[StoredF32] = Indexes27(client, 
f'{base_path}/neg_unrealized_loss_rel_to_own_total_unrealized_pnl') - self.net_unrealized_pnl_rel_to_market_cap: Indexes26[StoredF32] = Indexes26(client, f'{base_path}/net_unrealized_pnl_rel_to_market_cap') - self.net_unrealized_pnl_rel_to_own_market_cap: Indexes26[StoredF32] = Indexes26(client, f'{base_path}/net_unrealized_pnl_rel_to_own_market_cap') - self.net_unrealized_pnl_rel_to_own_total_unrealized_pnl: Indexes26[StoredF32] = Indexes26(client, f'{base_path}/net_unrealized_pnl_rel_to_own_total_unrealized_pnl') - self.supply_in_loss_rel_to_circulating_supply: Indexes27[StoredF64] = Indexes27(client, f'{base_path}/supply_in_loss_rel_to_circulating_supply') - self.supply_in_loss_rel_to_own_supply: Indexes27[StoredF64] = Indexes27(client, f'{base_path}/supply_in_loss_rel_to_own_supply') - self.supply_in_profit_rel_to_circulating_supply: Indexes27[StoredF64] = Indexes27(client, f'{base_path}/supply_in_profit_rel_to_circulating_supply') - self.supply_in_profit_rel_to_own_supply: Indexes27[StoredF64] = Indexes27(client, f'{base_path}/supply_in_profit_rel_to_own_supply') - self.supply_rel_to_circulating_supply: Indexes[StoredF64] = Indexes(client, f'{base_path}/supply_rel_to_circulating_supply') - self.unrealized_loss_rel_to_market_cap: Indexes27[StoredF32] = Indexes27(client, f'{base_path}/unrealized_loss_rel_to_market_cap') - self.unrealized_loss_rel_to_own_market_cap: Indexes27[StoredF32] = Indexes27(client, f'{base_path}/unrealized_loss_rel_to_own_market_cap') - self.unrealized_loss_rel_to_own_total_unrealized_pnl: Indexes27[StoredF32] = Indexes27(client, f'{base_path}/unrealized_loss_rel_to_own_total_unrealized_pnl') - self.unrealized_profit_rel_to_market_cap: Indexes27[StoredF32] = Indexes27(client, f'{base_path}/unrealized_profit_rel_to_market_cap') - self.unrealized_profit_rel_to_own_market_cap: Indexes27[StoredF32] = Indexes27(client, f'{base_path}/unrealized_profit_rel_to_own_market_cap') - self.unrealized_profit_rel_to_own_total_unrealized_pnl: 
Indexes27[StoredF32] = Indexes27(client, f'{base_path}/unrealized_profit_rel_to_own_total_unrealized_pnl') - -class Ratio1ySdPattern: - """Pattern struct for repeated tree structure.""" - - def __init__(self, client: BrkClientBase, base_path: str): - self.m0_5sd: Indexes[StoredF32] = Indexes(client, f'{base_path}/m0_5sd') - self.m1_5sd: Indexes[StoredF32] = Indexes(client, f'{base_path}/m1_5sd') - self.m1sd: Indexes[StoredF32] = Indexes(client, f'{base_path}/m1sd') - self.m2_5sd: Indexes[StoredF32] = Indexes(client, f'{base_path}/m2_5sd') - self.m2sd: Indexes[StoredF32] = Indexes(client, f'{base_path}/m2sd') - self.m3sd: Indexes[StoredF32] = Indexes(client, f'{base_path}/m3sd') - self.p0_5sd: Indexes[StoredF32] = Indexes(client, f'{base_path}/p0_5sd') - self.p1_5sd: Indexes[StoredF32] = Indexes(client, f'{base_path}/p1_5sd') - self.p1sd: Indexes[StoredF32] = Indexes(client, f'{base_path}/p1sd') - self.p2_5sd: Indexes[StoredF32] = Indexes(client, f'{base_path}/p2_5sd') - self.p2sd: Indexes[StoredF32] = Indexes(client, f'{base_path}/p2sd') - self.p3sd: Indexes[StoredF32] = Indexes(client, f'{base_path}/p3sd') - self.sd: Indexes[StoredF32] = Indexes(client, f'{base_path}/sd') - self.sma: Indexes[StoredF32] = Indexes(client, f'{base_path}/sma') - self.zscore: Indexes[StoredF32] = Indexes(client, f'{base_path}/zscore') - -class AXbtPattern: - """Pattern struct for repeated tree structure.""" - - def __init__(self, client: BrkClientBase, base_path: str): - self._1d_dominance: BlockCountPattern[StoredF32] = BlockCountPattern(client, f'{base_path}/1d_dominance') - self._1m_blocks_mined: Indexes[StoredU32] = Indexes(client, f'{base_path}/1m_blocks_mined') - self._1m_dominance: Indexes[StoredF32] = Indexes(client, f'{base_path}/1m_dominance') - self._1w_blocks_mined: Indexes[StoredU32] = Indexes(client, f'{base_path}/1w_blocks_mined') - self._1w_dominance: Indexes[StoredF32] = Indexes(client, f'{base_path}/1w_dominance') - self._1y_blocks_mined: Indexes[StoredU32] = 
Indexes(client, f'{base_path}/1y_blocks_mined') - self._1y_dominance: Indexes[StoredF32] = Indexes(client, f'{base_path}/1y_dominance') - self.blocks_mined: BlockCountPattern[StoredU32] = BlockCountPattern(client, f'{base_path}/blocks_mined') - self.coinbase: UnclaimedRewardsPattern = UnclaimedRewardsPattern(client, f'{base_path}/coinbase') - self.days_since_block: Indexes[StoredU16] = Indexes(client, f'{base_path}/days_since_block') - self.dominance: BlockCountPattern[StoredF32] = BlockCountPattern(client, f'{base_path}/dominance') - self.fee: FeePattern2 = FeePattern2(client, f'{base_path}/fee') - self.subsidy: FeePattern2 = FeePattern2(client, f'{base_path}/subsidy') - -class ActivePriceRatioPattern: - """Pattern struct for repeated tree structure.""" - - def __init__(self, client: BrkClientBase, base_path: str): - self.ratio: Indexes[StoredF32] = Indexes(client, f'{base_path}/ratio') - self.ratio_1m_sma: Indexes[StoredF32] = Indexes(client, f'{base_path}/ratio_1m_sma') - self.ratio_1w_sma: Indexes[StoredF32] = Indexes(client, f'{base_path}/ratio_1w_sma') - self.ratio_1y_sd: Ratio1ySdPattern = Ratio1ySdPattern(client, f'{base_path}/ratio_1y_sd') - self.ratio_2y_sd: Ratio1ySdPattern = Ratio1ySdPattern(client, f'{base_path}/ratio_2y_sd') - self.ratio_4y_sd: Ratio1ySdPattern = Ratio1ySdPattern(client, f'{base_path}/ratio_4y_sd') - self.ratio_pct1: Indexes[StoredF32] = Indexes(client, f'{base_path}/ratio_pct1') - self.ratio_pct2: Indexes[StoredF32] = Indexes(client, f'{base_path}/ratio_pct2') - self.ratio_pct5: Indexes[StoredF32] = Indexes(client, f'{base_path}/ratio_pct5') - self.ratio_pct95: Indexes[StoredF32] = Indexes(client, f'{base_path}/ratio_pct95') - self.ratio_pct98: Indexes[StoredF32] = Indexes(client, f'{base_path}/ratio_pct98') - self.ratio_pct99: Indexes[StoredF32] = Indexes(client, f'{base_path}/ratio_pct99') - self.ratio_sd: Ratio1ySdPattern = Ratio1ySdPattern(client, f'{base_path}/ratio_sd') - -class BitcoinPattern(Generic[T]): - """Pattern struct 
for repeated tree structure.""" - - def __init__(self, client: BrkClientBase, base_path: str): - self.average: Indexes4[T] = Indexes4(client, f'{base_path}/average') - self.base: Indexes2[T] = Indexes2(client, f'{base_path}/base') - self.cumulative: Indexes3[T] = Indexes3(client, f'{base_path}/cumulative') - self.max: Indexes4[T] = Indexes4(client, f'{base_path}/max') - self.median: Indexes5[T] = Indexes5(client, f'{base_path}/median') - self.min: Indexes4[T] = Indexes4(client, f'{base_path}/min') - self.pct10: Indexes5[T] = Indexes5(client, f'{base_path}/pct10') - self.pct25: Indexes5[T] = Indexes5(client, f'{base_path}/pct25') - self.pct75: Indexes5[T] = Indexes5(client, f'{base_path}/pct75') - self.pct90: Indexes5[T] = Indexes5(client, f'{base_path}/pct90') - self.sum: Indexes4[T] = Indexes4(client, f'{base_path}/sum') - -class BlockSizePattern(Generic[T]): - """Pattern struct for repeated tree structure.""" - - def __init__(self, client: BrkClientBase, base_path: str): - self.average: Indexes4[T] = Indexes4(client, f'{base_path}/average') - self.cumulative: Indexes3[T] = Indexes3(client, f'{base_path}/cumulative') - self.max: Indexes4[T] = Indexes4(client, f'{base_path}/max') - self.median: Indexes5[T] = Indexes5(client, f'{base_path}/median') - self.min: Indexes4[T] = Indexes4(client, f'{base_path}/min') - self.pct10: Indexes5[T] = Indexes5(client, f'{base_path}/pct10') - self.pct25: Indexes5[T] = Indexes5(client, f'{base_path}/pct25') - self.pct75: Indexes5[T] = Indexes5(client, f'{base_path}/pct75') - self.pct90: Indexes5[T] = Indexes5(client, f'{base_path}/pct90') - self.sum: Indexes4[T] = Indexes4(client, f'{base_path}/sum') - -class RelativePattern: - """Pattern struct for repeated tree structure.""" - - def __init__(self, client: BrkClientBase, base_path: str): - self.neg_unrealized_loss_rel_to_market_cap: Indexes27[StoredF32] = Indexes27(client, f'{base_path}/neg_unrealized_loss_rel_to_market_cap') - self.net_unrealized_pnl_rel_to_market_cap: 
Indexes26[StoredF32] = Indexes26(client, f'{base_path}/net_unrealized_pnl_rel_to_market_cap') - self.supply_in_loss_rel_to_circulating_supply: Indexes27[StoredF64] = Indexes27(client, f'{base_path}/supply_in_loss_rel_to_circulating_supply') - self.supply_in_loss_rel_to_own_supply: Indexes27[StoredF64] = Indexes27(client, f'{base_path}/supply_in_loss_rel_to_own_supply') - self.supply_in_profit_rel_to_circulating_supply: Indexes27[StoredF64] = Indexes27(client, f'{base_path}/supply_in_profit_rel_to_circulating_supply') - self.supply_in_profit_rel_to_own_supply: Indexes27[StoredF64] = Indexes27(client, f'{base_path}/supply_in_profit_rel_to_own_supply') - self.supply_rel_to_circulating_supply: Indexes[StoredF64] = Indexes(client, f'{base_path}/supply_rel_to_circulating_supply') - self.unrealized_loss_rel_to_market_cap: Indexes27[StoredF32] = Indexes27(client, f'{base_path}/unrealized_loss_rel_to_market_cap') - self.unrealized_profit_rel_to_market_cap: Indexes27[StoredF32] = Indexes27(client, f'{base_path}/unrealized_profit_rel_to_market_cap') - -class UnrealizedPattern: - """Pattern struct for repeated tree structure.""" - - def __init__(self, client: BrkClientBase, base_path: str): - self.neg_unrealized_loss: Indexes26[Dollars] = Indexes26(client, f'{base_path}/neg_unrealized_loss') - self.net_unrealized_pnl: Indexes26[Dollars] = Indexes26(client, f'{base_path}/net_unrealized_pnl') - self.supply_in_loss: SupplyPattern = SupplyPattern(client, f'{base_path}/supply_in_loss') - self.supply_in_loss_value: SupplyValuePattern = SupplyValuePattern(client, f'{base_path}/supply_in_loss_value') - self.supply_in_profit: SupplyPattern = SupplyPattern(client, f'{base_path}/supply_in_profit') - self.supply_in_profit_value: SupplyValuePattern = SupplyValuePattern(client, f'{base_path}/supply_in_profit_value') - self.total_unrealized_pnl: Indexes26[Dollars] = Indexes26(client, f'{base_path}/total_unrealized_pnl') - self.unrealized_loss: Indexes26[Dollars] = Indexes26(client, 
f'{base_path}/unrealized_loss') - self.unrealized_profit: Indexes26[Dollars] = Indexes26(client, f'{base_path}/unrealized_profit') - -class Constant0Pattern(Generic[T]): - """Pattern struct for repeated tree structure.""" - - def __init__(self, client: BrkClientBase, acc: str): - """Create pattern node with accumulated metric name.""" - self.dateindex: Indexes5[T] = Indexes5(client, f'/{acc}') - self.decadeindex: Indexes7[T] = Indexes7(client, f'/{acc}') - self.height: Indexes2[T] = Indexes2(client, f'/{acc}') - self.monthindex: Indexes8[T] = Indexes8(client, f'/{acc}') - self.quarterindex: Indexes9[T] = Indexes9(client, f'/{acc}') - self.semesterindex: Indexes10[T] = Indexes10(client, f'/{acc}') - self.weekindex: Indexes11[T] = Indexes11(client, f'/{acc}') - self.yearindex: Indexes12[T] = Indexes12(client, f'/{acc}') - -class AddresstypeToHeightToAddrCountPattern(Generic[T]): - """Pattern struct for repeated tree structure.""" - - def __init__(self, client: BrkClientBase, base_path: str): - self.p2a: Indexes16[T] = Indexes16(client, f'{base_path}/p2a') - self.p2pk33: Indexes17[T] = Indexes17(client, f'{base_path}/p2pk33') - self.p2pk65: Indexes18[T] = Indexes18(client, f'{base_path}/p2pk65') - self.p2pkh: Indexes19[T] = Indexes19(client, f'{base_path}/p2pkh') - self.p2sh: Indexes20[T] = Indexes20(client, f'{base_path}/p2sh') - self.p2tr: Indexes21[T] = Indexes21(client, f'{base_path}/p2tr') - self.p2wpkh: Indexes22[T] = Indexes22(client, f'{base_path}/p2wpkh') - self.p2wsh: Indexes23[T] = Indexes23(client, f'{base_path}/p2wsh') - -class BlockIntervalPattern(Generic[T]): - """Pattern struct for repeated tree structure.""" - - def __init__(self, client: BrkClientBase, acc: str): - """Create pattern node with accumulated metric name.""" - self.average: Indexes3[T] = Indexes3(client, f'/{acc}_avg') - self.max: Indexes3[T] = Indexes3(client, f'/{acc}_max') - self.median: Indexes2[T] = Indexes2(client, f'/{acc}_median') - self.min: Indexes3[T] = Indexes3(client, 
f'/{acc}_min') - self.pct10: Indexes2[T] = Indexes2(client, f'/{acc}_pct10') - self.pct25: Indexes2[T] = Indexes2(client, f'/{acc}_pct25') - self.pct75: Indexes2[T] = Indexes2(client, f'/{acc}_pct75') - self.pct90: Indexes2[T] = Indexes2(client, f'/{acc}_pct90') - -class _0satsPattern: - """Pattern struct for repeated tree structure.""" - - def __init__(self, client: BrkClientBase, base_path: str): - self.activity: ActivityPattern = ActivityPattern(client, f'{base_path}/activity') - self.addr_count: Indexes3[StoredU64] = Indexes3(client, f'{base_path}/addr_count') - self.price_paid: PricePaidPattern = PricePaidPattern(client, f'{base_path}/price_paid') - self.realized: RealizedPattern = RealizedPattern(client, f'{base_path}/realized') - self.relative: RelativePattern = RelativePattern(client, f'{base_path}/relative') - self.supply: SupplyPattern2 = SupplyPattern2(client, f'{base_path}/supply') - self.unrealized: UnrealizedPattern = UnrealizedPattern(client, f'{base_path}/unrealized') - -class UpTo1dPattern: - """Pattern struct for repeated tree structure.""" - - def __init__(self, client: BrkClientBase, base_path: str): - self.activity: ActivityPattern = ActivityPattern(client, f'{base_path}/activity') - self.price_paid: PricePaidPattern2 = PricePaidPattern2(client, f'{base_path}/price_paid') - self.realized: RealizedPattern3 = RealizedPattern3(client, f'{base_path}/realized') - self.relative: RelativePattern2 = RelativePattern2(client, f'{base_path}/relative') - self.supply: SupplyPattern2 = SupplyPattern2(client, f'{base_path}/supply') - self.unrealized: UnrealizedPattern = UnrealizedPattern(client, f'{base_path}/unrealized') - -class _0satsPattern2: - """Pattern struct for repeated tree structure.""" - - def __init__(self, client: BrkClientBase, base_path: str): - self.activity: ActivityPattern = ActivityPattern(client, f'{base_path}/activity') - self.price_paid: PricePaidPattern = PricePaidPattern(client, f'{base_path}/price_paid') - self.realized: 
RealizedPattern = RealizedPattern(client, f'{base_path}/realized') - self.relative: RelativePattern = RelativePattern(client, f'{base_path}/relative') - self.supply: SupplyPattern2 = SupplyPattern2(client, f'{base_path}/supply') - self.unrealized: UnrealizedPattern = UnrealizedPattern(client, f'{base_path}/unrealized') - -class _10yTo12yPattern: - """Pattern struct for repeated tree structure.""" - - def __init__(self, client: BrkClientBase, base_path: str): - self.activity: ActivityPattern = ActivityPattern(client, f'{base_path}/activity') - self.price_paid: PricePaidPattern2 = PricePaidPattern2(client, f'{base_path}/price_paid') - self.realized: RealizedPattern2 = RealizedPattern2(client, f'{base_path}/realized') - self.relative: RelativePattern2 = RelativePattern2(client, f'{base_path}/relative') - self.supply: SupplyPattern2 = SupplyPattern2(client, f'{base_path}/supply') - self.unrealized: UnrealizedPattern = UnrealizedPattern(client, f'{base_path}/unrealized') - -class ActivityPattern: - """Pattern struct for repeated tree structure.""" - - def __init__(self, client: BrkClientBase, base_path: str): - self.coinblocks_destroyed: BlockCountPattern[StoredF64] = BlockCountPattern(client, f'{base_path}/coinblocks_destroyed') - self.coindays_destroyed: BlockCountPattern[StoredF64] = BlockCountPattern(client, f'{base_path}/coindays_destroyed') - self.satblocks_destroyed: Indexes2[Sats] = Indexes2(client, f'{base_path}/satblocks_destroyed') - self.satdays_destroyed: Indexes2[Sats] = Indexes2(client, f'{base_path}/satdays_destroyed') - self.sent: FeePattern2 = FeePattern2(client, f'{base_path}/sent') - -class SupplyPattern2: - """Pattern struct for repeated tree structure.""" - - def __init__(self, client: BrkClientBase, base_path: str): - self.supply: SupplyPattern = SupplyPattern(client, f'{base_path}/supply') - self.supply_half: ActiveSupplyPattern = ActiveSupplyPattern(client, f'{base_path}/supply_half') - self.supply_half_value: ActiveSupplyPattern = 
ActiveSupplyPattern(client, f'{base_path}/supply_half_value') - self.supply_value: SupplyValuePattern = SupplyValuePattern(client, f'{base_path}/supply_value') - self.utxo_count: Indexes3[StoredU64] = Indexes3(client, f'{base_path}/utxo_count') - -class FeePattern2: - """Pattern struct for repeated tree structure.""" - - def __init__(self, client: BrkClientBase, base_path: str): - self.base: Indexes2[Sats] = Indexes2(client, f'{base_path}/base') - self.bitcoin: BlockCountPattern[Bitcoin] = BlockCountPattern(client, f'{base_path}/bitcoin') - self.dollars: BlockCountPattern[Dollars] = BlockCountPattern(client, f'{base_path}/dollars') - self.sats: SatsPattern = SatsPattern(client, f'{base_path}/sats') - -class SupplyPattern: - """Pattern struct for repeated tree structure.""" - - def __init__(self, client: BrkClientBase, base_path: str): - self.base: Indexes2[Sats] = Indexes2(client, f'{base_path}/base') - self.bitcoin: Indexes[Bitcoin] = Indexes(client, f'{base_path}/bitcoin') - self.dollars: Indexes[Dollars] = Indexes(client, f'{base_path}/dollars') - self.sats: Indexes[Sats] = Indexes(client, f'{base_path}/sats') - -class UnclaimedRewardsPattern: - """Pattern struct for repeated tree structure.""" - - def __init__(self, client: BrkClientBase, base_path: str): - self.bitcoin: BlockCountPattern[Bitcoin] = BlockCountPattern(client, f'{base_path}/bitcoin') - self.dollars: BlockCountPattern[Dollars] = BlockCountPattern(client, f'{base_path}/dollars') - self.sats: BlockCountPattern[Sats] = BlockCountPattern(client, f'{base_path}/sats') - -class PricePaidPattern2: - """Pattern struct for repeated tree structure.""" - - def __init__(self, client: BrkClientBase, base_path: str): - self.max_price_paid: Indexes3[Dollars] = Indexes3(client, f'{base_path}/max_price_paid') - self.min_price_paid: Indexes3[Dollars] = Indexes3(client, f'{base_path}/min_price_paid') - self.price_percentiles: PricePercentilesPattern = PricePercentilesPattern(client, f'{base_path}/price_percentiles') 
- -class CoinbasePattern: - """Pattern struct for repeated tree structure.""" - - def __init__(self, client: BrkClientBase, base_path: str): - self.bitcoin: BitcoinPattern[Bitcoin] = BitcoinPattern(client, f'{base_path}/bitcoin') - self.dollars: BitcoinPattern[Dollars] = BitcoinPattern(client, f'{base_path}/dollars') - self.sats: BitcoinPattern[Sats] = BitcoinPattern(client, f'{base_path}/sats') - -class ActiveSupplyPattern: - """Pattern struct for repeated tree structure.""" - - def __init__(self, client: BrkClientBase, base_path: str): - self.bitcoin: Indexes3[Bitcoin] = Indexes3(client, f'{base_path}/bitcoin') - self.dollars: Indexes3[Dollars] = Indexes3(client, f'{base_path}/dollars') - self.sats: Indexes3[Sats] = Indexes3(client, f'{base_path}/sats') - -class BlockCountPattern(Generic[T]): - """Pattern struct for repeated tree structure.""" - - def __init__(self, client: BrkClientBase, base_path: str): - self.base: Indexes2[T] = Indexes2(client, f'{base_path}/base') - self.cumulative: Indexes3[T] = Indexes3(client, f'{base_path}/cumulative') - self.sum: Indexes4[T] = Indexes4(client, f'{base_path}/sum') - -class PricePaidPattern: - """Pattern struct for repeated tree structure.""" - - def __init__(self, client: BrkClientBase, base_path: str): - self.max_price_paid: Indexes3[Dollars] = Indexes3(client, f'{base_path}/max_price_paid') - self.min_price_paid: Indexes3[Dollars] = Indexes3(client, f'{base_path}/min_price_paid') - -class _1dReturns1mSdPattern: - """Pattern struct for repeated tree structure.""" - - def __init__(self, client: BrkClientBase, acc: str): - """Create pattern node with accumulated metric name.""" - self.sd: Indexes[StoredF32] = Indexes(client, f'/{acc}_sd') - self.sma: Indexes[StoredF32] = Indexes(client, f'/{acc}_sma') - -class SupplyValuePattern: - """Pattern struct for repeated tree structure.""" - - def __init__(self, client: BrkClientBase, base_path: str): - self.bitcoin: Indexes2[Bitcoin] = Indexes2(client, f'{base_path}/bitcoin') - 
self.dollars: Indexes2[Dollars] = Indexes2(client, f'{base_path}/dollars') - -class SatsPattern: - """Pattern struct for repeated tree structure.""" - - def __init__(self, client: BrkClientBase, base_path: str): - self.cumulative: Indexes3[Sats] = Indexes3(client, f'{base_path}/cumulative') - self.sum: Indexes4[Sats] = Indexes4(client, f'{base_path}/sum') - -class BitcoinPattern2(Generic[T]): - """Pattern struct for repeated tree structure.""" - - def __init__(self, client: BrkClientBase, base_path: str): - self.base: Indexes2[T] = Indexes2(client, f'{base_path}/base') - self.sum: Indexes4[T] = Indexes4(client, f'{base_path}/sum') - -class RealizedPriceExtraPattern: - """Pattern struct for repeated tree structure.""" - - def __init__(self, client: BrkClientBase, base_path: str): - self.ratio: Indexes[StoredF32] = Indexes(client, f'{base_path}/ratio') - -# Catalog tree classes - -class CatalogTree: - """Catalog tree node.""" - - def __init__(self, client: BrkClientBase, base_path: str = ''): - self.computed: CatalogTree_Computed = CatalogTree_Computed(client, f'{base_path}/computed') - self.indexed: CatalogTree_Indexed = CatalogTree_Indexed(client, f'{base_path}/indexed') - -class CatalogTree_Computed: - """Catalog tree node.""" - - def __init__(self, client: BrkClientBase, base_path: str = ''): - self.blks: CatalogTree_Computed_Blks = CatalogTree_Computed_Blks(client, f'{base_path}/blks') - self.chain: CatalogTree_Computed_Chain = CatalogTree_Computed_Chain(client, f'{base_path}/chain') - self.cointime: CatalogTree_Computed_Cointime = CatalogTree_Computed_Cointime(client, f'{base_path}/cointime') - self.constants: CatalogTree_Computed_Constants = CatalogTree_Computed_Constants(client, f'{base_path}/constants') - self.fetched: CatalogTree_Computed_Fetched = CatalogTree_Computed_Fetched(client, f'{base_path}/fetched') - self.indexes: CatalogTree_Computed_Indexes = CatalogTree_Computed_Indexes(client, f'{base_path}/indexes') - self.market: CatalogTree_Computed_Market 
= CatalogTree_Computed_Market(client, f'{base_path}/market') - self.pools: CatalogTree_Computed_Pools = CatalogTree_Computed_Pools(client, f'{base_path}/pools') - self.price: CatalogTree_Computed_Price = CatalogTree_Computed_Price(client, f'{base_path}/price') - self.stateful: CatalogTree_Computed_Stateful = CatalogTree_Computed_Stateful(client, f'{base_path}/stateful') - self.txins: CatalogTree_Computed_Txins = CatalogTree_Computed_Txins(client, f'{base_path}/txins') - self.txouts: CatalogTree_Computed_Txouts = CatalogTree_Computed_Txouts(client, f'{base_path}/txouts') - -class CatalogTree_Computed_Blks: - """Catalog tree node.""" - - def __init__(self, client: BrkClientBase, base_path: str = ''): - self.position: MetricNode[BlkPosition] = MetricNode(client, f'{base_path}/position') - -class CatalogTree_Computed_Chain: - """Catalog tree node.""" - - def __init__(self, client: BrkClientBase, base_path: str = ''): - self._1m_block_count: Indexes[StoredU32] = Indexes(client, f'{base_path}/1m_block_count') - self._1w_block_count: Indexes[StoredU32] = Indexes(client, f'{base_path}/1w_block_count') - self._1y_block_count: Indexes[StoredU32] = Indexes(client, f'{base_path}/1y_block_count') - self._24h_block_count: Indexes2[StoredU32] = Indexes2(client, f'{base_path}/24h_block_count') - self._24h_coinbase_sum: Indexes2[Sats] = Indexes2(client, f'{base_path}/24h_coinbase_sum') - self._24h_coinbase_usd_sum: Indexes2[Dollars] = Indexes2(client, f'{base_path}/24h_coinbase_usd_sum') - self.annualized_volume: Indexes[Sats] = Indexes(client, f'{base_path}/annualized_volume') - self.annualized_volume_btc: Indexes[Bitcoin] = Indexes(client, f'{base_path}/annualized_volume_btc') - self.annualized_volume_usd: Indexes[Dollars] = Indexes(client, f'{base_path}/annualized_volume_usd') - self.block_count: BlockCountPattern[StoredU32] = BlockCountPattern(client, f'{base_path}/block_count') - self.block_count_target: Indexes[StoredU64] = Indexes(client, f'{base_path}/block_count_target') - 
self.block_interval: BlockIntervalPattern[Timestamp] = BlockIntervalPattern(client, 'block_interval') - self.block_size: BlockSizePattern[StoredU64] = BlockSizePattern(client, f'{base_path}/block_size') - self.block_vbytes: BlockSizePattern[StoredU64] = BlockSizePattern(client, f'{base_path}/block_vbytes') - self.block_weight: BlockSizePattern[Weight] = BlockSizePattern(client, f'{base_path}/block_weight') - self.blocks_before_next_difficulty_adjustment: Indexes3[StoredU32] = Indexes3(client, f'{base_path}/blocks_before_next_difficulty_adjustment') - self.blocks_before_next_halving: Indexes3[StoredU32] = Indexes3(client, f'{base_path}/blocks_before_next_halving') - self.coinbase: CoinbasePattern = CoinbasePattern(client, f'{base_path}/coinbase') - self.days_before_next_difficulty_adjustment: Indexes3[StoredF32] = Indexes3(client, f'{base_path}/days_before_next_difficulty_adjustment') - self.days_before_next_halving: Indexes3[StoredF32] = Indexes3(client, f'{base_path}/days_before_next_halving') - self.difficulty: Indexes4[StoredF64] = Indexes4(client, f'{base_path}/difficulty') - self.difficulty_adjustment: Indexes3[StoredF32] = Indexes3(client, f'{base_path}/difficulty_adjustment') - self.difficulty_as_hash: Indexes3[StoredF32] = Indexes3(client, f'{base_path}/difficulty_as_hash') - self.difficultyepoch: Indexes[DifficultyEpoch] = Indexes(client, f'{base_path}/difficultyepoch') - self.emptyoutput_count: BitcoinPattern[StoredU64] = BitcoinPattern(client, f'{base_path}/emptyoutput_count') - self.exact_utxo_count: Indexes3[StoredU64] = Indexes3(client, f'{base_path}/exact_utxo_count') - self.fee: CatalogTree_Computed_Chain_Fee = CatalogTree_Computed_Chain_Fee(client, f'{base_path}/fee') - self.fee_dominance: Indexes5[StoredF32] = Indexes5(client, f'{base_path}/fee_dominance') - self.fee_rate: CatalogTree_Computed_Chain_FeeRate = CatalogTree_Computed_Chain_FeeRate(client, f'{base_path}/fee_rate') - self.halvingepoch: Indexes[HalvingEpoch] = Indexes(client, 
f'{base_path}/halvingepoch') - self.hash_price_phs: Indexes3[StoredF32] = Indexes3(client, f'{base_path}/hash_price_phs') - self.hash_price_phs_min: Indexes3[StoredF32] = Indexes3(client, f'{base_path}/hash_price_phs_min') - self.hash_price_rebound: Indexes3[StoredF32] = Indexes3(client, f'{base_path}/hash_price_rebound') - self.hash_price_ths: Indexes3[StoredF32] = Indexes3(client, f'{base_path}/hash_price_ths') - self.hash_price_ths_min: Indexes3[StoredF32] = Indexes3(client, f'{base_path}/hash_price_ths_min') - self.hash_rate: Indexes3[StoredF64] = Indexes3(client, f'{base_path}/hash_rate') - self.hash_rate_1m_sma: Indexes[StoredF32] = Indexes(client, f'{base_path}/hash_rate_1m_sma') - self.hash_rate_1w_sma: Indexes[StoredF64] = Indexes(client, f'{base_path}/hash_rate_1w_sma') - self.hash_rate_1y_sma: Indexes[StoredF32] = Indexes(client, f'{base_path}/hash_rate_1y_sma') - self.hash_rate_2m_sma: Indexes[StoredF32] = Indexes(client, f'{base_path}/hash_rate_2m_sma') - self.hash_value_phs: Indexes3[StoredF32] = Indexes3(client, f'{base_path}/hash_value_phs') - self.hash_value_phs_min: Indexes3[StoredF32] = Indexes3(client, f'{base_path}/hash_value_phs_min') - self.hash_value_rebound: Indexes3[StoredF32] = Indexes3(client, f'{base_path}/hash_value_rebound') - self.hash_value_ths: Indexes3[StoredF32] = Indexes3(client, f'{base_path}/hash_value_ths') - self.hash_value_ths_min: Indexes3[StoredF32] = Indexes3(client, f'{base_path}/hash_value_ths_min') - self.inflation_rate: Indexes[StoredF32] = Indexes(client, f'{base_path}/inflation_rate') - self.input_count: BlockSizePattern[StoredU64] = BlockSizePattern(client, f'{base_path}/input_count') - self.input_value: Indexes6[Sats] = Indexes6(client, f'{base_path}/input_value') - self.inputs_per_sec: Indexes[StoredF32] = Indexes(client, f'{base_path}/inputs_per_sec') - self.interval: Indexes2[Timestamp] = Indexes2(client, f'{base_path}/interval') - self.is_coinbase: Indexes6[StoredBool] = Indexes6(client, 
f'{base_path}/is_coinbase') - self.opreturn_count: BitcoinPattern[StoredU64] = BitcoinPattern(client, f'{base_path}/opreturn_count') - self.output_count: BlockSizePattern[StoredU64] = BlockSizePattern(client, f'{base_path}/output_count') - self.output_value: Indexes6[Sats] = Indexes6(client, f'{base_path}/output_value') - self.outputs_per_sec: Indexes[StoredF32] = Indexes(client, f'{base_path}/outputs_per_sec') - self.p2a_count: BitcoinPattern[StoredU64] = BitcoinPattern(client, f'{base_path}/p2a_count') - self.p2ms_count: BitcoinPattern[StoredU64] = BitcoinPattern(client, f'{base_path}/p2ms_count') - self.p2pk33_count: BitcoinPattern[StoredU64] = BitcoinPattern(client, f'{base_path}/p2pk33_count') - self.p2pk65_count: BitcoinPattern[StoredU64] = BitcoinPattern(client, f'{base_path}/p2pk65_count') - self.p2pkh_count: BitcoinPattern[StoredU64] = BitcoinPattern(client, f'{base_path}/p2pkh_count') - self.p2sh_count: BitcoinPattern[StoredU64] = BitcoinPattern(client, f'{base_path}/p2sh_count') - self.p2tr_count: BitcoinPattern[StoredU64] = BitcoinPattern(client, f'{base_path}/p2tr_count') - self.p2wpkh_count: BitcoinPattern[StoredU64] = BitcoinPattern(client, f'{base_path}/p2wpkh_count') - self.p2wsh_count: BitcoinPattern[StoredU64] = BitcoinPattern(client, f'{base_path}/p2wsh_count') - self.puell_multiple: Indexes[StoredF32] = Indexes(client, f'{base_path}/puell_multiple') - self.sent_sum: CatalogTree_Computed_Chain_SentSum = CatalogTree_Computed_Chain_SentSum(client, f'{base_path}/sent_sum') - self.subsidy: CoinbasePattern = CoinbasePattern(client, f'{base_path}/subsidy') - self.subsidy_dominance: Indexes5[StoredF32] = Indexes5(client, f'{base_path}/subsidy_dominance') - self.subsidy_usd_1y_sma: Indexes[Dollars] = Indexes(client, f'{base_path}/subsidy_usd_1y_sma') - self.timestamp: MetricNode[Timestamp] = MetricNode(client, f'{base_path}/timestamp') - self.tx_btc_velocity: Indexes[StoredF64] = Indexes(client, f'{base_path}/tx_btc_velocity') - self.tx_count: 
BitcoinPattern[StoredU64] = BitcoinPattern(client, f'{base_path}/tx_count') - self.tx_per_sec: Indexes[StoredF32] = Indexes(client, f'{base_path}/tx_per_sec') - self.tx_usd_velocity: Indexes[StoredF64] = Indexes(client, f'{base_path}/tx_usd_velocity') - self.tx_v1: BlockCountPattern[StoredU64] = BlockCountPattern(client, f'{base_path}/tx_v1') - self.tx_v2: BlockCountPattern[StoredU64] = BlockCountPattern(client, f'{base_path}/tx_v2') - self.tx_v3: BlockCountPattern[StoredU64] = BlockCountPattern(client, f'{base_path}/tx_v3') - self.tx_vsize: BlockIntervalPattern[VSize] = BlockIntervalPattern(client, 'tx_vsize') - self.tx_weight: BlockIntervalPattern[Weight] = BlockIntervalPattern(client, 'tx_weight') - self.unclaimed_rewards: UnclaimedRewardsPattern = UnclaimedRewardsPattern(client, f'{base_path}/unclaimed_rewards') - self.unknownoutput_count: BitcoinPattern[StoredU64] = BitcoinPattern(client, f'{base_path}/unknownoutput_count') - self.vbytes: Indexes2[StoredU64] = Indexes2(client, f'{base_path}/vbytes') - self.vsize: Indexes6[VSize] = Indexes6(client, f'{base_path}/vsize') - self.weight: Indexes6[Weight] = Indexes6(client, f'{base_path}/weight') - -class CatalogTree_Computed_Chain_Fee: - """Catalog tree node.""" - - def __init__(self, client: BrkClientBase, base_path: str = ''): - self.base: Indexes6[Sats] = Indexes6(client, f'{base_path}/base') - self.bitcoin: BlockSizePattern[Bitcoin] = BlockSizePattern(client, f'{base_path}/bitcoin') - self.bitcoin_txindex: Indexes6[Bitcoin] = Indexes6(client, f'{base_path}/bitcoin_txindex') - self.dollars: BlockSizePattern[Dollars] = BlockSizePattern(client, f'{base_path}/dollars') - self.dollars_txindex: Indexes6[Dollars] = Indexes6(client, f'{base_path}/dollars_txindex') - self.sats: BlockSizePattern[Sats] = BlockSizePattern(client, f'{base_path}/sats') - -class CatalogTree_Computed_Chain_FeeRate: - """Catalog tree node.""" - - def __init__(self, client: BrkClientBase, base_path: str = ''): - self.average: Indexes3[FeeRate] 
= Indexes3(client, f'{base_path}/average') - self.base: Indexes6[FeeRate] = Indexes6(client, f'{base_path}/base') - self.max: Indexes3[FeeRate] = Indexes3(client, f'{base_path}/max') - self.median: Indexes2[FeeRate] = Indexes2(client, f'{base_path}/median') - self.min: Indexes3[FeeRate] = Indexes3(client, f'{base_path}/min') - self.pct10: Indexes2[FeeRate] = Indexes2(client, f'{base_path}/pct10') - self.pct25: Indexes2[FeeRate] = Indexes2(client, f'{base_path}/pct25') - self.pct75: Indexes2[FeeRate] = Indexes2(client, f'{base_path}/pct75') - self.pct90: Indexes2[FeeRate] = Indexes2(client, f'{base_path}/pct90') - -class CatalogTree_Computed_Chain_SentSum: - """Catalog tree node.""" - - def __init__(self, client: BrkClientBase, base_path: str = ''): - self.bitcoin: BitcoinPattern2[Bitcoin] = BitcoinPattern2(client, f'{base_path}/bitcoin') - self.dollars: Indexes3[Dollars] = Indexes3(client, f'{base_path}/dollars') - self.sats: Indexes3[Sats] = Indexes3(client, f'{base_path}/sats') - -class CatalogTree_Computed_Cointime: - """Catalog tree node.""" - - def __init__(self, client: BrkClientBase, base_path: str = ''): - self.active_cap: Indexes3[Dollars] = Indexes3(client, f'{base_path}/active_cap') - self.active_price: Indexes3[Dollars] = Indexes3(client, f'{base_path}/active_price') - self.active_price_ratio: ActivePriceRatioPattern = ActivePriceRatioPattern(client, f'{base_path}/active_price_ratio') - self.active_supply: ActiveSupplyPattern = ActiveSupplyPattern(client, f'{base_path}/active_supply') - self.activity_to_vaultedness_ratio: Indexes3[StoredF64] = Indexes3(client, f'{base_path}/activity_to_vaultedness_ratio') - self.coinblocks_created: BlockCountPattern[StoredF64] = BlockCountPattern(client, f'{base_path}/coinblocks_created') - self.coinblocks_stored: BlockCountPattern[StoredF64] = BlockCountPattern(client, f'{base_path}/coinblocks_stored') - self.cointime_adj_inflation_rate: Indexes[StoredF32] = Indexes(client, f'{base_path}/cointime_adj_inflation_rate') - 
self.cointime_adj_tx_btc_velocity: Indexes[StoredF64] = Indexes(client, f'{base_path}/cointime_adj_tx_btc_velocity') - self.cointime_adj_tx_usd_velocity: Indexes[StoredF64] = Indexes(client, f'{base_path}/cointime_adj_tx_usd_velocity') - self.cointime_cap: Indexes3[Dollars] = Indexes3(client, f'{base_path}/cointime_cap') - self.cointime_price: Indexes3[Dollars] = Indexes3(client, f'{base_path}/cointime_price') - self.cointime_price_ratio: ActivePriceRatioPattern = ActivePriceRatioPattern(client, f'{base_path}/cointime_price_ratio') - self.cointime_value_created: BlockCountPattern[StoredF64] = BlockCountPattern(client, f'{base_path}/cointime_value_created') - self.cointime_value_destroyed: BlockCountPattern[StoredF64] = BlockCountPattern(client, f'{base_path}/cointime_value_destroyed') - self.cointime_value_stored: BlockCountPattern[StoredF64] = BlockCountPattern(client, f'{base_path}/cointime_value_stored') - self.investor_cap: Indexes3[Dollars] = Indexes3(client, f'{base_path}/investor_cap') - self.liveliness: Indexes3[StoredF64] = Indexes3(client, f'{base_path}/liveliness') - self.thermo_cap: Indexes3[Dollars] = Indexes3(client, f'{base_path}/thermo_cap') - self.true_market_mean: Indexes3[Dollars] = Indexes3(client, f'{base_path}/true_market_mean') - self.true_market_mean_ratio: ActivePriceRatioPattern = ActivePriceRatioPattern(client, f'{base_path}/true_market_mean_ratio') - self.vaulted_cap: Indexes3[Dollars] = Indexes3(client, f'{base_path}/vaulted_cap') - self.vaulted_price: Indexes3[Dollars] = Indexes3(client, f'{base_path}/vaulted_price') - self.vaulted_price_ratio: ActivePriceRatioPattern = ActivePriceRatioPattern(client, f'{base_path}/vaulted_price_ratio') - self.vaulted_supply: ActiveSupplyPattern = ActiveSupplyPattern(client, f'{base_path}/vaulted_supply') - self.vaultedness: Indexes3[StoredF64] = Indexes3(client, f'{base_path}/vaultedness') - -class CatalogTree_Computed_Constants: - """Catalog tree node.""" - - def __init__(self, client: BrkClientBase, 
base_path: str = ''): - self.constant_0: Constant0Pattern[StoredU16] = Constant0Pattern(client, 'constant_0') - self.constant_1: Constant0Pattern[StoredU16] = Constant0Pattern(client, 'constant_1') - self.constant_100: Constant0Pattern[StoredU16] = Constant0Pattern(client, 'constant_100') - self.constant_2: Constant0Pattern[StoredU16] = Constant0Pattern(client, 'constant_2') - self.constant_3: Constant0Pattern[StoredU16] = Constant0Pattern(client, 'constant_3') - self.constant_38_2: Constant0Pattern[StoredF32] = Constant0Pattern(client, 'constant_38_2') - self.constant_4: Constant0Pattern[StoredU16] = Constant0Pattern(client, 'constant_4') - self.constant_50: Constant0Pattern[StoredU16] = Constant0Pattern(client, 'constant_50') - self.constant_600: Constant0Pattern[StoredU16] = Constant0Pattern(client, 'constant_600') - self.constant_61_8: Constant0Pattern[StoredF32] = Constant0Pattern(client, 'constant_61_8') - self.constant_minus_1: Constant0Pattern[StoredI16] = Constant0Pattern(client, 'constant_minus_1') - self.constant_minus_2: Constant0Pattern[StoredI16] = Constant0Pattern(client, 'constant_minus_2') - self.constant_minus_3: Constant0Pattern[StoredI16] = Constant0Pattern(client, 'constant_minus_3') - self.constant_minus_4: Constant0Pattern[StoredI16] = Constant0Pattern(client, 'constant_minus_4') - -class CatalogTree_Computed_Fetched: - """Catalog tree node.""" - - def __init__(self, client: BrkClientBase, base_path: str = ''): - self.price_ohlc_in_cents: Indexes13[OHLCCents] = Indexes13(client, f'{base_path}/price_ohlc_in_cents') - -class CatalogTree_Computed_Indexes: - """Catalog tree node.""" - - def __init__(self, client: BrkClientBase, base_path: str = ''): - self.date: Indexes13[Date] = Indexes13(client, f'{base_path}/date') - self.date_fixed: Indexes2[Date] = Indexes2(client, f'{base_path}/date_fixed') - self.dateindex: Indexes13[DateIndex] = Indexes13(client, f'{base_path}/dateindex') - self.dateindex_count: Indexes14[StoredU64] = Indexes14(client, 
f'{base_path}/dateindex_count') - self.decadeindex: MetricNode[DecadeIndex] = MetricNode(client, f'{base_path}/decadeindex') - self.difficultyepoch: MetricNode[DifficultyEpoch] = MetricNode(client, f'{base_path}/difficultyepoch') - self.emptyoutputindex: MetricNode[EmptyOutputIndex] = MetricNode(client, f'{base_path}/emptyoutputindex') - self.first_dateindex: Indexes14[DateIndex] = Indexes14(client, f'{base_path}/first_dateindex') - self.first_height: MetricNode[Height] = MetricNode(client, f'{base_path}/first_height') - self.first_monthindex: Indexes15[MonthIndex] = Indexes15(client, f'{base_path}/first_monthindex') - self.first_yearindex: Indexes7[YearIndex] = Indexes7(client, f'{base_path}/first_yearindex') - self.halvingepoch: MetricNode[HalvingEpoch] = MetricNode(client, f'{base_path}/halvingepoch') - self.height: Indexes2[Height] = Indexes2(client, f'{base_path}/height') - self.height_count: MetricNode[StoredU64] = MetricNode(client, f'{base_path}/height_count') - self.input_count: Indexes6[StoredU64] = Indexes6(client, f'{base_path}/input_count') - self.monthindex: MetricNode[MonthIndex] = MetricNode(client, f'{base_path}/monthindex') - self.monthindex_count: Indexes15[StoredU64] = Indexes15(client, f'{base_path}/monthindex_count') - self.opreturnindex: MetricNode[OpReturnIndex] = MetricNode(client, f'{base_path}/opreturnindex') - self.output_count: Indexes6[StoredU64] = Indexes6(client, f'{base_path}/output_count') - self.p2aaddressindex: Indexes16[P2AAddressIndex] = Indexes16(client, f'{base_path}/p2aaddressindex') - self.p2msoutputindex: MetricNode[P2MSOutputIndex] = MetricNode(client, f'{base_path}/p2msoutputindex') - self.p2pk33addressindex: Indexes17[P2PK33AddressIndex] = Indexes17(client, f'{base_path}/p2pk33addressindex') - self.p2pk65addressindex: Indexes18[P2PK65AddressIndex] = Indexes18(client, f'{base_path}/p2pk65addressindex') - self.p2pkhaddressindex: Indexes19[P2PKHAddressIndex] = Indexes19(client, f'{base_path}/p2pkhaddressindex') - 
self.p2shaddressindex: Indexes20[P2SHAddressIndex] = Indexes20(client, f'{base_path}/p2shaddressindex') - self.p2traddressindex: Indexes21[P2TRAddressIndex] = Indexes21(client, f'{base_path}/p2traddressindex') - self.p2wpkhaddressindex: Indexes22[P2WPKHAddressIndex] = Indexes22(client, f'{base_path}/p2wpkhaddressindex') - self.p2wshaddressindex: Indexes23[P2WSHAddressIndex] = Indexes23(client, f'{base_path}/p2wshaddressindex') - self.quarterindex: MetricNode[QuarterIndex] = MetricNode(client, f'{base_path}/quarterindex') - self.semesterindex: MetricNode[SemesterIndex] = MetricNode(client, f'{base_path}/semesterindex') - self.timestamp_fixed: Indexes2[Timestamp] = Indexes2(client, f'{base_path}/timestamp_fixed') - self.txindex: Indexes6[TxIndex] = Indexes6(client, f'{base_path}/txindex') - self.txindex_count: Indexes2[StoredU64] = Indexes2(client, f'{base_path}/txindex_count') - self.txinindex: Indexes24[TxInIndex] = Indexes24(client, f'{base_path}/txinindex') - self.txoutindex: Indexes25[TxOutIndex] = Indexes25(client, f'{base_path}/txoutindex') - self.unknownoutputindex: MetricNode[UnknownOutputIndex] = MetricNode(client, f'{base_path}/unknownoutputindex') - self.weekindex: MetricNode[WeekIndex] = MetricNode(client, f'{base_path}/weekindex') - self.yearindex: MetricNode[YearIndex] = MetricNode(client, f'{base_path}/yearindex') - self.yearindex_count: Indexes7[StoredU64] = Indexes7(client, f'{base_path}/yearindex_count') - -class CatalogTree_Computed_Market: - """Catalog tree node.""" - - def __init__(self, client: BrkClientBase, base_path: str = ''): - self._1d_returns_1m_sd: _1dReturns1mSdPattern = _1dReturns1mSdPattern(client, '1d_returns_1m_sd') - self._1d_returns_1w_sd: _1dReturns1mSdPattern = _1dReturns1mSdPattern(client, '1d_returns_1w_sd') - self._1d_returns_1y_sd: _1dReturns1mSdPattern = _1dReturns1mSdPattern(client, '1d_returns_1y_sd') - self._10y_cagr: Indexes[StoredF32] = Indexes(client, f'{base_path}/_10y_cagr') - self._10y_dca_avg_price: 
Indexes[Dollars] = Indexes(client, f'{base_path}/_10y_dca_avg_price') - self._10y_dca_cagr: Indexes[StoredF32] = Indexes(client, f'{base_path}/_10y_dca_cagr') - self._10y_dca_returns: Indexes[StoredF32] = Indexes(client, f'{base_path}/_10y_dca_returns') - self._10y_dca_stack: Indexes[Sats] = Indexes(client, f'{base_path}/_10y_dca_stack') - self._10y_price_returns: Indexes[StoredF32] = Indexes(client, f'{base_path}/_10y_price_returns') - self._1d_price_returns: Indexes[StoredF32] = Indexes(client, f'{base_path}/_1d_price_returns') - self._1m_dca_avg_price: Indexes[Dollars] = Indexes(client, f'{base_path}/_1m_dca_avg_price') - self._1m_dca_returns: Indexes[StoredF32] = Indexes(client, f'{base_path}/_1m_dca_returns') - self._1m_dca_stack: Indexes[Sats] = Indexes(client, f'{base_path}/_1m_dca_stack') - self._1m_price_returns: Indexes[StoredF32] = Indexes(client, f'{base_path}/_1m_price_returns') - self._1w_dca_avg_price: Indexes[Dollars] = Indexes(client, f'{base_path}/_1w_dca_avg_price') - self._1w_dca_returns: Indexes[StoredF32] = Indexes(client, f'{base_path}/_1w_dca_returns') - self._1w_dca_stack: Indexes[Sats] = Indexes(client, f'{base_path}/_1w_dca_stack') - self._1w_price_returns: Indexes[StoredF32] = Indexes(client, f'{base_path}/_1w_price_returns') - self._1y_dca_avg_price: Indexes[Dollars] = Indexes(client, f'{base_path}/_1y_dca_avg_price') - self._1y_dca_returns: Indexes[StoredF32] = Indexes(client, f'{base_path}/_1y_dca_returns') - self._1y_dca_stack: Indexes[Sats] = Indexes(client, f'{base_path}/_1y_dca_stack') - self._1y_price_returns: Indexes[StoredF32] = Indexes(client, f'{base_path}/_1y_price_returns') - self._2y_cagr: Indexes[StoredF32] = Indexes(client, f'{base_path}/_2y_cagr') - self._2y_dca_avg_price: Indexes[Dollars] = Indexes(client, f'{base_path}/_2y_dca_avg_price') - self._2y_dca_cagr: Indexes[StoredF32] = Indexes(client, f'{base_path}/_2y_dca_cagr') - self._2y_dca_returns: Indexes[StoredF32] = Indexes(client, f'{base_path}/_2y_dca_returns') - 
        # --- DCA / CAGR / returns accessors (continued); each attribute is a
        # typed endpoint accessor rooted at `base_path`. ---
        self._2y_dca_stack: Indexes[Sats] = Indexes(client, f'{base_path}/_2y_dca_stack')
        self._2y_price_returns: Indexes[StoredF32] = Indexes(client, f'{base_path}/_2y_price_returns')
        self._3m_dca_avg_price: Indexes[Dollars] = Indexes(client, f'{base_path}/_3m_dca_avg_price')
        self._3m_dca_returns: Indexes[StoredF32] = Indexes(client, f'{base_path}/_3m_dca_returns')
        self._3m_dca_stack: Indexes[Sats] = Indexes(client, f'{base_path}/_3m_dca_stack')
        self._3m_price_returns: Indexes[StoredF32] = Indexes(client, f'{base_path}/_3m_price_returns')
        self._3y_cagr: Indexes[StoredF32] = Indexes(client, f'{base_path}/_3y_cagr')
        self._3y_dca_avg_price: Indexes[Dollars] = Indexes(client, f'{base_path}/_3y_dca_avg_price')
        self._3y_dca_cagr: Indexes[StoredF32] = Indexes(client, f'{base_path}/_3y_dca_cagr')
        self._3y_dca_returns: Indexes[StoredF32] = Indexes(client, f'{base_path}/_3y_dca_returns')
        self._3y_dca_stack: Indexes[Sats] = Indexes(client, f'{base_path}/_3y_dca_stack')
        self._3y_price_returns: Indexes[StoredF32] = Indexes(client, f'{base_path}/_3y_price_returns')
        self._4y_cagr: Indexes[StoredF32] = Indexes(client, f'{base_path}/_4y_cagr')
        self._4y_dca_avg_price: Indexes[Dollars] = Indexes(client, f'{base_path}/_4y_dca_avg_price')
        self._4y_dca_cagr: Indexes[StoredF32] = Indexes(client, f'{base_path}/_4y_dca_cagr')
        self._4y_dca_returns: Indexes[StoredF32] = Indexes(client, f'{base_path}/_4y_dca_returns')
        self._4y_dca_stack: Indexes[Sats] = Indexes(client, f'{base_path}/_4y_dca_stack')
        self._4y_price_returns: Indexes[StoredF32] = Indexes(client, f'{base_path}/_4y_price_returns')
        self._5y_cagr: Indexes[StoredF32] = Indexes(client, f'{base_path}/_5y_cagr')
        self._5y_dca_avg_price: Indexes[Dollars] = Indexes(client, f'{base_path}/_5y_dca_avg_price')
        self._5y_dca_cagr: Indexes[StoredF32] = Indexes(client, f'{base_path}/_5y_dca_cagr')
        self._5y_dca_returns: Indexes[StoredF32] = Indexes(client, f'{base_path}/_5y_dca_returns')
        self._5y_dca_stack: Indexes[Sats] = Indexes(client, f'{base_path}/_5y_dca_stack')
        self._5y_price_returns: Indexes[StoredF32] = Indexes(client, f'{base_path}/_5y_price_returns')
        self._6m_dca_avg_price: Indexes[Dollars] = Indexes(client, f'{base_path}/_6m_dca_avg_price')
        self._6m_dca_returns: Indexes[StoredF32] = Indexes(client, f'{base_path}/_6m_dca_returns')
        self._6m_dca_stack: Indexes[Sats] = Indexes(client, f'{base_path}/_6m_dca_stack')
        self._6m_price_returns: Indexes[StoredF32] = Indexes(client, f'{base_path}/_6m_price_returns')
        self._6y_cagr: Indexes[StoredF32] = Indexes(client, f'{base_path}/_6y_cagr')
        self._6y_dca_avg_price: Indexes[Dollars] = Indexes(client, f'{base_path}/_6y_dca_avg_price')
        self._6y_dca_cagr: Indexes[StoredF32] = Indexes(client, f'{base_path}/_6y_dca_cagr')
        self._6y_dca_returns: Indexes[StoredF32] = Indexes(client, f'{base_path}/_6y_dca_returns')
        self._6y_dca_stack: Indexes[Sats] = Indexes(client, f'{base_path}/_6y_dca_stack')
        self._6y_price_returns: Indexes[StoredF32] = Indexes(client, f'{base_path}/_6y_price_returns')
        self._8y_cagr: Indexes[StoredF32] = Indexes(client, f'{base_path}/_8y_cagr')
        self._8y_dca_avg_price: Indexes[Dollars] = Indexes(client, f'{base_path}/_8y_dca_avg_price')
        self._8y_dca_cagr: Indexes[StoredF32] = Indexes(client, f'{base_path}/_8y_dca_cagr')
        self._8y_dca_returns: Indexes[StoredF32] = Indexes(client, f'{base_path}/_8y_dca_returns')
        self._8y_dca_stack: Indexes[Sats] = Indexes(client, f'{base_path}/_8y_dca_stack')
        self._8y_price_returns: Indexes[StoredF32] = Indexes(client, f'{base_path}/_8y_price_returns')
        # All-time-high tracking.
        self.days_since_price_ath: Indexes[StoredU16] = Indexes(client, f'{base_path}/days_since_price_ath')
        # Per-year DCA "class" cohorts (2015-2025): avg price / returns / stack.
        self.dca_class_2015_avg_price: Indexes[Dollars] = Indexes(client, f'{base_path}/dca_class_2015_avg_price')
        self.dca_class_2015_returns: Indexes[StoredF32] = Indexes(client, f'{base_path}/dca_class_2015_returns')
        self.dca_class_2015_stack: Indexes[Sats] = Indexes(client, f'{base_path}/dca_class_2015_stack')
        self.dca_class_2016_avg_price: Indexes[Dollars] = Indexes(client, f'{base_path}/dca_class_2016_avg_price')
        self.dca_class_2016_returns: Indexes[StoredF32] = Indexes(client, f'{base_path}/dca_class_2016_returns')
        self.dca_class_2016_stack: Indexes[Sats] = Indexes(client, f'{base_path}/dca_class_2016_stack')
        self.dca_class_2017_avg_price: Indexes[Dollars] = Indexes(client, f'{base_path}/dca_class_2017_avg_price')
        self.dca_class_2017_returns: Indexes[StoredF32] = Indexes(client, f'{base_path}/dca_class_2017_returns')
        self.dca_class_2017_stack: Indexes[Sats] = Indexes(client, f'{base_path}/dca_class_2017_stack')
        self.dca_class_2018_avg_price: Indexes[Dollars] = Indexes(client, f'{base_path}/dca_class_2018_avg_price')
        self.dca_class_2018_returns: Indexes[StoredF32] = Indexes(client, f'{base_path}/dca_class_2018_returns')
        self.dca_class_2018_stack: Indexes[Sats] = Indexes(client, f'{base_path}/dca_class_2018_stack')
        self.dca_class_2019_avg_price: Indexes[Dollars] = Indexes(client, f'{base_path}/dca_class_2019_avg_price')
        self.dca_class_2019_returns: Indexes[StoredF32] = Indexes(client, f'{base_path}/dca_class_2019_returns')
        self.dca_class_2019_stack: Indexes[Sats] = Indexes(client, f'{base_path}/dca_class_2019_stack')
        self.dca_class_2020_avg_price: Indexes[Dollars] = Indexes(client, f'{base_path}/dca_class_2020_avg_price')
        self.dca_class_2020_returns: Indexes[StoredF32] = Indexes(client, f'{base_path}/dca_class_2020_returns')
        self.dca_class_2020_stack: Indexes[Sats] = Indexes(client, f'{base_path}/dca_class_2020_stack')
        self.dca_class_2021_avg_price: Indexes[Dollars] = Indexes(client, f'{base_path}/dca_class_2021_avg_price')
        self.dca_class_2021_returns: Indexes[StoredF32] = Indexes(client, f'{base_path}/dca_class_2021_returns')
        self.dca_class_2021_stack: Indexes[Sats] = Indexes(client, f'{base_path}/dca_class_2021_stack')
        self.dca_class_2022_avg_price: Indexes[Dollars] = Indexes(client, f'{base_path}/dca_class_2022_avg_price')
        self.dca_class_2022_returns: Indexes[StoredF32] = Indexes(client, f'{base_path}/dca_class_2022_returns')
        self.dca_class_2022_stack: Indexes[Sats] = Indexes(client, f'{base_path}/dca_class_2022_stack')
        self.dca_class_2023_avg_price: Indexes[Dollars] = Indexes(client, f'{base_path}/dca_class_2023_avg_price')
        self.dca_class_2023_returns: Indexes[StoredF32] = Indexes(client, f'{base_path}/dca_class_2023_returns')
        self.dca_class_2023_stack: Indexes[Sats] = Indexes(client, f'{base_path}/dca_class_2023_stack')
        self.dca_class_2024_avg_price: Indexes[Dollars] = Indexes(client, f'{base_path}/dca_class_2024_avg_price')
        self.dca_class_2024_returns: Indexes[StoredF32] = Indexes(client, f'{base_path}/dca_class_2024_returns')
        self.dca_class_2024_stack: Indexes[Sats] = Indexes(client, f'{base_path}/dca_class_2024_stack')
        self.dca_class_2025_avg_price: Indexes[Dollars] = Indexes(client, f'{base_path}/dca_class_2025_avg_price')
        self.dca_class_2025_returns: Indexes[StoredF32] = Indexes(client, f'{base_path}/dca_class_2025_returns')
        self.dca_class_2025_stack: Indexes[Sats] = Indexes(client, f'{base_path}/dca_class_2025_stack')
        self.max_days_between_price_aths: Indexes[StoredU16] = Indexes(client, f'{base_path}/max_days_between_price_aths')
        self.max_years_between_price_aths: Indexes[StoredF32] = Indexes(client, f'{base_path}/max_years_between_price_aths')
        self.price_10y_ago: Indexes[Dollars] = Indexes(client, f'{base_path}/price_10y_ago')
        # Moving averages / lagged prices / volatility.
        # NOTE(review): Price13dEmaPattern accessors are built from a bare name
        # while Indexes accessors use f'{base_path}/...' — confirm the generator
        # intends the different rooting.
        self.price_13d_ema: Price13dEmaPattern = Price13dEmaPattern(client, 'price_13d_ema')
        self.price_13d_sma: Price13dEmaPattern = Price13dEmaPattern(client, 'price_13d_sma')
        self.price_144d_ema: Price13dEmaPattern = Price13dEmaPattern(client, 'price_144d_ema')
        self.price_144d_sma: Price13dEmaPattern = Price13dEmaPattern(client, 'price_144d_sma')
        self.price_1d_ago: Indexes[Dollars] = Indexes(client, f'{base_path}/price_1d_ago')
        self.price_1m_ago: Indexes[Dollars] = Indexes(client, f'{base_path}/price_1m_ago')
        self.price_1m_ema: Price13dEmaPattern = Price13dEmaPattern(client, 'price_1m_ema')
        self.price_1m_max: Indexes[Dollars] = Indexes(client, f'{base_path}/price_1m_max')
        self.price_1m_min: Indexes[Dollars] = Indexes(client, f'{base_path}/price_1m_min')
        self.price_1m_sma: Price13dEmaPattern = Price13dEmaPattern(client, 'price_1m_sma')
        self.price_1m_volatility: Indexes[StoredF32] = Indexes(client, f'{base_path}/price_1m_volatility')
        self.price_1w_ago: Indexes[Dollars] = Indexes(client, f'{base_path}/price_1w_ago')
        self.price_1w_ema: Price13dEmaPattern = Price13dEmaPattern(client, 'price_1w_ema')
        self.price_1w_max: Indexes[Dollars] = Indexes(client, f'{base_path}/price_1w_max')
        self.price_1w_min: Indexes[Dollars] = Indexes(client, f'{base_path}/price_1w_min')
        self.price_1w_sma: Price13dEmaPattern = Price13dEmaPattern(client, 'price_1w_sma')
        self.price_1w_volatility: Indexes[StoredF32] = Indexes(client, f'{base_path}/price_1w_volatility')
        self.price_1y_ago: Indexes[Dollars] = Indexes(client, f'{base_path}/price_1y_ago')
        self.price_1y_ema: Price13dEmaPattern = Price13dEmaPattern(client, 'price_1y_ema')
        self.price_1y_max: Indexes[Dollars] = Indexes(client, f'{base_path}/price_1y_max')
        self.price_1y_min: Indexes[Dollars] = Indexes(client, f'{base_path}/price_1y_min')
        self.price_1y_sma: Price13dEmaPattern = Price13dEmaPattern(client, 'price_1y_sma')
        self.price_1y_volatility: Indexes[StoredF32] = Indexes(client, f'{base_path}/price_1y_volatility')
        self.price_200d_ema: Price13dEmaPattern = Price13dEmaPattern(client, 'price_200d_ema')
        self.price_200d_sma: Price13dEmaPattern = Price13dEmaPattern(client, 'price_200d_sma')
        self.price_200d_sma_x0_8: Indexes[Dollars] = Indexes(client, f'{base_path}/price_200d_sma_x0_8')
        self.price_200d_sma_x2_4: Indexes[Dollars] = Indexes(client, f'{base_path}/price_200d_sma_x2_4')
        self.price_200w_ema: Price13dEmaPattern = Price13dEmaPattern(client, 'price_200w_ema')
        self.price_200w_sma: Price13dEmaPattern = Price13dEmaPattern(client, 'price_200w_sma')
        self.price_21d_ema: Price13dEmaPattern = Price13dEmaPattern(client, 'price_21d_ema')
        self.price_21d_sma: Price13dEmaPattern = Price13dEmaPattern(client, 'price_21d_sma')
        self.price_2w_choppiness_index: Indexes[StoredF32] = Indexes(client, f'{base_path}/price_2w_choppiness_index')
        self.price_2w_max: Indexes[Dollars] = Indexes(client, f'{base_path}/price_2w_max')
        self.price_2w_min: Indexes[Dollars] = Indexes(client, f'{base_path}/price_2w_min')
        self.price_2y_ago: Indexes[Dollars] = Indexes(client, f'{base_path}/price_2y_ago')
        self.price_2y_ema: Price13dEmaPattern = Price13dEmaPattern(client, 'price_2y_ema')
        self.price_2y_sma: Price13dEmaPattern = Price13dEmaPattern(client, 'price_2y_sma')
        self.price_34d_ema: Price13dEmaPattern = Price13dEmaPattern(client, 'price_34d_ema')
        self.price_34d_sma: Price13dEmaPattern = Price13dEmaPattern(client, 'price_34d_sma')
        self.price_3m_ago: Indexes[Dollars] = Indexes(client, f'{base_path}/price_3m_ago')
        self.price_3y_ago: Indexes[Dollars] = Indexes(client, f'{base_path}/price_3y_ago')
        self.price_4y_ago: Indexes[Dollars] = Indexes(client, f'{base_path}/price_4y_ago')
        self.price_4y_ema: Price13dEmaPattern = Price13dEmaPattern(client, 'price_4y_ema')
        self.price_4y_sma: Price13dEmaPattern = Price13dEmaPattern(client, 'price_4y_sma')
        self.price_55d_ema: Price13dEmaPattern = Price13dEmaPattern(client, 'price_55d_ema')
        self.price_55d_sma: Price13dEmaPattern = Price13dEmaPattern(client, 'price_55d_sma')
        self.price_5y_ago: Indexes[Dollars] = Indexes(client, f'{base_path}/price_5y_ago')
        self.price_6m_ago: Indexes[Dollars] = Indexes(client, f'{base_path}/price_6m_ago')
        self.price_6y_ago: Indexes[Dollars] = Indexes(client, f'{base_path}/price_6y_ago')
        self.price_89d_ema: Price13dEmaPattern = Price13dEmaPattern(client, 'price_89d_ema')
        self.price_89d_sma: Price13dEmaPattern = Price13dEmaPattern(client, 'price_89d_sma')
        self.price_8d_ema: Price13dEmaPattern = Price13dEmaPattern(client, 'price_8d_ema')
        self.price_8d_sma: Price13dEmaPattern = Price13dEmaPattern(client, 'price_8d_sma')
        self.price_8y_ago: Indexes[Dollars] = Indexes(client, f'{base_path}/price_8y_ago')
        # ATH / drawdown / true-range metrics.
        self.price_ath: Indexes26[Dollars] = Indexes26(client, f'{base_path}/price_ath')
        self.price_drawdown: Indexes26[StoredF32] = Indexes26(client, f'{base_path}/price_drawdown')
        self.price_true_range: Indexes5[StoredF32] = Indexes5(client, f'{base_path}/price_true_range')
        self.price_true_range_2w_sum: Indexes5[StoredF32] = Indexes5(client, f'{base_path}/price_true_range_2w_sum')

class CatalogTree_Computed_Pools:
    """Catalog tree node."""

    def __init__(self, client: BrkClientBase, base_path: str = ''):
        # Pool slug index plus the per-pool vectors subtree.
        self.pool: Indexes2[PoolSlug] = Indexes2(client, f'{base_path}/pool')
        self.vecs: CatalogTree_Computed_Pools_Vecs = CatalogTree_Computed_Pools_Vecs(client, f'{base_path}/vecs')

class CatalogTree_Computed_Pools_Vecs:
    """Catalog tree node."""

    def __init__(self, client: BrkClientBase, base_path: str = ''):
        # One AXbtPattern accessor per mining pool; the path segment is the
        # pool's canonical CamelCase name.
        self.axbt: AXbtPattern = AXbtPattern(client, f'{base_path}/AXbt')
        self.aaopool: AXbtPattern = AXbtPattern(client, f'{base_path}/AaoPool')
        self.antpool: AXbtPattern = AXbtPattern(client, f'{base_path}/AntPool')
        self.arkpool: AXbtPattern = AXbtPattern(client, f'{base_path}/ArkPool')
        self.asicminer: AXbtPattern = AXbtPattern(client, f'{base_path}/AsicMiner')
        self.batpool: AXbtPattern = AXbtPattern(client, f'{base_path}/BatPool')
        self.bcmonster: AXbtPattern = AXbtPattern(client, f'{base_path}/BcMonster')
        self.bcpoolio: AXbtPattern = AXbtPattern(client, f'{base_path}/BcpoolIo')
        self.binancepool: AXbtPattern = AXbtPattern(client, f'{base_path}/BinancePool')
        self.bitclub: AXbtPattern = AXbtPattern(client, f'{base_path}/BitClub')
        self.bitfufupool: AXbtPattern = AXbtPattern(client, f'{base_path}/BitFuFuPool')
        self.bitfury: AXbtPattern = AXbtPattern(client, f'{base_path}/BitFury')
        self.bitminter: AXbtPattern = AXbtPattern(client, f'{base_path}/BitMinter')
        self.bitalo: AXbtPattern = AXbtPattern(client, f'{base_path}/Bitalo')
        self.bitcoinaffiliatenetwork: AXbtPattern = AXbtPattern(client, f'{base_path}/BitcoinAffiliateNetwork')
        self.bitcoincom: AXbtPattern = AXbtPattern(client, f'{base_path}/BitcoinCom')
        self.bitcoinindia: AXbtPattern = AXbtPattern(client, f'{base_path}/BitcoinIndia')
        self.bitcoinrussia: AXbtPattern = AXbtPattern(client, f'{base_path}/BitcoinRussia')
        self.bitcoinukraine: AXbtPattern = AXbtPattern(client, f'{base_path}/BitcoinUkraine')
        self.bitfarms: AXbtPattern = AXbtPattern(client, f'{base_path}/Bitfarms')
        self.bitparking: AXbtPattern = AXbtPattern(client, f'{base_path}/Bitparking')
        self.bitsolo: AXbtPattern = AXbtPattern(client, f'{base_path}/Bitsolo')
        self.bixin: AXbtPattern = AXbtPattern(client, f'{base_path}/Bixin')
        self.blockfills: AXbtPattern = AXbtPattern(client, f'{base_path}/BlockFills')
        self.braiinspool: AXbtPattern = AXbtPattern(client, f'{base_path}/BraiinsPool')
        self.bravomining: AXbtPattern = AXbtPattern(client, f'{base_path}/BravoMining')
        self.btpool: AXbtPattern = AXbtPattern(client, f'{base_path}/BtPool')
        self.btccom: AXbtPattern = AXbtPattern(client, f'{base_path}/BtcCom')
        self.btcdig: AXbtPattern = AXbtPattern(client, f'{base_path}/BtcDig')
        self.btcguild: AXbtPattern = AXbtPattern(client, f'{base_path}/BtcGuild')
        self.btclab: AXbtPattern = AXbtPattern(client, f'{base_path}/BtcLab')
        self.btcmp: AXbtPattern = AXbtPattern(client, f'{base_path}/BtcMp')
        self.btcnuggets: AXbtPattern = AXbtPattern(client, f'{base_path}/BtcNuggets')
        self.btcpoolparty: AXbtPattern = AXbtPattern(client, f'{base_path}/BtcPoolParty')
        self.btcserv: AXbtPattern = AXbtPattern(client, f'{base_path}/BtcServ')
        self.btctop: AXbtPattern = AXbtPattern(client, f'{base_path}/BtcTop')
        self.btcc: AXbtPattern = AXbtPattern(client, f'{base_path}/Btcc')
        # Per-pool accessors, continued (BwPool .. Zulupool).
        self.bwpool: AXbtPattern = AXbtPattern(client, f'{base_path}/BwPool')
        self.bytepool: AXbtPattern = AXbtPattern(client, f'{base_path}/BytePool')
        self.canoe: AXbtPattern = AXbtPattern(client, f'{base_path}/Canoe')
        self.canoepool: AXbtPattern = AXbtPattern(client, f'{base_path}/CanoePool')
        self.carbonnegative: AXbtPattern = AXbtPattern(client, f'{base_path}/CarbonNegative')
        self.ckpool: AXbtPattern = AXbtPattern(client, f'{base_path}/CkPool')
        self.cloudhashing: AXbtPattern = AXbtPattern(client, f'{base_path}/CloudHashing')
        self.coinlab: AXbtPattern = AXbtPattern(client, f'{base_path}/CoinLab')
        self.cointerra: AXbtPattern = AXbtPattern(client, f'{base_path}/Cointerra')
        self.connectbtc: AXbtPattern = AXbtPattern(client, f'{base_path}/ConnectBtc')
        self.dpool: AXbtPattern = AXbtPattern(client, f'{base_path}/DPool')
        self.dcexploration: AXbtPattern = AXbtPattern(client, f'{base_path}/DcExploration')
        self.dcex: AXbtPattern = AXbtPattern(client, f'{base_path}/Dcex')
        self.digitalbtc: AXbtPattern = AXbtPattern(client, f'{base_path}/DigitalBtc')
        self.digitalxmintsy: AXbtPattern = AXbtPattern(client, f'{base_path}/DigitalXMintsy')
        self.eclipsemc: AXbtPattern = AXbtPattern(client, f'{base_path}/EclipseMc')
        self.eightbaochi: AXbtPattern = AXbtPattern(client, f'{base_path}/EightBaochi')
        self.ekanembtc: AXbtPattern = AXbtPattern(client, f'{base_path}/EkanemBtc')
        self.eligius: AXbtPattern = AXbtPattern(client, f'{base_path}/Eligius')
        self.emcdpool: AXbtPattern = AXbtPattern(client, f'{base_path}/EmcdPool')
        self.entrustcharitypool: AXbtPattern = AXbtPattern(client, f'{base_path}/EntrustCharityPool')
        self.eobot: AXbtPattern = AXbtPattern(client, f'{base_path}/Eobot')
        self.exxbw: AXbtPattern = AXbtPattern(client, f'{base_path}/ExxBw')
        self.f2pool: AXbtPattern = AXbtPattern(client, f'{base_path}/F2Pool')
        self.fiftyeightcoin: AXbtPattern = AXbtPattern(client, f'{base_path}/FiftyEightCoin')
        self.foundryusa: AXbtPattern = AXbtPattern(client, f'{base_path}/FoundryUsa')
        self.futurebitapollosolo: AXbtPattern = AXbtPattern(client, f'{base_path}/FutureBitApolloSolo')
        self.gbminers: AXbtPattern = AXbtPattern(client, f'{base_path}/GbMiners')
        self.ghashio: AXbtPattern = AXbtPattern(client, f'{base_path}/GhashIo')
        self.givemecoins: AXbtPattern = AXbtPattern(client, f'{base_path}/GiveMeCoins')
        self.gogreenlight: AXbtPattern = AXbtPattern(client, f'{base_path}/GoGreenLight')
        self.haozhuzhu: AXbtPattern = AXbtPattern(client, f'{base_path}/HaoZhuZhu')
        self.haominer: AXbtPattern = AXbtPattern(client, f'{base_path}/Haominer')
        self.hashbx: AXbtPattern = AXbtPattern(client, f'{base_path}/HashBx')
        self.hashpool: AXbtPattern = AXbtPattern(client, f'{base_path}/HashPool')
        self.helix: AXbtPattern = AXbtPattern(client, f'{base_path}/Helix')
        self.hhtt: AXbtPattern = AXbtPattern(client, f'{base_path}/Hhtt')
        self.hotpool: AXbtPattern = AXbtPattern(client, f'{base_path}/HotPool')
        self.hummerpool: AXbtPattern = AXbtPattern(client, f'{base_path}/Hummerpool')
        self.huobipool: AXbtPattern = AXbtPattern(client, f'{base_path}/HuobiPool')
        self.innopolistech: AXbtPattern = AXbtPattern(client, f'{base_path}/InnopolisTech')
        self.kanopool: AXbtPattern = AXbtPattern(client, f'{base_path}/KanoPool')
        self.kncminer: AXbtPattern = AXbtPattern(client, f'{base_path}/KncMiner')
        self.kucoinpool: AXbtPattern = AXbtPattern(client, f'{base_path}/KuCoinPool')
        self.lubiancom: AXbtPattern = AXbtPattern(client, f'{base_path}/LubianCom')
        self.luckypool: AXbtPattern = AXbtPattern(client, f'{base_path}/LuckyPool')
        self.luxor: AXbtPattern = AXbtPattern(client, f'{base_path}/Luxor')
        self.marapool: AXbtPattern = AXbtPattern(client, f'{base_path}/MaraPool')
        self.maxbtc: AXbtPattern = AXbtPattern(client, f'{base_path}/MaxBtc')
        self.maxipool: AXbtPattern = AXbtPattern(client, f'{base_path}/MaxiPool')
        self.megabigpower: AXbtPattern = AXbtPattern(client, f'{base_path}/MegaBigPower')
        self.minerium: AXbtPattern = AXbtPattern(client, f'{base_path}/Minerium')
        self.miningcity: AXbtPattern = AXbtPattern(client, f'{base_path}/MiningCity')
        self.miningdutch: AXbtPattern = AXbtPattern(client, f'{base_path}/MiningDutch')
        self.miningkings: AXbtPattern = AXbtPattern(client, f'{base_path}/MiningKings')
        self.miningsquared: AXbtPattern = AXbtPattern(client, f'{base_path}/MiningSquared')
        self.mmpool: AXbtPattern = AXbtPattern(client, f'{base_path}/Mmpool')
        self.mtred: AXbtPattern = AXbtPattern(client, f'{base_path}/MtRed')
        self.multicoinco: AXbtPattern = AXbtPattern(client, f'{base_path}/MultiCoinCo')
        self.multipool: AXbtPattern = AXbtPattern(client, f'{base_path}/Multipool')
        self.mybtccoinpool: AXbtPattern = AXbtPattern(client, f'{base_path}/MyBtcCoinPool')
        self.neopool: AXbtPattern = AXbtPattern(client, f'{base_path}/Neopool')
        self.nexious: AXbtPattern = AXbtPattern(client, f'{base_path}/Nexious')
        self.nicehash: AXbtPattern = AXbtPattern(client, f'{base_path}/NiceHash')
        self.nmcbit: AXbtPattern = AXbtPattern(client, f'{base_path}/NmcBit')
        self.novablock: AXbtPattern = AXbtPattern(client, f'{base_path}/NovaBlock')
        self.ocean: AXbtPattern = AXbtPattern(client, f'{base_path}/Ocean')
        self.okexpool: AXbtPattern = AXbtPattern(client, f'{base_path}/OkExPool')
        self.okminer: AXbtPattern = AXbtPattern(client, f'{base_path}/OkMiner')
        self.okkong: AXbtPattern = AXbtPattern(client, f'{base_path}/Okkong')
        self.okpooltop: AXbtPattern = AXbtPattern(client, f'{base_path}/OkpoolTop')
        self.onehash: AXbtPattern = AXbtPattern(client, f'{base_path}/OneHash')
        self.onem1x: AXbtPattern = AXbtPattern(client, f'{base_path}/OneM1x')
        self.onethash: AXbtPattern = AXbtPattern(client, f'{base_path}/OneThash')
        self.ozcoin: AXbtPattern = AXbtPattern(client, f'{base_path}/OzCoin')
        self.phashio: AXbtPattern = AXbtPattern(client, f'{base_path}/PHashIo')
        self.parasite: AXbtPattern = AXbtPattern(client, f'{base_path}/Parasite')
        self.patels: AXbtPattern = AXbtPattern(client, f'{base_path}/Patels')
        self.pegapool: AXbtPattern = AXbtPattern(client, f'{base_path}/PegaPool')
        self.phoenix: AXbtPattern = AXbtPattern(client, f'{base_path}/Phoenix')
        self.polmine: AXbtPattern = AXbtPattern(client, f'{base_path}/Polmine')
        self.pool175btc: AXbtPattern = AXbtPattern(client, f'{base_path}/Pool175btc')
        self.pool50btc: AXbtPattern = AXbtPattern(client, f'{base_path}/Pool50btc')
        self.poolin: AXbtPattern = AXbtPattern(client, f'{base_path}/Poolin')
        self.portlandhodl: AXbtPattern = AXbtPattern(client, f'{base_path}/PortlandHodl')
        self.publicpool: AXbtPattern = AXbtPattern(client, f'{base_path}/PublicPool')
        self.purebtccom: AXbtPattern = AXbtPattern(client, f'{base_path}/PureBtcCom')
        self.rawpool: AXbtPattern = AXbtPattern(client, f'{base_path}/Rawpool')
        self.rigpool: AXbtPattern = AXbtPattern(client, f'{base_path}/RigPool')
        self.sbicrypto: AXbtPattern = AXbtPattern(client, f'{base_path}/SbiCrypto')
        self.secpool: AXbtPattern = AXbtPattern(client, f'{base_path}/SecPool')
        self.secretsuperstar: AXbtPattern = AXbtPattern(client, f'{base_path}/SecretSuperstar')
        self.sevenpool: AXbtPattern = AXbtPattern(client, f'{base_path}/SevenPool')
        self.shawnp0wers: AXbtPattern = AXbtPattern(client, f'{base_path}/ShawnP0wers')
        self.sigmapoolcom: AXbtPattern = AXbtPattern(client, f'{base_path}/SigmapoolCom')
        self.simplecoinus: AXbtPattern = AXbtPattern(client, f'{base_path}/SimplecoinUs')
        self.solock: AXbtPattern = AXbtPattern(client, f'{base_path}/SoloCk')
        self.spiderpool: AXbtPattern = AXbtPattern(client, f'{base_path}/SpiderPool')
        self.stminingcorp: AXbtPattern = AXbtPattern(client, f'{base_path}/StMiningCorp')
        self.tangpool: AXbtPattern = AXbtPattern(client, f'{base_path}/Tangpool')
        self.tatmaspool: AXbtPattern = AXbtPattern(client, f'{base_path}/TatmasPool')
        self.tbdice: AXbtPattern = AXbtPattern(client, f'{base_path}/TbDice')
        self.telco214: AXbtPattern = AXbtPattern(client, f'{base_path}/Telco214')
        self.terrapool: AXbtPattern = AXbtPattern(client, f'{base_path}/TerraPool')
        self.tiger: AXbtPattern = AXbtPattern(client, f'{base_path}/Tiger')
        self.tigerpoolnet: AXbtPattern = AXbtPattern(client, f'{base_path}/TigerpoolNet')
        self.titan: AXbtPattern = AXbtPattern(client, f'{base_path}/Titan')
        self.transactioncoinmining: AXbtPattern = AXbtPattern(client, f'{base_path}/TransactionCoinMining')
        self.trickysbtcpool: AXbtPattern = AXbtPattern(client, f'{base_path}/TrickysBtcPool')
        self.triplemining: AXbtPattern = AXbtPattern(client, f'{base_path}/TripleMining')
        self.twentyoneinc: AXbtPattern = AXbtPattern(client, f'{base_path}/TwentyOneInc')
        self.ultimuspool: AXbtPattern = AXbtPattern(client, f'{base_path}/UltimusPool')
        self.unknown: AXbtPattern = AXbtPattern(client, f'{base_path}/Unknown')
        self.unomp: AXbtPattern = AXbtPattern(client, f'{base_path}/Unomp')
        self.viabtc: AXbtPattern = AXbtPattern(client, f'{base_path}/ViaBtc')
        self.waterhole: AXbtPattern = AXbtPattern(client, f'{base_path}/Waterhole')
        self.wayicn: AXbtPattern = AXbtPattern(client, f'{base_path}/WayiCn')
        self.whitepool: AXbtPattern = AXbtPattern(client, f'{base_path}/WhitePool')
        self.wk057: AXbtPattern = AXbtPattern(client, f'{base_path}/Wk057')
        self.yourbtcnet: AXbtPattern = AXbtPattern(client, f'{base_path}/YourbtcNet')
        self.zulupool: AXbtPattern = AXbtPattern(client, f'{base_path}/Zulupool')

class CatalogTree_Computed_Price:
    """Catalog tree node."""

    def __init__(self, client: BrkClientBase, base_path: str = ''):
        # OHLC price accessors, each in dollars, cents, and sats variants.
        self.price_close: Indexes3[Dollars] = Indexes3(client, f'{base_path}/price_close')
        self.price_close_in_cents: Indexes13[Cents] = Indexes13(client, f'{base_path}/price_close_in_cents')
        self.price_close_in_sats: Indexes3[Sats] = Indexes3(client, f'{base_path}/price_close_in_sats')
        self.price_high: Indexes3[Dollars] = Indexes3(client, f'{base_path}/price_high')
        self.price_high_in_cents: Indexes13[Cents] = Indexes13(client, f'{base_path}/price_high_in_cents')
        self.price_high_in_sats: Indexes3[Sats] = Indexes3(client, f'{base_path}/price_high_in_sats')
        self.price_low: Indexes3[Dollars] = Indexes3(client, f'{base_path}/price_low')
        self.price_low_in_cents: Indexes13[Cents] = Indexes13(client, f'{base_path}/price_low_in_cents')
        self.price_low_in_sats: Indexes3[Sats] = Indexes3(client, f'{base_path}/price_low_in_sats')
        # Combined OHLC records.
        self.price_ohlc: Indexes3[OHLCDollars] = Indexes3(client, f'{base_path}/price_ohlc')
        self.price_ohlc_in_sats: Indexes3[OHLCSats] = Indexes3(client, f'{base_path}/price_ohlc_in_sats')
        self.price_open: Indexes3[Dollars] = Indexes3(client, f'{base_path}/price_open')
        self.price_open_in_cents: Indexes13[Cents] = Indexes13(client, f'{base_path}/price_open_in_cents')
        self.price_open_in_sats: Indexes3[Sats] = Indexes3(client, f'{base_path}/price_open_in_sats')

class CatalogTree_Computed_Stateful:
    """Catalog tree node."""

    def __init__(self, client: BrkClientBase, base_path: str = ''):
        # Address counts plus cohort/address-data subtrees.
        self.addr_count: Indexes3[StoredU64] = Indexes3(client, f'{base_path}/addr_count')
        self.address_cohorts: CatalogTree_Computed_Stateful_AddressCohorts = CatalogTree_Computed_Stateful_AddressCohorts(client, f'{base_path}/address_cohorts')
        self.addresses_data: CatalogTree_Computed_Stateful_AddressesData = CatalogTree_Computed_Stateful_AddressesData(client, f'{base_path}/addresses_data')
        self.addresstype_to_height_to_addr_count: AddresstypeToHeightToAddrCountPattern[StoredU64] = AddresstypeToHeightToAddrCountPattern(client, f'{base_path}/addresstype_to_height_to_addr_count')
        self.addresstype_to_height_to_empty_addr_count: AddresstypeToHeightToAddrCountPattern[StoredU64] = AddresstypeToHeightToAddrCountPattern(client, f'{base_path}/addresstype_to_height_to_empty_addr_count')
        self.addresstype_to_indexes_to_addr_count: AddresstypeToHeightToAddrCountPattern[StoredU64] = AddresstypeToHeightToAddrCountPattern(client, f'{base_path}/addresstype_to_indexes_to_addr_count')
        self.addresstype_to_indexes_to_empty_addr_count: AddresstypeToHeightToAddrCountPattern[StoredU64] = AddresstypeToHeightToAddrCountPattern(client, f'{base_path}/addresstype_to_indexes_to_empty_addr_count')
        self.any_address_indexes: AddresstypeToHeightToAddrCountPattern[AnyAddressIndex] = AddresstypeToHeightToAddrCountPattern(client, f'{base_path}/any_address_indexes')
        self.chain_state: Indexes2[SupplyState] = Indexes2(client, f'{base_path}/chain_state')
        self.empty_addr_count: Indexes3[StoredU64] = Indexes3(client, f'{base_path}/empty_addr_count')
        self.emptyaddressindex: Indexes29[EmptyAddressIndex] = Indexes29(client, f'{base_path}/emptyaddressindex')
        self.loadedaddressindex: Indexes30[LoadedAddressIndex] = Indexes30(client, f'{base_path}/loadedaddressindex')
        self.market_cap: Indexes26[Dollars] = Indexes26(client, f'{base_path}/market_cap')
        self.opreturn_supply: SupplyPattern = SupplyPattern(client, f'{base_path}/opreturn_supply')
        self.unspendable_supply: SupplyPattern = SupplyPattern(client, f'{base_path}/unspendable_supply')
        self.utxo_cohorts: CatalogTree_Computed_Stateful_UtxoCohorts = CatalogTree_Computed_Stateful_UtxoCohorts(client, f'{base_path}/utxo_cohorts')

class CatalogTree_Computed_Stateful_AddressCohorts:
    """Catalog tree node."""

    def __init__(self, client: BrkClientBase, base_path: str = ''):
        # Address cohorts bucketed three ways: by range, >= threshold, < threshold.
        self.amount_range: CatalogTree_Computed_Stateful_AddressCohorts_AmountRange = CatalogTree_Computed_Stateful_AddressCohorts_AmountRange(client, f'{base_path}/amount_range')
        self.ge_amount: CatalogTree_Computed_Stateful_AddressCohorts_GeAmount = CatalogTree_Computed_Stateful_AddressCohorts_GeAmount(client, f'{base_path}/ge_amount')
        self.lt_amount: CatalogTree_Computed_Stateful_AddressCohorts_LtAmount = CatalogTree_Computed_Stateful_AddressCohorts_LtAmount(client, f'{base_path}/lt_amount')

class CatalogTree_Computed_Stateful_AddressCohorts_AmountRange:
    """Catalog tree node."""

    def __init__(self, client: BrkClientBase, base_path: str = ''):
        # Balance-range cohorts, from 0 sats up to 100k BTC or more.
        self._0sats: _0satsPattern = _0satsPattern(client, f'{base_path}/_0sats')
        self._100btc_to_1k_btc: _0satsPattern = _0satsPattern(client, f'{base_path}/_100btc_to_1k_btc')
        self._100k_btc_or_more: _0satsPattern = _0satsPattern(client, f'{base_path}/_100k_btc_or_more')
        self._100k_sats_to_1m_sats: _0satsPattern = _0satsPattern(client, f'{base_path}/_100k_sats_to_1m_sats')
        self._100sats_to_1k_sats: _0satsPattern = _0satsPattern(client, f'{base_path}/_100sats_to_1k_sats')
        self._10btc_to_100btc: _0satsPattern = _0satsPattern(client, f'{base_path}/_10btc_to_100btc')
        self._10k_btc_to_100k_btc: _0satsPattern = _0satsPattern(client, f'{base_path}/_10k_btc_to_100k_btc')
        self._10k_sats_to_100k_sats: _0satsPattern = _0satsPattern(client, f'{base_path}/_10k_sats_to_100k_sats')
        self._10m_sats_to_1btc: _0satsPattern = _0satsPattern(client, f'{base_path}/_10m_sats_to_1btc')
        self._10sats_to_100sats: _0satsPattern = _0satsPattern(client, f'{base_path}/_10sats_to_100sats')
        self._1btc_to_10btc: _0satsPattern = _0satsPattern(client, f'{base_path}/_1btc_to_10btc')
        self._1k_btc_to_10k_btc: _0satsPattern = _0satsPattern(client, f'{base_path}/_1k_btc_to_10k_btc')
        self._1k_sats_to_10k_sats: _0satsPattern = _0satsPattern(client, f'{base_path}/_1k_sats_to_10k_sats')
        self._1m_sats_to_10m_sats: _0satsPattern = _0satsPattern(client, f'{base_path}/_1m_sats_to_10m_sats')
        self._1sat_to_10sats: _0satsPattern = _0satsPattern(client, f'{base_path}/_1sat_to_10sats')

class CatalogTree_Computed_Stateful_AddressCohorts_GeAmount:
    """Catalog tree node."""

    def __init__(self, client: BrkClientBase, base_path: str = ''):
        # Cohorts of addresses holding at least the named amount.
        self._100btc: _0satsPattern = _0satsPattern(client, f'{base_path}/_100btc')
        self._100k_sats: _0satsPattern = _0satsPattern(client, f'{base_path}/_100k_sats')
        self._100sats: _0satsPattern = _0satsPattern(client, f'{base_path}/_100sats')
        self._10btc: _0satsPattern = _0satsPattern(client, f'{base_path}/_10btc')
        self._10k_btc: _0satsPattern = _0satsPattern(client, f'{base_path}/_10k_btc')
        self._10k_sats: _0satsPattern = _0satsPattern(client, f'{base_path}/_10k_sats')
        self._10m_sats: _0satsPattern = _0satsPattern(client, f'{base_path}/_10m_sats')
        self._10sats: _0satsPattern = _0satsPattern(client, f'{base_path}/_10sats')
        self._1btc: _0satsPattern = _0satsPattern(client, f'{base_path}/_1btc')
        self._1k_btc: _0satsPattern = _0satsPattern(client, f'{base_path}/_1k_btc')
        self._1k_sats: _0satsPattern = _0satsPattern(client, f'{base_path}/_1k_sats')
        self._1m_sats: _0satsPattern = _0satsPattern(client, f'{base_path}/_1m_sats')
        self._1sat: _0satsPattern = _0satsPattern(client, f'{base_path}/_1sat')

class CatalogTree_Computed_Stateful_AddressCohorts_LtAmount:
    """Catalog tree node."""

    def __init__(self, client: BrkClientBase, base_path: str = ''):
        # Cohorts of addresses holding less than the named amount.
        self._100btc: _0satsPattern = _0satsPattern(client, f'{base_path}/_100btc')
        self._100k_btc: _0satsPattern = _0satsPattern(client, f'{base_path}/_100k_btc')
        self._100k_sats: _0satsPattern = _0satsPattern(client, f'{base_path}/_100k_sats')
        self._100sats: _0satsPattern = _0satsPattern(client, f'{base_path}/_100sats')
        self._10btc: _0satsPattern = _0satsPattern(client, f'{base_path}/_10btc')
        self._10k_btc: _0satsPattern = _0satsPattern(client, f'{base_path}/_10k_btc')
        self._10k_sats: _0satsPattern = _0satsPattern(client, f'{base_path}/_10k_sats')
        self._10m_sats: _0satsPattern = _0satsPattern(client, f'{base_path}/_10m_sats')
        self._10sats: _0satsPattern = _0satsPattern(client, f'{base_path}/_10sats')
        self._1btc: _0satsPattern = _0satsPattern(client, f'{base_path}/_1btc')
        self._1k_btc: _0satsPattern = _0satsPattern(client, f'{base_path}/_1k_btc')
        self._1k_sats: _0satsPattern = _0satsPattern(client, f'{base_path}/_1k_sats')
        self._1m_sats: _0satsPattern = _0satsPattern(client, f'{base_path}/_1m_sats')

class CatalogTree_Computed_Stateful_AddressesData:
    """Catalog tree node."""

    def __init__(self, client: BrkClientBase, base_path: str = ''):
        self.empty: Indexes29[EmptyAddressData] = Indexes29(client, f'{base_path}/empty')
        self.loaded: Indexes30[LoadedAddressData] = Indexes30(client, f'{base_path}/loaded')

class CatalogTree_Computed_Stateful_UtxoCohorts:
    """Catalog tree node."""

    def __init__(self, client: BrkClientBase, base_path: str = ''):
        # UTXO cohort subtrees, bucketed by age, amount, epoch, term, type, year.
        self.age_range: CatalogTree_Computed_Stateful_UtxoCohorts_AgeRange = CatalogTree_Computed_Stateful_UtxoCohorts_AgeRange(client, f'{base_path}/age_range')
        self.all: CatalogTree_Computed_Stateful_UtxoCohorts_All = CatalogTree_Computed_Stateful_UtxoCohorts_All(client, f'{base_path}/all')
        self.amount_range: CatalogTree_Computed_Stateful_UtxoCohorts_AmountRange = CatalogTree_Computed_Stateful_UtxoCohorts_AmountRange(client, f'{base_path}/amount_range')
        self.epoch: CatalogTree_Computed_Stateful_UtxoCohorts_Epoch = CatalogTree_Computed_Stateful_UtxoCohorts_Epoch(client, f'{base_path}/epoch')
        self.ge_amount: CatalogTree_Computed_Stateful_UtxoCohorts_GeAmount = CatalogTree_Computed_Stateful_UtxoCohorts_GeAmount(client, f'{base_path}/ge_amount')
        self.lt_amount: CatalogTree_Computed_Stateful_UtxoCohorts_LtAmount = CatalogTree_Computed_Stateful_UtxoCohorts_LtAmount(client, f'{base_path}/lt_amount')
        self.max_age: CatalogTree_Computed_Stateful_UtxoCohorts_MaxAge = CatalogTree_Computed_Stateful_UtxoCohorts_MaxAge(client, f'{base_path}/max_age')
        self.min_age: CatalogTree_Computed_Stateful_UtxoCohorts_MinAge = CatalogTree_Computed_Stateful_UtxoCohorts_MinAge(client, f'{base_path}/min_age')
        self.term: CatalogTree_Computed_Stateful_UtxoCohorts_Term = CatalogTree_Computed_Stateful_UtxoCohorts_Term(client, f'{base_path}/term')
        # NOTE(review): path segment 'type_' keeps the trailing underscore used
        # to avoid shadowing the builtin — confirm the server route matches.
        self.type_: CatalogTree_Computed_Stateful_UtxoCohorts_Type = CatalogTree_Computed_Stateful_UtxoCohorts_Type(client, f'{base_path}/type_')
        self.year: CatalogTree_Computed_Stateful_UtxoCohorts_Year = CatalogTree_Computed_Stateful_UtxoCohorts_Year(client, f'{base_path}/year')

class CatalogTree_Computed_Stateful_UtxoCohorts_AgeRange:
    """Catalog tree node."""

    def __init__(self, client: BrkClientBase, base_path: str = ''):
        # UTXO age buckets from under 1 day up to 15+ years.
        self._10y_to_12y: _10yTo12yPattern = _10yTo12yPattern(client, f'{base_path}/_10y_to_12y')
        self._12y_to_15y: _10yTo12yPattern = _10yTo12yPattern(client, f'{base_path}/_12y_to_15y')
        self._1d_to_1w: _10yTo12yPattern = _10yTo12yPattern(client, f'{base_path}/_1d_to_1w')
        self._1m_to_2m: _10yTo12yPattern = _10yTo12yPattern(client, f'{base_path}/_1m_to_2m')
        self._1w_to_1m: _10yTo12yPattern = _10yTo12yPattern(client, f'{base_path}/_1w_to_1m')
        self._1y_to_2y: _10yTo12yPattern = _10yTo12yPattern(client, f'{base_path}/_1y_to_2y')
        self._2m_to_3m: _10yTo12yPattern = _10yTo12yPattern(client, f'{base_path}/_2m_to_3m')
        self._2y_to_3y: _10yTo12yPattern = _10yTo12yPattern(client, f'{base_path}/_2y_to_3y')
        self._3m_to_4m: _10yTo12yPattern = _10yTo12yPattern(client, f'{base_path}/_3m_to_4m')
        self._3y_to_4y: _10yTo12yPattern = _10yTo12yPattern(client, f'{base_path}/_3y_to_4y')
        self._4m_to_5m: _10yTo12yPattern = _10yTo12yPattern(client, f'{base_path}/_4m_to_5m')
        self._4y_to_5y: _10yTo12yPattern = _10yTo12yPattern(client, f'{base_path}/_4y_to_5y')
        self._5m_to_6m: _10yTo12yPattern = _10yTo12yPattern(client, f'{base_path}/_5m_to_6m')
        self._5y_to_6y: _10yTo12yPattern = _10yTo12yPattern(client, f'{base_path}/_5y_to_6y')
        self._6m_to_1y: _10yTo12yPattern = _10yTo12yPattern(client, f'{base_path}/_6m_to_1y')
        self._6y_to_7y: _10yTo12yPattern = _10yTo12yPattern(client, f'{base_path}/_6y_to_7y')
        self._7y_to_8y: _10yTo12yPattern = _10yTo12yPattern(client, f'{base_path}/_7y_to_8y')
        self._8y_to_10y: _10yTo12yPattern = _10yTo12yPattern(client, f'{base_path}/_8y_to_10y')
        self.from_15y: _10yTo12yPattern = _10yTo12yPattern(client, f'{base_path}/from_15y')
        self.up_to_1d: UpTo1dPattern = UpTo1dPattern(client, f'{base_path}/up_to_1d')

class CatalogTree_Computed_Stateful_UtxoCohorts_All:
    """Catalog tree node."""

    def __init__(self, client: BrkClientBase, base_path: str = ''):
        # Aggregate (all-UTXO) cohort metrics.
        self.activity: ActivityPattern = ActivityPattern(client, f'{base_path}/activity')
        self.price_paid: PricePaidPattern2 = PricePaidPattern2(client, f'{base_path}/price_paid')
        self.realized: RealizedPattern3 = RealizedPattern3(client, f'{base_path}/realized')
        self.relative: CatalogTree_Computed_Stateful_UtxoCohorts_All_Relative = CatalogTree_Computed_Stateful_UtxoCohorts_All_Relative(client, f'{base_path}/relative')
        self.supply: SupplyPattern2 = SupplyPattern2(client, f'{base_path}/supply')
        self.unrealized: UnrealizedPattern = UnrealizedPattern(client, f'{base_path}/unrealized')

class CatalogTree_Computed_Stateful_UtxoCohorts_All_Relative:
    """Catalog tree node."""

    def __init__(self, client: BrkClientBase, base_path: str = ''):
        # Ratios of cohort metrics relative to the cohort's own totals.
        self.neg_unrealized_loss_rel_to_own_total_unrealized_pnl: Indexes27[StoredF32] = Indexes27(client, f'{base_path}/neg_unrealized_loss_rel_to_own_total_unrealized_pnl')
        self.net_unrealized_pnl_rel_to_own_total_unrealized_pnl: Indexes26[StoredF32] = Indexes26(client, f'{base_path}/net_unrealized_pnl_rel_to_own_total_unrealized_pnl')
        self.supply_in_loss_rel_to_own_supply: Indexes27[StoredF64] = Indexes27(client, f'{base_path}/supply_in_loss_rel_to_own_supply')
        self.supply_in_profit_rel_to_own_supply: Indexes27[StoredF64] = Indexes27(client, f'{base_path}/supply_in_profit_rel_to_own_supply')
        self.unrealized_loss_rel_to_own_total_unrealized_pnl: Indexes27[StoredF32] = Indexes27(client, f'{base_path}/unrealized_loss_rel_to_own_total_unrealized_pnl')
        self.unrealized_profit_rel_to_own_total_unrealized_pnl: Indexes27[StoredF32] = Indexes27(client, f'{base_path}/unrealized_profit_rel_to_own_total_unrealized_pnl')

class CatalogTree_Computed_Stateful_UtxoCohorts_AmountRange:
    """Catalog tree node."""

    def __init__(self, client: BrkClientBase, base_path: str = ''):
        self._0sats: _0satsPattern2 = _0satsPattern2(client, f'{base_path}/_0sats')
self._100btc_to_1k_btc: _0satsPattern2 = _0satsPattern2(client, f'{base_path}/_100btc_to_1k_btc') - self._100k_btc_or_more: _0satsPattern2 = _0satsPattern2(client, f'{base_path}/_100k_btc_or_more') - self._100k_sats_to_1m_sats: _0satsPattern2 = _0satsPattern2(client, f'{base_path}/_100k_sats_to_1m_sats') - self._100sats_to_1k_sats: _0satsPattern2 = _0satsPattern2(client, f'{base_path}/_100sats_to_1k_sats') - self._10btc_to_100btc: _0satsPattern2 = _0satsPattern2(client, f'{base_path}/_10btc_to_100btc') - self._10k_btc_to_100k_btc: _0satsPattern2 = _0satsPattern2(client, f'{base_path}/_10k_btc_to_100k_btc') - self._10k_sats_to_100k_sats: _0satsPattern2 = _0satsPattern2(client, f'{base_path}/_10k_sats_to_100k_sats') - self._10m_sats_to_1btc: _0satsPattern2 = _0satsPattern2(client, f'{base_path}/_10m_sats_to_1btc') - self._10sats_to_100sats: _0satsPattern2 = _0satsPattern2(client, f'{base_path}/_10sats_to_100sats') - self._1btc_to_10btc: _0satsPattern2 = _0satsPattern2(client, f'{base_path}/_1btc_to_10btc') - self._1k_btc_to_10k_btc: _0satsPattern2 = _0satsPattern2(client, f'{base_path}/_1k_btc_to_10k_btc') - self._1k_sats_to_10k_sats: _0satsPattern2 = _0satsPattern2(client, f'{base_path}/_1k_sats_to_10k_sats') - self._1m_sats_to_10m_sats: _0satsPattern2 = _0satsPattern2(client, f'{base_path}/_1m_sats_to_10m_sats') - self._1sat_to_10sats: _0satsPattern2 = _0satsPattern2(client, f'{base_path}/_1sat_to_10sats') - -class CatalogTree_Computed_Stateful_UtxoCohorts_Epoch: - """Catalog tree node.""" - - def __init__(self, client: BrkClientBase, base_path: str = ''): - self._0: _10yTo12yPattern = _10yTo12yPattern(client, f'{base_path}/_0') - self._1: _10yTo12yPattern = _10yTo12yPattern(client, f'{base_path}/_1') - self._2: _10yTo12yPattern = _10yTo12yPattern(client, f'{base_path}/_2') - self._3: _10yTo12yPattern = _10yTo12yPattern(client, f'{base_path}/_3') - self._4: _10yTo12yPattern = _10yTo12yPattern(client, f'{base_path}/_4') - -class 
CatalogTree_Computed_Stateful_UtxoCohorts_GeAmount: - """Catalog tree node.""" - - def __init__(self, client: BrkClientBase, base_path: str = ''): - self._100btc: _0satsPattern2 = _0satsPattern2(client, f'{base_path}/_100btc') - self._100k_sats: _0satsPattern2 = _0satsPattern2(client, f'{base_path}/_100k_sats') - self._100sats: _0satsPattern2 = _0satsPattern2(client, f'{base_path}/_100sats') - self._10btc: _0satsPattern2 = _0satsPattern2(client, f'{base_path}/_10btc') - self._10k_btc: _0satsPattern2 = _0satsPattern2(client, f'{base_path}/_10k_btc') - self._10k_sats: _0satsPattern2 = _0satsPattern2(client, f'{base_path}/_10k_sats') - self._10m_sats: _0satsPattern2 = _0satsPattern2(client, f'{base_path}/_10m_sats') - self._10sats: _0satsPattern2 = _0satsPattern2(client, f'{base_path}/_10sats') - self._1btc: _0satsPattern2 = _0satsPattern2(client, f'{base_path}/_1btc') - self._1k_btc: _0satsPattern2 = _0satsPattern2(client, f'{base_path}/_1k_btc') - self._1k_sats: _0satsPattern2 = _0satsPattern2(client, f'{base_path}/_1k_sats') - self._1m_sats: _0satsPattern2 = _0satsPattern2(client, f'{base_path}/_1m_sats') - self._1sat: _0satsPattern2 = _0satsPattern2(client, f'{base_path}/_1sat') - -class CatalogTree_Computed_Stateful_UtxoCohorts_LtAmount: - """Catalog tree node.""" - - def __init__(self, client: BrkClientBase, base_path: str = ''): - self._100btc: _0satsPattern2 = _0satsPattern2(client, f'{base_path}/_100btc') - self._100k_btc: _0satsPattern2 = _0satsPattern2(client, f'{base_path}/_100k_btc') - self._100k_sats: _0satsPattern2 = _0satsPattern2(client, f'{base_path}/_100k_sats') - self._100sats: _0satsPattern2 = _0satsPattern2(client, f'{base_path}/_100sats') - self._10btc: _0satsPattern2 = _0satsPattern2(client, f'{base_path}/_10btc') - self._10k_btc: _0satsPattern2 = _0satsPattern2(client, f'{base_path}/_10k_btc') - self._10k_sats: _0satsPattern2 = _0satsPattern2(client, f'{base_path}/_10k_sats') - self._10m_sats: _0satsPattern2 = _0satsPattern2(client, 
f'{base_path}/_10m_sats') - self._10sats: _0satsPattern2 = _0satsPattern2(client, f'{base_path}/_10sats') - self._1btc: _0satsPattern2 = _0satsPattern2(client, f'{base_path}/_1btc') - self._1k_btc: _0satsPattern2 = _0satsPattern2(client, f'{base_path}/_1k_btc') - self._1k_sats: _0satsPattern2 = _0satsPattern2(client, f'{base_path}/_1k_sats') - self._1m_sats: _0satsPattern2 = _0satsPattern2(client, f'{base_path}/_1m_sats') - -class CatalogTree_Computed_Stateful_UtxoCohorts_MaxAge: - """Catalog tree node.""" - - def __init__(self, client: BrkClientBase, base_path: str = ''): - self._10y: UpTo1dPattern = UpTo1dPattern(client, f'{base_path}/_10y') - self._12y: UpTo1dPattern = UpTo1dPattern(client, f'{base_path}/_12y') - self._15y: UpTo1dPattern = UpTo1dPattern(client, f'{base_path}/_15y') - self._1m: UpTo1dPattern = UpTo1dPattern(client, f'{base_path}/_1m') - self._1w: UpTo1dPattern = UpTo1dPattern(client, f'{base_path}/_1w') - self._1y: UpTo1dPattern = UpTo1dPattern(client, f'{base_path}/_1y') - self._2m: UpTo1dPattern = UpTo1dPattern(client, f'{base_path}/_2m') - self._2y: UpTo1dPattern = UpTo1dPattern(client, f'{base_path}/_2y') - self._3m: UpTo1dPattern = UpTo1dPattern(client, f'{base_path}/_3m') - self._3y: UpTo1dPattern = UpTo1dPattern(client, f'{base_path}/_3y') - self._4m: UpTo1dPattern = UpTo1dPattern(client, f'{base_path}/_4m') - self._4y: UpTo1dPattern = UpTo1dPattern(client, f'{base_path}/_4y') - self._5m: UpTo1dPattern = UpTo1dPattern(client, f'{base_path}/_5m') - self._5y: UpTo1dPattern = UpTo1dPattern(client, f'{base_path}/_5y') - self._6m: UpTo1dPattern = UpTo1dPattern(client, f'{base_path}/_6m') - self._6y: UpTo1dPattern = UpTo1dPattern(client, f'{base_path}/_6y') - self._7y: UpTo1dPattern = UpTo1dPattern(client, f'{base_path}/_7y') - self._8y: UpTo1dPattern = UpTo1dPattern(client, f'{base_path}/_8y') - -class CatalogTree_Computed_Stateful_UtxoCohorts_MinAge: - """Catalog tree node.""" - - def __init__(self, client: BrkClientBase, base_path: str = ''): 
- self._10y: _10yTo12yPattern = _10yTo12yPattern(client, f'{base_path}/_10y') - self._12y: _10yTo12yPattern = _10yTo12yPattern(client, f'{base_path}/_12y') - self._1d: _10yTo12yPattern = _10yTo12yPattern(client, f'{base_path}/_1d') - self._1m: _10yTo12yPattern = _10yTo12yPattern(client, f'{base_path}/_1m') - self._1w: _10yTo12yPattern = _10yTo12yPattern(client, f'{base_path}/_1w') - self._1y: _10yTo12yPattern = _10yTo12yPattern(client, f'{base_path}/_1y') - self._2m: _10yTo12yPattern = _10yTo12yPattern(client, f'{base_path}/_2m') - self._2y: _10yTo12yPattern = _10yTo12yPattern(client, f'{base_path}/_2y') - self._3m: _10yTo12yPattern = _10yTo12yPattern(client, f'{base_path}/_3m') - self._3y: _10yTo12yPattern = _10yTo12yPattern(client, f'{base_path}/_3y') - self._4m: _10yTo12yPattern = _10yTo12yPattern(client, f'{base_path}/_4m') - self._4y: _10yTo12yPattern = _10yTo12yPattern(client, f'{base_path}/_4y') - self._5m: _10yTo12yPattern = _10yTo12yPattern(client, f'{base_path}/_5m') - self._5y: _10yTo12yPattern = _10yTo12yPattern(client, f'{base_path}/_5y') - self._6m: _10yTo12yPattern = _10yTo12yPattern(client, f'{base_path}/_6m') - self._6y: _10yTo12yPattern = _10yTo12yPattern(client, f'{base_path}/_6y') - self._7y: _10yTo12yPattern = _10yTo12yPattern(client, f'{base_path}/_7y') - self._8y: _10yTo12yPattern = _10yTo12yPattern(client, f'{base_path}/_8y') - -class CatalogTree_Computed_Stateful_UtxoCohorts_Term: - """Catalog tree node.""" - - def __init__(self, client: BrkClientBase, base_path: str = ''): - self.long: UpTo1dPattern = UpTo1dPattern(client, f'{base_path}/long') - self.short: UpTo1dPattern = UpTo1dPattern(client, f'{base_path}/short') - -class CatalogTree_Computed_Stateful_UtxoCohorts_Type: - """Catalog tree node.""" - - def __init__(self, client: BrkClientBase, base_path: str = ''): - self.empty: _0satsPattern2 = _0satsPattern2(client, f'{base_path}/empty') - self.p2a: _0satsPattern2 = _0satsPattern2(client, f'{base_path}/p2a') - self.p2ms: _0satsPattern2 = 
_0satsPattern2(client, f'{base_path}/p2ms') - self.p2pk33: _0satsPattern2 = _0satsPattern2(client, f'{base_path}/p2pk33') - self.p2pk65: _0satsPattern2 = _0satsPattern2(client, f'{base_path}/p2pk65') - self.p2pkh: _0satsPattern2 = _0satsPattern2(client, f'{base_path}/p2pkh') - self.p2sh: _0satsPattern2 = _0satsPattern2(client, f'{base_path}/p2sh') - self.p2tr: _0satsPattern2 = _0satsPattern2(client, f'{base_path}/p2tr') - self.p2wpkh: _0satsPattern2 = _0satsPattern2(client, f'{base_path}/p2wpkh') - self.p2wsh: _0satsPattern2 = _0satsPattern2(client, f'{base_path}/p2wsh') - self.unknown: _0satsPattern2 = _0satsPattern2(client, f'{base_path}/unknown') - -class CatalogTree_Computed_Stateful_UtxoCohorts_Year: - """Catalog tree node.""" - - def __init__(self, client: BrkClientBase, base_path: str = ''): - self._2009: _10yTo12yPattern = _10yTo12yPattern(client, f'{base_path}/_2009') - self._2010: _10yTo12yPattern = _10yTo12yPattern(client, f'{base_path}/_2010') - self._2011: _10yTo12yPattern = _10yTo12yPattern(client, f'{base_path}/_2011') - self._2012: _10yTo12yPattern = _10yTo12yPattern(client, f'{base_path}/_2012') - self._2013: _10yTo12yPattern = _10yTo12yPattern(client, f'{base_path}/_2013') - self._2014: _10yTo12yPattern = _10yTo12yPattern(client, f'{base_path}/_2014') - self._2015: _10yTo12yPattern = _10yTo12yPattern(client, f'{base_path}/_2015') - self._2016: _10yTo12yPattern = _10yTo12yPattern(client, f'{base_path}/_2016') - self._2017: _10yTo12yPattern = _10yTo12yPattern(client, f'{base_path}/_2017') - self._2018: _10yTo12yPattern = _10yTo12yPattern(client, f'{base_path}/_2018') - self._2019: _10yTo12yPattern = _10yTo12yPattern(client, f'{base_path}/_2019') - self._2020: _10yTo12yPattern = _10yTo12yPattern(client, f'{base_path}/_2020') - self._2021: _10yTo12yPattern = _10yTo12yPattern(client, f'{base_path}/_2021') - self._2022: _10yTo12yPattern = _10yTo12yPattern(client, f'{base_path}/_2022') - self._2023: _10yTo12yPattern = _10yTo12yPattern(client, 
f'{base_path}/_2023') - self._2024: _10yTo12yPattern = _10yTo12yPattern(client, f'{base_path}/_2024') - self._2025: _10yTo12yPattern = _10yTo12yPattern(client, f'{base_path}/_2025') - self._2026: _10yTo12yPattern = _10yTo12yPattern(client, f'{base_path}/_2026') - -class CatalogTree_Computed_Txins: - """Catalog tree node.""" - - def __init__(self, client: BrkClientBase, base_path: str = ''): - self.txoutindex: Indexes24[TxOutIndex] = Indexes24(client, f'{base_path}/txoutindex') - self.value: Indexes24[Sats] = Indexes24(client, f'{base_path}/value') - -class CatalogTree_Computed_Txouts: - """Catalog tree node.""" - - def __init__(self, client: BrkClientBase, base_path: str = ''): - self.txinindex: Indexes25[TxInIndex] = Indexes25(client, f'{base_path}/txinindex') - -class CatalogTree_Indexed: - """Catalog tree node.""" - - def __init__(self, client: BrkClientBase, base_path: str = ''): - self.address: CatalogTree_Indexed_Address = CatalogTree_Indexed_Address(client, f'{base_path}/address') - self.block: CatalogTree_Indexed_Block = CatalogTree_Indexed_Block(client, f'{base_path}/block') - self.output: CatalogTree_Indexed_Output = CatalogTree_Indexed_Output(client, f'{base_path}/output') - self.tx: CatalogTree_Indexed_Tx = CatalogTree_Indexed_Tx(client, f'{base_path}/tx') - self.txin: CatalogTree_Indexed_Txin = CatalogTree_Indexed_Txin(client, f'{base_path}/txin') - self.txout: CatalogTree_Indexed_Txout = CatalogTree_Indexed_Txout(client, f'{base_path}/txout') - -class CatalogTree_Indexed_Address: - """Catalog tree node.""" - - def __init__(self, client: BrkClientBase, base_path: str = ''): - self.first_p2aaddressindex: Indexes2[P2AAddressIndex] = Indexes2(client, f'{base_path}/first_p2aaddressindex') - self.first_p2pk33addressindex: Indexes2[P2PK33AddressIndex] = Indexes2(client, f'{base_path}/first_p2pk33addressindex') - self.first_p2pk65addressindex: Indexes2[P2PK65AddressIndex] = Indexes2(client, f'{base_path}/first_p2pk65addressindex') - 
self.first_p2pkhaddressindex: Indexes2[P2PKHAddressIndex] = Indexes2(client, f'{base_path}/first_p2pkhaddressindex') - self.first_p2shaddressindex: Indexes2[P2SHAddressIndex] = Indexes2(client, f'{base_path}/first_p2shaddressindex') - self.first_p2traddressindex: Indexes2[P2TRAddressIndex] = Indexes2(client, f'{base_path}/first_p2traddressindex') - self.first_p2wpkhaddressindex: Indexes2[P2WPKHAddressIndex] = Indexes2(client, f'{base_path}/first_p2wpkhaddressindex') - self.first_p2wshaddressindex: Indexes2[P2WSHAddressIndex] = Indexes2(client, f'{base_path}/first_p2wshaddressindex') - self.p2abytes: Indexes16[P2ABytes] = Indexes16(client, f'{base_path}/p2abytes') - self.p2pk33bytes: Indexes17[P2PK33Bytes] = Indexes17(client, f'{base_path}/p2pk33bytes') - self.p2pk65bytes: Indexes18[P2PK65Bytes] = Indexes18(client, f'{base_path}/p2pk65bytes') - self.p2pkhbytes: Indexes19[P2PKHBytes] = Indexes19(client, f'{base_path}/p2pkhbytes') - self.p2shbytes: Indexes20[P2SHBytes] = Indexes20(client, f'{base_path}/p2shbytes') - self.p2trbytes: Indexes21[P2TRBytes] = Indexes21(client, f'{base_path}/p2trbytes') - self.p2wpkhbytes: Indexes22[P2WPKHBytes] = Indexes22(client, f'{base_path}/p2wpkhbytes') - self.p2wshbytes: Indexes23[P2WSHBytes] = Indexes23(client, f'{base_path}/p2wshbytes') - -class CatalogTree_Indexed_Block: - """Catalog tree node.""" - - def __init__(self, client: BrkClientBase, base_path: str = ''): - self.blockhash: Indexes2[BlockHash] = Indexes2(client, f'{base_path}/blockhash') - self.difficulty: Indexes2[StoredF64] = Indexes2(client, f'{base_path}/difficulty') - self.timestamp: Indexes2[Timestamp] = Indexes2(client, f'{base_path}/timestamp') - self.total_size: Indexes2[StoredU64] = Indexes2(client, f'{base_path}/total_size') - self.weight: Indexes2[Weight] = Indexes2(client, f'{base_path}/weight') - -class CatalogTree_Indexed_Output: - """Catalog tree node.""" - - def __init__(self, client: BrkClientBase, base_path: str = ''): - self.first_emptyoutputindex: 
Indexes2[EmptyOutputIndex] = Indexes2(client, f'{base_path}/first_emptyoutputindex') - self.first_opreturnindex: Indexes2[OpReturnIndex] = Indexes2(client, f'{base_path}/first_opreturnindex') - self.first_p2msoutputindex: Indexes2[P2MSOutputIndex] = Indexes2(client, f'{base_path}/first_p2msoutputindex') - self.first_unknownoutputindex: Indexes2[UnknownOutputIndex] = Indexes2(client, f'{base_path}/first_unknownoutputindex') - self.txindex: MetricNode[TxIndex] = MetricNode(client, f'{base_path}/txindex') - -class CatalogTree_Indexed_Tx: - """Catalog tree node.""" - - def __init__(self, client: BrkClientBase, base_path: str = ''): - self.base_size: Indexes6[StoredU32] = Indexes6(client, f'{base_path}/base_size') - self.first_txindex: Indexes2[TxIndex] = Indexes2(client, f'{base_path}/first_txindex') - self.first_txinindex: Indexes6[TxInIndex] = Indexes6(client, f'{base_path}/first_txinindex') - self.first_txoutindex: Indexes6[TxOutIndex] = Indexes6(client, f'{base_path}/first_txoutindex') - self.height: Indexes6[Height] = Indexes6(client, f'{base_path}/height') - self.is_explicitly_rbf: Indexes6[StoredBool] = Indexes6(client, f'{base_path}/is_explicitly_rbf') - self.rawlocktime: Indexes6[RawLockTime] = Indexes6(client, f'{base_path}/rawlocktime') - self.total_size: Indexes6[StoredU32] = Indexes6(client, f'{base_path}/total_size') - self.txid: Indexes6[Txid] = Indexes6(client, f'{base_path}/txid') - self.txversion: Indexes6[TxVersion] = Indexes6(client, f'{base_path}/txversion') - -class CatalogTree_Indexed_Txin: - """Catalog tree node.""" - - def __init__(self, client: BrkClientBase, base_path: str = ''): - self.first_txinindex: Indexes2[TxInIndex] = Indexes2(client, f'{base_path}/first_txinindex') - self.outpoint: Indexes24[OutPoint] = Indexes24(client, f'{base_path}/outpoint') - self.outputtype: Indexes24[OutputType] = Indexes24(client, f'{base_path}/outputtype') - self.txindex: Indexes24[TxIndex] = Indexes24(client, f'{base_path}/txindex') - self.typeindex: 
Indexes24[TypeIndex] = Indexes24(client, f'{base_path}/typeindex') - -class CatalogTree_Indexed_Txout: - """Catalog tree node.""" - - def __init__(self, client: BrkClientBase, base_path: str = ''): - self.first_txoutindex: Indexes2[TxOutIndex] = Indexes2(client, f'{base_path}/first_txoutindex') - self.outputtype: Indexes25[OutputType] = Indexes25(client, f'{base_path}/outputtype') - self.txindex: Indexes25[TxIndex] = Indexes25(client, f'{base_path}/txindex') - self.typeindex: Indexes25[TypeIndex] = Indexes25(client, f'{base_path}/typeindex') - self.value: Indexes25[Sats] = Indexes25(client, f'{base_path}/value') - -class BrkClient(BrkClientBase): - """Main BRK client with catalog tree and API methods.""" - - def __init__(self, base_url: str = 'http://localhost:3000', timeout: float = 30.0): - super().__init__(base_url, timeout) - self.tree = CatalogTree(self) - - def get_api_address_by_address(self, address: str) -> AddressStats: - """Address information. - - Retrieve comprehensive information about a Bitcoin address including balance, transaction history, UTXOs, and estimated investment metrics. Supports all standard Bitcoin address types (P2PKH, P2SH, P2WPKH, P2WSH, P2TR, etc.).""" - return self.get(f'/api/address/{address}') - - def get_api_address_by_address_txs(self, address: str, after_txid: Optional[str] = None, limit: Optional[str] = None) -> List[Txid]: - """Address transaction IDs. - - Get transaction IDs for an address, newest first. Use after_txid for pagination.""" - params = [] - if after_txid is not None: params.append(f'after_txid={after_txid}') - if limit is not None: params.append(f'limit={limit}') - query = '&'.join(params) - return self.get(f'/api/address/{address}/txs{"?" + query if query else ""}') - - def get_api_address_by_address_txs_chain(self, address: str, after_txid: Optional[str] = None, limit: Optional[str] = None) -> List[Txid]: - """Address confirmed transactions. - - Get confirmed transaction IDs for an address, 25 per page. 
Use ?after_txid= for pagination.""" - params = [] - if after_txid is not None: params.append(f'after_txid={after_txid}') - if limit is not None: params.append(f'limit={limit}') - query = '&'.join(params) - return self.get(f'/api/address/{address}/txs/chain{"?" + query if query else ""}') - - def get_api_address_by_address_txs_mempool(self, address: str) -> List[Txid]: - """Address mempool transactions. - - Get unconfirmed transaction IDs for an address from the mempool (up to 50).""" - return self.get(f'/api/address/{address}/txs/mempool') - - def get_api_address_by_address_utxo(self, address: str) -> List[Utxo]: - """Address UTXOs. - - Get unspent transaction outputs for an address.""" - return self.get(f'/api/address/{address}/utxo') - - def get_api_block_height_by_height(self, height: str) -> BlockInfo: - """Block by height. - - Retrieve block information by block height. Returns block metadata including hash, timestamp, difficulty, size, weight, and transaction count.""" - return self.get(f'/api/block-height/{height}') - - def get_api_block_by_hash(self, hash: str) -> BlockInfo: - """Block information. - - Retrieve block information by block hash. Returns block metadata including height, timestamp, difficulty, size, weight, and transaction count.""" - return self.get(f'/api/block/{hash}') - - def get_api_block_by_hash_raw(self, hash: str) -> List[int]: - """Raw block. - - Returns the raw block data in binary format.""" - return self.get(f'/api/block/{hash}/raw') - - def get_api_block_by_hash_status(self, hash: str) -> BlockStatus: - """Block status. - - Retrieve the status of a block. Returns whether the block is in the best chain and, if so, its height and the hash of the next block.""" - return self.get(f'/api/block/{hash}/status') - - def get_api_block_by_hash_txid_by_index(self, hash: str, index: str) -> Txid: - """Transaction ID at index. - - Retrieve a single transaction ID at a specific index within a block. 
Returns plain text txid.""" - return self.get(f'/api/block/{hash}/txid/{index}') - - def get_api_block_by_hash_txids(self, hash: str) -> List[Txid]: - """Block transaction IDs. - - Retrieve all transaction IDs in a block by block hash.""" - return self.get(f'/api/block/{hash}/txids') - - def get_api_block_by_hash_txs_by_start_index(self, hash: str, start_index: str) -> List[Transaction]: - """Block transactions (paginated). - - Retrieve transactions in a block by block hash, starting from the specified index. Returns up to 25 transactions at a time.""" - return self.get(f'/api/block/{hash}/txs/{start_index}') - - def get_api_blocks(self) -> List[BlockInfo]: - """Recent blocks. - - Retrieve the last 10 blocks. Returns block metadata for each block.""" - return self.get('/api/blocks') - - def get_api_blocks_by_height(self, height: str) -> List[BlockInfo]: - """Blocks from height. - - Retrieve up to 10 blocks going backwards from the given height. For example, height=100 returns blocks 100, 99, 98, ..., 91. Height=0 returns only block 0.""" - return self.get(f'/api/blocks/{height}') - - def get_api_mempool_info(self) -> MempoolInfo: - """Mempool statistics. - - Get current mempool statistics including transaction count, total vsize, and total fees.""" - return self.get('/api/mempool/info') - - def get_api_mempool_txids(self) -> List[Txid]: - """Mempool transaction IDs. - - Get all transaction IDs currently in the mempool.""" - return self.get('/api/mempool/txids') - - def get_api_metric_by_metric(self, metric: str) -> List[Index]: - """Get supported indexes for a metric. - - Returns the list of indexes are supported by the specified metric. 
For example, `realized_price` might be available on dateindex, weekindex, and monthindex.""" - return self.get(f'/api/metric/{metric}') - - def get_api_metric_by_metric_by_index(self, metric: str, index: str, from_: Optional[str] = None, to: Optional[str] = None, count: Optional[str] = None, format: Optional[str] = None) -> MetricData: - """Get metric data. - - Fetch data for a specific metric at the given index. Use query parameters to filter by date range and format (json/csv).""" - params = [] - if from_ is not None: params.append(f'from={from_}') - if to is not None: params.append(f'to={to}') - if count is not None: params.append(f'count={count}') - if format is not None: params.append(f'format={format}') - query = '&'.join(params) - return self.get(f'/api/metric/{metric}/{index}{"?" + query if query else ""}') - - def get_api_metrics_bulk(self, metrics: str, index: str, from_: Optional[str] = None, to: Optional[str] = None, count: Optional[str] = None, format: Optional[str] = None) -> List[MetricData]: - """Bulk metric data. - - Fetch multiple metrics in a single request. Supports filtering by index and date range. Returns an array of MetricData objects.""" - params = [] - params.append(f'metrics={metrics}') - params.append(f'index={index}') - if from_ is not None: params.append(f'from={from_}') - if to is not None: params.append(f'to={to}') - if count is not None: params.append(f'count={count}') - if format is not None: params.append(f'format={format}') - query = '&'.join(params) - return self.get(f'/api/metrics/bulk{"?" + query if query else ""}') - - def get_api_metrics_catalog(self) -> TreeNode: - """Metrics catalog. - - Returns the complete hierarchical catalog of available metrics organized as a tree structure. Metrics are grouped by categories and subcategories. 
Best viewed in an interactive JSON viewer (e.g., Firefox's built-in JSON viewer) for easy navigation of the nested structure.""" - return self.get('/api/metrics/catalog') - - def get_api_metrics_count(self) -> List[MetricCount]: - """Metric count. - - Current metric count""" - return self.get('/api/metrics/count') - - def get_api_metrics_indexes(self) -> List[IndexInfo]: - """List available indexes. - - Returns all available indexes with their accepted query aliases. Use any alias when querying metrics.""" - return self.get('/api/metrics/indexes') - - def get_api_metrics_list(self, page: Optional[str] = None) -> PaginatedMetrics: - """Metrics list. - - Paginated list of available metrics""" - params = [] - if page is not None: params.append(f'page={page}') - query = '&'.join(params) - return self.get(f'/api/metrics/list{"?" + query if query else ""}') - - def get_api_metrics_search_by_metric(self, metric: str, limit: Optional[str] = None) -> List[Metric]: - """Search metrics. - - Fuzzy search for metrics by name. Supports partial matches and typos.""" - params = [] - if limit is not None: params.append(f'limit={limit}') - query = '&'.join(params) - return self.get(f'/api/metrics/search/{metric}{"?" + query if query else ""}') - - def get_api_tx_by_txid(self, txid: str) -> Transaction: - """Transaction information. - - Retrieve complete transaction data by transaction ID (txid). Returns the full transaction details including inputs, outputs, and metadata. The transaction data is read directly from the blockchain data files.""" - return self.get(f'/api/tx/{txid}') - - def get_api_tx_by_txid_hex(self, txid: str) -> Hex: - """Transaction hex. - - Retrieve the raw transaction as a hex-encoded string. Returns the serialized transaction in hexadecimal format.""" - return self.get(f'/api/tx/{txid}/hex') - - def get_api_tx_by_txid_outspend_by_vout(self, txid: str, vout: str) -> TxOutspend: - """Output spend status. - - Get the spending status of a transaction output. 
Returns whether the output has been spent and, if so, the spending transaction details.""" - return self.get(f'/api/tx/{txid}/outspend/{vout}') - - def get_api_tx_by_txid_outspends(self, txid: str) -> List[TxOutspend]: - """All output spend statuses. - - Get the spending status of all outputs in a transaction. Returns an array with the spend status for each output.""" - return self.get(f'/api/tx/{txid}/outspends') - - def get_api_tx_by_txid_status(self, txid: str) -> TxStatus: - """Transaction status. - - Retrieve the confirmation status of a transaction. Returns whether the transaction is confirmed and, if so, the block height, hash, and timestamp.""" - return self.get(f'/api/tx/{txid}/status') - - def get_api_v1_difficulty_adjustment(self) -> DifficultyAdjustment: - """Difficulty adjustment. - - Get current difficulty adjustment information including progress through the current epoch, estimated retarget date, and difficulty change prediction.""" - return self.get('/api/v1/difficulty-adjustment') - - def get_api_v1_fees_mempool_blocks(self) -> List[MempoolBlock]: - """Projected mempool blocks. - - Get projected blocks from the mempool for fee estimation. Each block contains statistics about transactions that would be included if a block were mined now.""" - return self.get('/api/v1/fees/mempool-blocks') - - def get_api_v1_fees_recommended(self) -> RecommendedFees: - """Recommended fees. - - Get recommended fee rates for different confirmation targets based on current mempool state.""" - return self.get('/api/v1/fees/recommended') - - def get_api_v1_mining_blocks_fees_by_time_period(self, time_period: str) -> List[BlockFeesEntry]: - """Block fees. - - Get average block fees for a time period. Valid periods: 24h, 3d, 1w, 1m, 3m, 6m, 1y, 2y, 3y""" - return self.get(f'/api/v1/mining/blocks/fees/{time_period}') - - def get_api_v1_mining_blocks_rewards_by_time_period(self, time_period: str) -> List[BlockRewardsEntry]: - """Block rewards. 
- - Get average block rewards (coinbase = subsidy + fees) for a time period. Valid periods: 24h, 3d, 1w, 1m, 3m, 6m, 1y, 2y, 3y""" - return self.get(f'/api/v1/mining/blocks/rewards/{time_period}') - - def get_api_v1_mining_blocks_sizes_weights_by_time_period(self, time_period: str) -> BlockSizesWeights: - """Block sizes and weights. - - Get average block sizes and weights for a time period. Valid periods: 24h, 3d, 1w, 1m, 3m, 6m, 1y, 2y, 3y""" - return self.get(f'/api/v1/mining/blocks/sizes-weights/{time_period}') - - def get_api_v1_mining_blocks_timestamp_by_timestamp(self, timestamp: str) -> BlockTimestamp: - """Block by timestamp. - - Find the block closest to a given UNIX timestamp.""" - return self.get(f'/api/v1/mining/blocks/timestamp/{timestamp}') - - def get_api_v1_mining_difficulty_adjustments(self) -> List[DifficultyAdjustmentEntry]: - """Difficulty adjustments (all time). - - Get historical difficulty adjustments. Returns array of [timestamp, height, difficulty, change_percent].""" - return self.get('/api/v1/mining/difficulty-adjustments') - - def get_api_v1_mining_difficulty_adjustments_by_time_period(self, time_period: str) -> List[DifficultyAdjustmentEntry]: - """Difficulty adjustments. - - Get historical difficulty adjustments for a time period. Valid periods: 24h, 3d, 1w, 1m, 3m, 6m, 1y, 2y, 3y. Returns array of [timestamp, height, difficulty, change_percent].""" - return self.get(f'/api/v1/mining/difficulty-adjustments/{time_period}') - - def get_api_v1_mining_hashrate(self) -> HashrateSummary: - """Network hashrate (all time). - - Get network hashrate and difficulty data for all time.""" - return self.get('/api/v1/mining/hashrate') - - def get_api_v1_mining_hashrate_by_time_period(self, time_period: str) -> HashrateSummary: - """Network hashrate. - - Get network hashrate and difficulty data for a time period. 
Valid periods: 24h, 3d, 1w, 1m, 3m, 6m, 1y, 2y, 3y""" - return self.get(f'/api/v1/mining/hashrate/{time_period}') - - def get_api_v1_mining_pool_by_slug(self, slug: str) -> PoolDetail: - """Mining pool details. - - Get detailed information about a specific mining pool including block counts and shares for different time periods.""" - return self.get(f'/api/v1/mining/pool/{slug}') - - def get_api_v1_mining_pools(self) -> List[PoolInfo]: - """List all mining pools. - - Get list of all known mining pools with their identifiers.""" - return self.get('/api/v1/mining/pools') - - def get_api_v1_mining_pools_by_time_period(self, time_period: str) -> PoolsSummary: - """Mining pool statistics. - - Get mining pool statistics for a time period. Valid periods: 24h, 3d, 1w, 1m, 3m, 6m, 1y, 2y, 3y""" - return self.get(f'/api/v1/mining/pools/{time_period}') - - def get_api_v1_mining_reward_stats_by_block_count(self, block_count: str) -> RewardStats: - """Mining reward statistics. - - Get mining reward statistics for the last N blocks including total rewards, fees, and transaction count.""" - return self.get(f'/api/v1/mining/reward-stats/{block_count}') - - def get_api_v1_validate_address_by_address(self, address: str) -> AddressValidation: - """Validate address. - - Validate a Bitcoin address and get information about its type and scriptPubKey.""" - return self.get(f'/api/v1/validate-address/{address}') - - def get_health(self) -> Health: - """Health check. - - Returns the health status of the API server""" - return self.get('/health') - - def get_version(self) -> str: - """API version. 
# Auto-generated BRK Python client
# Do not edit manually

from __future__ import annotations
from typing import TypeVar, Generic, Any, Optional, List, Literal, TypedDict, Final, Union, Protocol
import httpx

# Generic element type for Endpoint[T] / MetricPattern[T] below.
T = TypeVar('T')

# Type definitions
#
# Scalar aliases mirror the server's schema names; TypedDicts mirror its JSON
# response shapes. Field names are wire-format keys and must not be renamed.

Address = str
Sats = int
TypeIndex = int
class AddressChainStats(TypedDict):
    funded_txo_count: int
    funded_txo_sum: Sats
    spent_txo_count: int
    spent_txo_sum: Sats
    tx_count: int
    type_index: TypeIndex

class AddressMempoolStats(TypedDict):
    funded_txo_count: int
    funded_txo_sum: Sats
    spent_txo_count: int
    spent_txo_sum: Sats
    tx_count: int

class AddressParam(TypedDict):
    address: Address

class AddressStats(TypedDict):
    address: Address
    chain_stats: AddressChainStats
    # None when the server has no mempool view for the address.
    mempool_stats: Union[AddressMempoolStats, None]

Txid = str
class AddressTxidsParam(TypedDict):
    after_txid: Union[Txid, None]
    limit: int

class AddressValidation(TypedDict):
    isvalid: bool
    address: Optional[str]
    scriptPubKey: Optional[str]
    isscript: Optional[bool]
    iswitness: Optional[bool]
    witness_version: Optional[int]
    witness_program: Optional[str]

AnyAddressIndex = TypeIndex
Bitcoin = float
BlkPosition = int
class BlockCountParam(TypedDict):
    block_count: int

Height = int
Timestamp = int
class BlockFeesEntry(TypedDict):
    avgHeight: Height
    timestamp: Timestamp
    avgFees: Sats

BlockHash = str
class BlockHashParam(TypedDict):
    hash: BlockHash

TxIndex = int
class BlockHashStartIndex(TypedDict):
    hash: BlockHash
    start_index: TxIndex

class BlockHashTxIndex(TypedDict):
    hash: BlockHash
    index: TxIndex

Weight = int
class BlockInfo(TypedDict):
    id: BlockHash
    height: Height
    tx_count: int
    size: int
    weight: Weight
    timestamp: Timestamp
    difficulty: float

class BlockRewardsEntry(TypedDict):
    avgHeight: int
    timestamp: int
    avgRewards: int

class BlockSizeEntry(TypedDict):
    avgHeight: int
    timestamp: int
    avgSize: int

class BlockWeightEntry(TypedDict):
    avgHeight: int
    timestamp: int
    avgWeight: int

class BlockSizesWeights(TypedDict):
    sizes: List[BlockSizeEntry]
    weights: List[BlockWeightEntry]

class BlockStatus(TypedDict):
    in_best_chain: bool
    height: Union[Height, None]
    next_best: Union[BlockHash, None]

class BlockTimestamp(TypedDict):
    height: Height
    hash: BlockHash
    # NOTE(review): timestamp is a string here (unlike the integer Timestamp
    # alias used elsewhere) — presumably an ISO-8601 date; verify with server.
    timestamp: str

Cents = int
Close = Cents
Format = Literal["json", "csv"]
class DataRangeFormat(TypedDict):
    # `from` is a Python keyword, so the generator emits `from_`.
    from_: Optional[int]
    to: Optional[int]
    count: Optional[int]
    format: Format

Date = int
DateIndex = int
DecadeIndex = int
class DifficultyAdjustment(TypedDict):
    progressPercent: float
    difficultyChange: float
    estimatedRetargetDate: int
    remainingBlocks: int
    remainingTime: int
    previousRetarget: float
    nextRetargetHeight: Height
    timeAvg: int
    adjustedTimeAvg: int
    timeOffset: int

class DifficultyAdjustmentEntry(TypedDict):
    timestamp: Timestamp
    height: Height
    difficulty: float
    change_percent: float

class DifficultyEntry(TypedDict):
    timestamp: Timestamp
    difficulty: float
    height: Height

DifficultyEpoch = int
Dollars = float
class EmptyAddressData(TypedDict):
    tx_count: int
    funded_txo_count: int
    # NOTE(review): "transfered" (sic) is the wire-format key emitted by the
    # server schema; do not correct the spelling here.
    transfered: Sats

EmptyAddressIndex = TypeIndex
EmptyOutputIndex = TypeIndex
FeeRate = float
HalvingEpoch = int
class HashrateEntry(TypedDict):
    timestamp: Timestamp
    avgHashrate: int

class HashrateSummary(TypedDict):
    hashrates: List[HashrateEntry]
    difficulty: List[DifficultyEntry]
    currentHashrate: int
    currentDifficulty: float

class Health(TypedDict):
    status: str
    service: str
    timestamp: str

class HeightParam(TypedDict):
    height: Height

Hex = str
High = Cents
class IndexInfo(TypedDict):
    # Forward reference to `Index` (defined later in this module); safe under
    # `from __future__ import annotations`.
    index: Index
    aliases: List[str]

Limit = int
class LimitParam(TypedDict):
    limit: Limit

class LoadedAddressData(TypedDict):
    tx_count: int
    funded_txo_count: int
    spent_txo_count: int
    received: Sats
    sent: Sats
    realized_cap: Dollars

LoadedAddressIndex = TypeIndex
Low = Cents
class MempoolBlock(TypedDict):
    blockSize: int
    blockVSize: float
    nTx: int
    totalFees: Sats
    medianFee: FeeRate
    feeRange: List[FeeRate]

VSize = int
class MempoolInfo(TypedDict):
    count: int
    vsize: VSize
    total_fee: Sats

Metric = str
class MetricCount(TypedDict):
    distinct_metrics: int
    total_endpoints: int
    lazy_endpoints: int
    stored_endpoints: int

class MetricData(TypedDict):
    total: int
    from_: int
    to: int
    data: List[Any]

class MetricParam(TypedDict):
    metric: Metric

Metrics = str
class MetricSelection(TypedDict):
    metrics: Metrics
    index: Index
    from_: Optional[int]
    to: Optional[int]
    count: Optional[int]
    format: Format

class MetricSelectionLegacy(TypedDict):
    index: Index
    ids: Metrics
    from_: Optional[int]
    to: Optional[int]
    count: Optional[int]
    format: Format

class MetricWithIndex(TypedDict):
    metric: Metric
    index: Index

MonthIndex = int
Open = Cents
class OHLCCents(TypedDict):
    open: Open
    high: High
    low: Low
    close: Close

# NOTE(review): OHLCDollars and OHLCSats reuse the Cents-based Open/High/Low/
# Close aliases — at runtime all are plain numbers, but the names suggest the
# generator may intend Dollars/Sats-typed fields here; confirm upstream.
class OHLCDollars(TypedDict):
    open: Open
    high: High
    low: Low
    close: Close

class OHLCSats(TypedDict):
    open: Open
    high: High
    low: Low
    close: Close

OpReturnIndex = TypeIndex
OutPoint = int
OutputType = Literal["p2pk65", "p2pk33", "p2pkh", "p2ms", "p2sh", "opreturn", "p2wpkh", "p2wsh", "p2tr", "p2a", "empty", "unknown"]
P2AAddressIndex = TypeIndex
U8x2 = List[int]
P2ABytes = U8x2
P2MSOutputIndex = TypeIndex
P2PK33AddressIndex = TypeIndex
U8x33 = str
P2PK33Bytes = U8x33
P2PK65AddressIndex = TypeIndex
U8x65 = str
P2PK65Bytes = U8x65
P2PKHAddressIndex = TypeIndex
U8x20 = List[int]
P2PKHBytes = U8x20
P2SHAddressIndex = TypeIndex
P2SHBytes = U8x20
P2TRAddressIndex = TypeIndex
U8x32 = List[int]
P2TRBytes = U8x32
P2WPKHAddressIndex = TypeIndex
P2WPKHBytes = U8x20
P2WSHAddressIndex = TypeIndex
P2WSHBytes = U8x32
class PaginatedMetrics(TypedDict):
    current_page: int
    max_page: int
    metrics: List[str]

class Pagination(TypedDict):
    page: Optional[int]

class PoolBlockCounts(TypedDict):
    # Keys are prefixed with "_" because JSON keys like "24h" are not valid
    # Python identifiers.
    all: int
    _24h: int
    _1w: int

class PoolBlockShares(TypedDict):
    all: float
    _24h: float
    _1w: float

# Known mining-pool slugs, as emitted by the server's pool database.
PoolSlug = Literal["unknown", "blockfills", "ultimuspool", "terrapool", "luxor", "onethash", "btccom", "bitfarms", "huobipool", "wayicn", "canoepool", "btctop", "bitcoincom", "pool175btc", "gbminers", "axbt", "asicminer", "bitminter", "bitcoinrussia", "btcserv", "simplecoinus", "btcguild", "eligius", "ozcoin", "eclipsemc", "maxbtc", "triplemining", "coinlab", "pool50btc", "ghashio", "stminingcorp", "bitparking", "mmpool", "polmine", "kncminer", "bitalo", "f2pool", "hhtt", "megabigpower", "mtred", "nmcbit", "yourbtcnet", "givemecoins", "braiinspool", "antpool", "multicoinco", "bcpoolio", "cointerra", "kanopool", "solock", "ckpool", "nicehash", "bitclub", "bitcoinaffiliatenetwork", "btcc", "bwpool", "exxbw", "bitsolo", "bitfury", "twentyoneinc", "digitalbtc", "eightbaochi", "mybtccoinpool", "tbdice", "hashpool", "nexious", "bravomining", "hotpool", "okexpool", "bcmonster", "onehash", "bixin", "tatmaspool", "viabtc", "connectbtc", "batpool", "waterhole", "dcexploration", "dcex", "btpool", "fiftyeightcoin", "bitcoinindia", "shawnp0wers", "phashio", "rigpool", "haozhuzhu", "sevenpool", "miningkings", "hashbx", "dpool", "rawpool", "haominer", "helix", "bitcoinukraine", "poolin", "secretsuperstar", "tigerpoolnet", "sigmapoolcom", "okpooltop", "hummerpool", "tangpool", "bytepool", "spiderpool", "novablock", "miningcity", "binancepool", "minerium", "lubiancom", "okkong", "aaopool", "emcdpool", "foundryusa", "sbicrypto", "arkpool", "purebtccom", "marapool", "kucoinpool", "entrustcharitypool", "okminer", "titan", "pegapool", "btcnuggets", "cloudhashing", "digitalxmintsy", "telco214", "btcpoolparty", "multipool", "transactioncoinmining", "btcdig", "trickysbtcpool", "btcmp", "eobot", "unomp", "patels", "gogreenlight", "ekanembtc", "canoe", "tiger", "onem1x", "zulupool", "secpool", "ocean", "whitepool", "wk057", "futurebitapollosolo", "carbonnegative", "portlandhodl", "phoenix", "neopool", "maxipool", "bitfufupool", "luckypool", "miningdutch", "publicpool", "miningsquared", "innopolistech", "btclab", "parasite"]
class PoolDetailInfo(TypedDict):
    id: int
    name: str
    link: str
    addresses: List[str]
    regexes: List[str]
    slug: PoolSlug

class PoolDetail(TypedDict):
    pool: PoolDetailInfo
    blockCount: PoolBlockCounts
    blockShare: PoolBlockShares
    estimatedHashrate: int
    reportedHashrate: Optional[int]

class PoolInfo(TypedDict):
    name: str
    slug: PoolSlug
    unique_id: int

class PoolSlugParam(TypedDict):
    slug: PoolSlug

class PoolStats(TypedDict):
    poolId: int
    name: str
    link: str
    blockCount: int
    rank: int
    emptyBlocks: int
    slug: PoolSlug
    share: float

class PoolsSummary(TypedDict):
    pools: List[PoolStats]
    blockCount: int
    lastEstimatedHashrate: int

QuarterIndex = int
RawLockTime = int
class RecommendedFees(TypedDict):
    fastestFee: FeeRate
    halfHourFee: FeeRate
    hourFee: FeeRate
    economyFee: FeeRate
    minimumFee: FeeRate

class RewardStats(TypedDict):
    startBlock: Height
    endBlock: Height
    totalReward: Sats
    totalFee: Sats
    totalTx: int

SemesterIndex = int
StoredBool = int
StoredF32 = float
StoredF64 = float
StoredI16 = int
StoredU16 = int
StoredU32 = int
StoredU64 = int
class SupplyState(TypedDict):
    utxo_count: int
    value: Sats

TimePeriod = Literal["24h", "3d", "1w", "1m", "3m", "6m", "1y", "2y", "3y"]
class TimePeriodParam(TypedDict):
    time_period: TimePeriod

class TimestampParam(TypedDict):
    timestamp: Timestamp

class TxOut(TypedDict):
    scriptpubkey: str
    value: Sats

Vout = int
class TxIn(TypedDict):
    txid: Txid
    vout: Vout
    # None for coinbase inputs, which spend no previous output.
    prevout: Union[TxOut, None]
    scriptsig: str
    scriptsig_asm: str
    is_coinbase: bool
    sequence: int
    inner_redeemscript_asm: Optional[str]

class TxStatus(TypedDict):
    confirmed: bool
    # The block_* fields are None while the transaction is unconfirmed.
    block_height: Union[Height, None]
    block_hash: Union[BlockHash, None]
    block_time: Union[Timestamp, None]

TxVersion = int
class Transaction(TypedDict):
    index: Union[TxIndex, None]
    txid: Txid
    version: TxVersion
    locktime: RawLockTime
    size: int
    weight: Weight
    sigops: int
    fee: Sats
    vin: List[TxIn]
    vout: List[TxOut]
    status: TxStatus

TxInIndex = int
TxOutIndex = int
Vin = int
class TxOutspend(TypedDict):
    spent: bool
    # txid/vin/status are None when the output is unspent.
    txid: Union[Txid, None]
    vin: Union[Vin, None]
    status: Union[TxStatus, None]

class TxidParam(TypedDict):
    txid: Txid

class TxidVout(TypedDict):
    txid: Txid
    vout: Vout

UnknownOutputIndex = TypeIndex
class Utxo(TypedDict):
    txid: Txid
    vout: Vout
    status: TxStatus
    value: Sats

class ValidateAddressParam(TypedDict):
    address: str

WeekIndex = int
YearIndex = int
# All index dimensions a metric can be queried by.
Index = Literal["dateindex", "decadeindex", "difficultyepoch", "emptyoutputindex", "halvingepoch", "height", "txinindex", "monthindex", "opreturnindex", "txoutindex", "p2aaddressindex", "p2msoutputindex", "p2pk33addressindex", "p2pk65addressindex", "p2pkhaddressindex", "p2shaddressindex", "p2traddressindex", "p2wpkhaddressindex", "p2wshaddressindex", "quarterindex", "semesterindex", "txindex", "unknownoutputindex", "weekindex", "yearindex", "loadedaddressindex", "emptyaddressindex"]
class MetricLeafWithSchema(TypedDict):
    name: str
    value_type: str
    indexes: List[Index]

# Recursive catalog tree: inner nodes are dicts of children, leaves are
# MetricLeafWithSchema entries.
TreeNode = Union[dict[str, "TreeNode"], MetricLeafWithSchema]

class BrkError(Exception):
    """Custom error class for BRK client errors."""

    def __init__(self, message: str, status: Optional[int] = None):
        # status is the HTTP status code when the failure came from an HTTP
        # error response; None for transport-level failures.
        super().__init__(message)
        self.status = status


class BrkClientBase:
    """Base HTTP client for making requests."""

    def __init__(self, base_url: str, timeout: float = 30.0):
        self.base_url = base_url
        self.timeout = timeout
        self._client = httpx.Client(timeout=timeout)

    def get(self, path: str) -> Any:
        """Make a GET request."""
        try:
            # Normalize so base_url with or without a trailing slash works.
            base = self.base_url.rstrip('/')
            response = self._client.get(f"{base}{path}")
            response.raise_for_status()
            return response.json()
        except httpx.HTTPStatusError as e:
            # Wrap both HTTP-status and transport errors in BrkError so
            # callers have a single exception type to catch.
            raise BrkError(f"HTTP error: {e.response.status_code}", e.response.status_code)
        except httpx.RequestError as e:
            raise BrkError(str(e))

    def close(self):
        """Close the HTTP client."""
        self._client.close()

    def __enter__(self):
        return self

    def __exit__(self, exc_type, exc_val, exc_tb):
        self.close()


def _m(acc: str, s: str) -> str:
    """Build metric name with optional prefix."""
    return f"{acc}_{s}" if acc else s


class Endpoint(Generic[T]):
    """An endpoint for a specific metric + index combination."""

    def __init__(self, client: BrkClientBase, name: str, index: str):
        self._client = client
        self._name = name
        self._index = index

    def get(self) -> List[T]:
        """Fetch all data points for this metric/index."""
        return self._client.get(self.path())

    def range(self, from_val: Optional[int] = None, to_val: Optional[int] = None) -> List[T]:
        """Fetch data points within a range."""
        params = []
        if from_val is not None:
            params.append(f"from={from_val}")
        if to_val is not None:
            params.append(f"to={to_val}")
        query = "&".join(params)
        p = self.path()
        return self._client.get(f"{p}?{query}" if query else p)

    def path(self) -> str:
        """Get the endpoint path."""
        return f"/api/metric/{self._name}/{self._index}"


class MetricPattern(Protocol[T]):
    """Protocol for metric patterns with different index sets."""

    @property
    def name(self) -> str:
        """Get the metric name."""
        ...

    def indexes(self) -> List[str]:
        """Get the list of available indexes for this metric."""
        ...

    def get(self, index: str) -> Optional[Endpoint[T]]:
        """Get an endpoint for a specific index, if supported."""
        ...


# Index accessor classes
#
# Each generated MetricPatternN pairs with a _MetricPatternNBy container; the
# By class exposes one by_<index>() method per index the pattern supports, and
# the pattern class adds introspection (indexes()) and string dispatch (get()).

class _MetricPattern1By(Generic[T]):
    """Index endpoint methods container."""

    def __init__(self, client: BrkClientBase, name: str):
        self._client = client
        self._name = name

    def by_dateindex(self) -> Endpoint[T]:
        return Endpoint(self._client, self._name, 'dateindex')

    def by_decadeindex(self) -> Endpoint[T]:
        return Endpoint(self._client, self._name, 'decadeindex')

    def by_difficultyepoch(self) -> Endpoint[T]:
        return Endpoint(self._client, self._name, 'difficultyepoch')

    def by_height(self) -> Endpoint[T]:
        return Endpoint(self._client, self._name, 'height')

    def by_monthindex(self) -> Endpoint[T]:
        return Endpoint(self._client, self._name, 'monthindex')

    def by_quarterindex(self) -> Endpoint[T]:
        return Endpoint(self._client, self._name, 'quarterindex')

    def by_semesterindex(self) -> Endpoint[T]:
        return Endpoint(self._client, self._name, 'semesterindex')

    def by_weekindex(self) -> Endpoint[T]:
        return Endpoint(self._client, self._name, 'weekindex')

    def by_yearindex(self) -> Endpoint[T]:
        return Endpoint(self._client, self._name, 'yearindex')

class MetricPattern1(Generic[T]):
    """Index accessor for metrics with 9 indexes."""

    def __init__(self, client: BrkClientBase, name: str):
        self._client = client
        self._name = name
        self.by: _MetricPattern1By[T] = _MetricPattern1By(client, name)

    @property
    def name(self) -> str:
        """Get the metric name."""
        return self._name

    def indexes(self) -> List[str]:
        """Get the list of available indexes."""
        return ['dateindex', 'decadeindex', 'difficultyepoch', 'height', 'monthindex', 'quarterindex', 'semesterindex', 'weekindex', 'yearindex']

    def get(self, index: str) -> Optional[Endpoint[T]]:
        """Get an endpoint for a specific index, if supported."""
        if index == 'dateindex': return self.by.by_dateindex()
        elif index == 'decadeindex': return self.by.by_decadeindex()
        elif index == 'difficultyepoch': return self.by.by_difficultyepoch()
        elif index == 'height': return self.by.by_height()
        elif index == 'monthindex': return self.by.by_monthindex()
        elif index == 'quarterindex': return self.by.by_quarterindex()
        elif index == 'semesterindex': return self.by.by_semesterindex()
        elif index == 'weekindex': return self.by.by_weekindex()
        elif index == 'yearindex': return self.by.by_yearindex()
        return None

class _MetricPattern2By(Generic[T]):
    """Index endpoint methods container."""

    def __init__(self, client: BrkClientBase, name: str):
        self._client = client
        self._name = name

    def by_dateindex(self) -> Endpoint[T]:
        return Endpoint(self._client, self._name, 'dateindex')

    def by_decadeindex(self) -> Endpoint[T]:
        return Endpoint(self._client, self._name, 'decadeindex')

    def by_difficultyepoch(self) -> Endpoint[T]:
        return Endpoint(self._client, self._name, 'difficultyepoch')

    def by_monthindex(self) -> Endpoint[T]:
        return Endpoint(self._client, self._name, 'monthindex')

    def by_quarterindex(self) -> Endpoint[T]:
        return Endpoint(self._client, self._name, 'quarterindex')

    def by_semesterindex(self) -> Endpoint[T]:
        return Endpoint(self._client, self._name, 'semesterindex')

    def by_weekindex(self) -> Endpoint[T]:
        return Endpoint(self._client, self._name, 'weekindex')

    def by_yearindex(self) -> Endpoint[T]:
        return Endpoint(self._client, self._name, 'yearindex')

class MetricPattern2(Generic[T]):
    """Index accessor for metrics with 8 indexes."""

    def __init__(self, client: BrkClientBase, name: str):
        self._client = client
        self._name = name
        self.by: _MetricPattern2By[T] = _MetricPattern2By(client, name)

    @property
    def name(self) -> str:
        """Get the metric name."""
        return self._name

    def indexes(self) -> List[str]:
        """Get the list of available indexes."""
        return ['dateindex', 'decadeindex', 'difficultyepoch', 'monthindex', 'quarterindex', 'semesterindex', 'weekindex', 'yearindex']

    def get(self, index: str) -> Optional[Endpoint[T]]:
        """Get an endpoint for a specific index, if supported."""
        if index == 'dateindex': return self.by.by_dateindex()
        elif index == 'decadeindex': return self.by.by_decadeindex()
        elif index == 'difficultyepoch': return self.by.by_difficultyepoch()
        elif index == 'monthindex': return self.by.by_monthindex()
        elif index == 'quarterindex': return self.by.by_quarterindex()
        elif index == 'semesterindex': return self.by.by_semesterindex()
        elif index == 'weekindex': return self.by.by_weekindex()
        elif index == 'yearindex': return self.by.by_yearindex()
        return None

class _MetricPattern3By(Generic[T]):
    """Index endpoint methods container."""

    def __init__(self, client: BrkClientBase, name: str):
        self._client = client
        self._name = name

    def by_dateindex(self) -> Endpoint[T]:
        return Endpoint(self._client, self._name, 'dateindex')

    def by_decadeindex(self) -> Endpoint[T]:
        return Endpoint(self._client, self._name, 'decadeindex')

    def by_height(self) -> Endpoint[T]:
        return Endpoint(self._client, self._name, 'height')

    def by_monthindex(self) -> Endpoint[T]:
        return Endpoint(self._client, self._name, 'monthindex')

    def by_quarterindex(self) -> Endpoint[T]:
        return Endpoint(self._client, self._name, 'quarterindex')

    def by_semesterindex(self) -> Endpoint[T]:
        return Endpoint(self._client, self._name, 'semesterindex')

    def by_weekindex(self) -> Endpoint[T]:
        return Endpoint(self._client, self._name, 'weekindex')

    def by_yearindex(self) -> Endpoint[T]:
        return Endpoint(self._client, self._name, 'yearindex')

class MetricPattern3(Generic[T]):
    """Index accessor for metrics with 8 indexes."""

    def __init__(self, client: BrkClientBase, name: str):
        self._client = client
        self._name = name
        self.by: _MetricPattern3By[T] = _MetricPattern3By(client, name)

    @property
    def name(self) -> str:
        """Get the metric name."""
        return self._name

    def indexes(self) -> List[str]:
        """Get the list of available indexes."""
        return ['dateindex', 'decadeindex', 'height', 'monthindex', 'quarterindex', 'semesterindex', 'weekindex', 'yearindex']

    def get(self, index: str) -> Optional[Endpoint[T]]:
        """Get an endpoint for a specific index, if supported."""
        if index == 'dateindex': return self.by.by_dateindex()
        elif index == 'decadeindex': return self.by.by_decadeindex()
        elif index == 'height': return self.by.by_height()
        elif index == 'monthindex': return self.by.by_monthindex()
        elif index == 'quarterindex': return self.by.by_quarterindex()
        elif index == 'semesterindex': return self.by.by_semesterindex()
        elif index == 'weekindex': return self.by.by_weekindex()
        elif index == 'yearindex': return self.by.by_yearindex()
        return None

class _MetricPattern4By(Generic[T]):
    """Index endpoint methods container."""

    def __init__(self, client: BrkClientBase, name: str):
        self._client = client
        self._name = name

    def by_dateindex(self) -> Endpoint[T]:
        return Endpoint(self._client, self._name, 'dateindex')

    def by_decadeindex(self) -> Endpoint[T]:
        return Endpoint(self._client, self._name, 'decadeindex')

    def by_monthindex(self) -> Endpoint[T]:
        return Endpoint(self._client, self._name, 'monthindex')

    def by_quarterindex(self) -> Endpoint[T]:
        return Endpoint(self._client, self._name, 'quarterindex')

    def by_semesterindex(self) -> Endpoint[T]:
        return Endpoint(self._client, self._name, 'semesterindex')

    def by_weekindex(self) -> Endpoint[T]:
        return Endpoint(self._client, self._name, 'weekindex')

    def by_yearindex(self) -> Endpoint[T]:
        return Endpoint(self._client, self._name, 'yearindex')

class MetricPattern4(Generic[T]):
    """Index accessor for metrics with 7 indexes."""

    def __init__(self, client: BrkClientBase, name: str):
        self._client = client
        self._name = name
        self.by: _MetricPattern4By[T] = _MetricPattern4By(client, name)

    @property
    def name(self) -> str:
        """Get the metric name."""
        return self._name

    def indexes(self) -> List[str]:
        """Get the list of available indexes."""
        return ['dateindex', 'decadeindex', 'monthindex', 'quarterindex', 'semesterindex', 'weekindex', 'yearindex']

    def get(self, index: str) -> Optional[Endpoint[T]]:
        """Get an endpoint for a specific index, if supported."""
        if index == 'dateindex': return self.by.by_dateindex()
        elif index == 'decadeindex': return self.by.by_decadeindex()
        elif index == 'monthindex': return self.by.by_monthindex()
        elif index == 'quarterindex': return self.by.by_quarterindex()
        elif index == 'semesterindex': return self.by.by_semesterindex()
        elif index == 'weekindex': return self.by.by_weekindex()
        elif index == 'yearindex': return self.by.by_yearindex()
        return None

class _MetricPattern5By(Generic[T]):
    """Index endpoint methods container."""

    def __init__(self, client: BrkClientBase, name: str):
        self._client = client
        self._name = name

    def by_decadeindex(self) -> Endpoint[T]:
        return Endpoint(self._client, self._name, 'decadeindex')

    def by_height(self) -> Endpoint[T]:
        return Endpoint(self._client, self._name, 'height')

    def by_monthindex(self) -> Endpoint[T]:
        return Endpoint(self._client, self._name, 'monthindex')

    def by_quarterindex(self) -> Endpoint[T]:
        return Endpoint(self._client, self._name, 'quarterindex')

    def by_semesterindex(self) -> Endpoint[T]:
        return Endpoint(self._client, self._name, 'semesterindex')

    def by_weekindex(self) -> Endpoint[T]:
        return Endpoint(self._client, self._name, 'weekindex')

    def by_yearindex(self) -> Endpoint[T]:
        return Endpoint(self._client, self._name, 'yearindex')

class MetricPattern5(Generic[T]):
    """Index accessor for metrics with 7 indexes."""

    def __init__(self, client: BrkClientBase, name: str):
        self._client = client
        self._name = name
        self.by: _MetricPattern5By[T] = _MetricPattern5By(client, name)

    @property
    def name(self) -> str:
        """Get the metric name."""
        return self._name

    def indexes(self) -> List[str]:
        """Get the list of available indexes."""
        return ['decadeindex', 'height', 'monthindex', 'quarterindex', 'semesterindex', 'weekindex', 'yearindex']

    def get(self, index: str) -> Optional[Endpoint[T]]:
        """Get an endpoint for a specific index, if supported."""
        if index == 'decadeindex': return self.by.by_decadeindex()
        elif index == 'height': return self.by.by_height()
        elif index == 'monthindex': return self.by.by_monthindex()
        elif index == 'quarterindex': return self.by.by_quarterindex()
        elif index == 'semesterindex': return self.by.by_semesterindex()
        elif index == 'weekindex': return self.by.by_weekindex()
        elif index == 'yearindex': return self.by.by_yearindex()
        return None

class _MetricPattern6By(Generic[T]):
    """Index endpoint methods container."""

    def __init__(self, client: BrkClientBase, name: str):
        self._client = client
        self._name = name

    def by_decadeindex(self) -> Endpoint[T]:
        return Endpoint(self._client, self._name, 'decadeindex')

    def by_monthindex(self) -> Endpoint[T]:
        return Endpoint(self._client, self._name, 'monthindex')

    def by_quarterindex(self) -> Endpoint[T]:
        return Endpoint(self._client, self._name, 'quarterindex')

    def by_semesterindex(self) -> Endpoint[T]:
        return Endpoint(self._client, self._name, 'semesterindex')

    def by_weekindex(self) -> Endpoint[T]:
        return Endpoint(self._client, self._name, 'weekindex')

    def by_yearindex(self) -> Endpoint[T]:
        return Endpoint(self._client, self._name, 'yearindex')

class MetricPattern6(Generic[T]):
    """Index accessor for metrics with 6 indexes."""

    def __init__(self, client: BrkClientBase, name: str):
        self._client = client
        self._name = name
        self.by: _MetricPattern6By[T] = _MetricPattern6By(client, name)

    @property
    def name(self) -> str:
        """Get the metric name."""
        return self._name

    def indexes(self) -> List[str]:
        """Get the list of available indexes."""
        return ['decadeindex', 'monthindex', 'quarterindex', 'semesterindex', 'weekindex', 'yearindex']

    def get(self, index: str) -> Optional[Endpoint[T]]:
        """Get an endpoint for a specific index, if supported."""
        if index == 'decadeindex': return self.by.by_decadeindex()
        elif index == 'monthindex': return self.by.by_monthindex()
        elif index == 'quarterindex': return self.by.by_quarterindex()
        elif index == 'semesterindex': return self.by.by_semesterindex()
        elif index == 'weekindex': return self.by.by_weekindex()
        elif index == 'yearindex': return self.by.by_yearindex()
        return None

class _MetricPattern7By(Generic[T]):
    """Index endpoint methods container."""

    def __init__(self, client: BrkClientBase, name: str):
        self._client = client
        self._name = name

    def by_emptyoutputindex(self) -> Endpoint[T]:
        return Endpoint(self._client, self._name, 'emptyoutputindex')

    def by_opreturnindex(self) -> Endpoint[T]:
        return Endpoint(self._client, self._name, 'opreturnindex')

    def by_p2msoutputindex(self) -> Endpoint[T]:
        return Endpoint(self._client, self._name, 'p2msoutputindex')

    def by_unknownoutputindex(self) -> Endpoint[T]:
        return Endpoint(self._client, self._name, 'unknownoutputindex')

class MetricPattern7(Generic[T]):
    """Index accessor for metrics with 4 indexes."""

    def __init__(self, client: BrkClientBase, name: str):
        self._client = client
        self._name = name
        self.by: _MetricPattern7By[T] = _MetricPattern7By(client, name)

    @property
    def name(self) -> str:
        """Get the metric name."""
        return self._name

    def indexes(self) -> List[str]:
        """Get the list of available indexes."""
        return ['emptyoutputindex', 'opreturnindex', 'p2msoutputindex', 'unknownoutputindex']

    def get(self, index: str) -> Optional[Endpoint[T]]:
        """Get an endpoint for a specific index, if supported."""
        if index == 'emptyoutputindex': return self.by.by_emptyoutputindex()
        elif index == 'opreturnindex': return self.by.by_opreturnindex()
        elif index == 'p2msoutputindex': return self.by.by_p2msoutputindex()
        elif index == 'unknownoutputindex': return self.by.by_unknownoutputindex()
        return None

class _MetricPattern8By(Generic[T]):
    """Index endpoint methods container."""

    def __init__(self, client: BrkClientBase, name: str):
        self._client = client
        self._name = name

    def by_quarterindex(self) -> Endpoint[T]:
        return Endpoint(self._client, self._name, 'quarterindex')

    def by_semesterindex(self) -> Endpoint[T]:
        return Endpoint(self._client, self._name, 'semesterindex')

    def by_yearindex(self) -> Endpoint[T]:
        return Endpoint(self._client, self._name, 'yearindex')

class MetricPattern8(Generic[T]):
    """Index accessor for metrics with 3 indexes."""

    def __init__(self, client: BrkClientBase, name: str):
        self._client = client
        self._name = name
        self.by: _MetricPattern8By[T] = _MetricPattern8By(client, name)

    @property
    def name(self) -> str:
        """Get the metric name."""
        return self._name

    def indexes(self) -> List[str]:
        """Get the list of available indexes."""
        return ['quarterindex', 'semesterindex', 'yearindex']

    def get(self, index: str) -> Optional[Endpoint[T]]:
        """Get an endpoint for a specific index, if supported."""
        if index == 'quarterindex': return self.by.by_quarterindex()
        elif index == 'semesterindex': return self.by.by_semesterindex()
        elif index == 'yearindex': return self.by.by_yearindex()
        return None

class _MetricPattern9By(Generic[T]):
    """Index endpoint methods container."""

    def __init__(self, client: BrkClientBase, name: str):
        self._client = client
        self._name = name

    def by_dateindex(self) -> Endpoint[T]:
        return Endpoint(self._client, self._name, 'dateindex')

    def by_height(self) -> Endpoint[T]:
        return Endpoint(self._client, self._name, 'height')

class MetricPattern9(Generic[T]):
    """Index accessor for metrics with 2 indexes."""

    def __init__(self, client: BrkClientBase, name: str):
        self._client = client
        self._name = name
        self.by: _MetricPattern9By[T] = _MetricPattern9By(client, name)

    @property
    def name(self) -> str:
        """Get the metric name."""
        return self._name

    def indexes(self) -> List[str]:
        """Get the list of available indexes."""
        return ['dateindex', 'height']

    def get(self, index: str) -> Optional[Endpoint[T]]:
        """Get an endpoint for a specific index, if supported."""
        if index == 'dateindex': return self.by.by_dateindex()
        elif index == 'height': return self.by.by_height()
        return None

class _MetricPattern10By(Generic[T]):
    """Index endpoint methods container."""

    def __init__(self, client: BrkClientBase, name: str):
        self._client = client
        self._name = name

    def by_dateindex(self) -> Endpoint[T]:
        return Endpoint(self._client, self._name, 'dateindex')

    def by_monthindex(self) -> Endpoint[T]:
        return Endpoint(self._client, self._name, 'monthindex')

class MetricPattern10(Generic[T]):
    """Index accessor for metrics with 2 indexes."""

    def __init__(self, client: BrkClientBase, name: str):
        self._client = client
        self._name = name
        self.by: _MetricPattern10By[T] = _MetricPattern10By(client, name)

    @property
    def name(self) -> str:
        """Get the metric name."""
        return self._name

    def indexes(self) -> List[str]:
        """Get the list of available indexes."""
        return ['dateindex', 'monthindex']

    def get(self, index: str) -> Optional[Endpoint[T]]:
        """Get an endpoint for a specific index, if supported."""
        if index == 'dateindex': return self.by.by_dateindex()
        elif index == 'monthindex': return self.by.by_monthindex()
        return None

class _MetricPattern11By(Generic[T]):
    """Index endpoint methods container."""

    def __init__(self, client: BrkClientBase, name: str):
        self._client = client
        self._name = name

    def by_dateindex(self) -> Endpoint[T]:
        return Endpoint(self._client, self._name, 'dateindex')

    def by_weekindex(self) -> Endpoint[T]:
        return Endpoint(self._client, self._name, 'weekindex')

class MetricPattern11(Generic[T]):
    """Index accessor for metrics with 2 indexes."""

    def __init__(self, client: BrkClientBase, name: str):
        self._client = client
        self._name = name
        self.by: _MetricPattern11By[T] = _MetricPattern11By(client, name)

    @property
    def name(self) -> str:
        """Get the metric name."""
        return self._name

    def indexes(self) -> List[str]:
        """Get the list of available indexes."""
        return ['dateindex', 'weekindex']

    def get(self, index: str) -> Optional[Endpoint[T]]:
        """Get an endpoint for a specific index, if supported."""
        if index == 'dateindex': return self.by.by_dateindex()
        elif index == 'weekindex': return self.by.by_weekindex()
        return None

class _MetricPattern12By(Generic[T]):
    """Index endpoint methods container."""

    def __init__(self, client: BrkClientBase, name: str):
        self._client = client
        self._name = name

    def by_decadeindex(self) -> Endpoint[T]:
        return Endpoint(self._client, self._name, 'decadeindex')

    def by_yearindex(self) -> Endpoint[T]:
        return Endpoint(self._client, self._name, 'yearindex')

class MetricPattern12(Generic[T]):
    """Index accessor for metrics with 2 indexes."""

    def __init__(self, client: BrkClientBase, name: str):
        self._client = client
        self._name = name
        self.by: _MetricPattern12By[T] = _MetricPattern12By(client, name)

    @property
    def name(self) -> str:
        """Get the metric name."""
        return self._name

    def indexes(self) -> List[str]:
        """Get the list of available indexes."""
        return ['decadeindex', 'yearindex']

    def get(self, index: str) -> Optional[Endpoint[T]]:
        """Get an endpoint for a specific index, if supported."""
        if index == 'decadeindex': return self.by.by_decadeindex()
        elif index == 'yearindex': return self.by.by_yearindex()
        return None

class _MetricPattern13By(Generic[T]):
    """Index endpoint methods container."""

    def __init__(self, client: BrkClientBase, name: str):
        self._client = client
        self._name = name

    def by_difficultyepoch(self) -> Endpoint[T]:
        return Endpoint(self._client, self._name, 'difficultyepoch')

    def by_halvingepoch(self) -> Endpoint[T]:
        return Endpoint(self._client, self._name, 'halvingepoch')

class MetricPattern13(Generic[T]):
    """Index accessor for metrics with 2 indexes."""

    def __init__(self, client: BrkClientBase, name: str):
        self._client = client
        self._name = name
        self.by: _MetricPattern13By[T] = _MetricPattern13By(client, name)

    @property
    def name(self) -> str:
        """Get the metric name."""
        return self._name

    def indexes(self) -> List[str]:
        """Get the list of available indexes."""
        return ['difficultyepoch', 'halvingepoch']

    def get(self, index: str) -> Optional[Endpoint[T]]:
        """Get an endpoint for a specific index, if supported."""
        if index == 'difficultyepoch': return self.by.by_difficultyepoch()
        elif index == 'halvingepoch': return self.by.by_halvingepoch()
        return None

class _MetricPattern14By(Generic[T]):
    """Index endpoint methods container."""

    def __init__(self, client: BrkClientBase, name: str):
        self._client = client
        self._name = name

    def by_difficultyepoch(self) -> Endpoint[T]:
        return Endpoint(self._client, self._name, 'difficultyepoch')

    def by_height(self) -> Endpoint[T]:
        return Endpoint(self._client, self._name, 'height')

class MetricPattern14(Generic[T]):
    """Index accessor for metrics with 2 indexes."""

    def __init__(self, client: BrkClientBase, name: str):
        self._client = client
        self._name = name
        self.by: _MetricPattern14By[T] = _MetricPattern14By(client, name)

    @property
    def name(self) -> str:
        """Get the metric name."""
        return self._name

    # NOTE(review): the source chunk is truncated mid-way through this class;
    # indexes() and get() below are completed from the generator's fixed
    # template (cf. MetricPattern9/13, and _MetricPattern14By's two methods).
    # Verify against the regenerated file.
    def indexes(self) -> List[str]:
        """Get the list of available indexes."""
        return ['difficultyepoch', 'height']

    def get(self, index: str) -> Optional[Endpoint[T]]:
        """Get an endpoint for a specific index, if supported."""
        if index == 'difficultyepoch': return self.by.by_difficultyepoch()
        elif index == 'height': return self.by.by_height()
        return None
available indexes.""" + return ['difficultyepoch', 'height'] + + def get(self, index: str) -> Optional[Endpoint[T]]: + """Get an endpoint for a specific index, if supported.""" + if index == 'difficultyepoch': return self.by.by_difficultyepoch() + elif index == 'height': return self.by.by_height() + return None + +class _MetricPattern15By(Generic[T]): + """Index endpoint methods container.""" + + def __init__(self, client: BrkClientBase, name: str): + self._client = client + self._name = name + + def by_halvingepoch(self) -> Endpoint[T]: + return Endpoint(self._client, self._name, 'halvingepoch') + + def by_height(self) -> Endpoint[T]: + return Endpoint(self._client, self._name, 'height') + +class MetricPattern15(Generic[T]): + """Index accessor for metrics with 2 indexes.""" + + def __init__(self, client: BrkClientBase, name: str): + self._client = client + self._name = name + self.by: _MetricPattern15By[T] = _MetricPattern15By(client, name) + + @property + def name(self) -> str: + """Get the metric name.""" + return self._name + + def indexes(self) -> List[str]: + """Get the list of available indexes.""" + return ['halvingepoch', 'height'] + + def get(self, index: str) -> Optional[Endpoint[T]]: + """Get an endpoint for a specific index, if supported.""" + if index == 'halvingepoch': return self.by.by_halvingepoch() + elif index == 'height': return self.by.by_height() + return None + +class _MetricPattern16By(Generic[T]): + """Index endpoint methods container.""" + + def __init__(self, client: BrkClientBase, name: str): + self._client = client + self._name = name + + def by_height(self) -> Endpoint[T]: + return Endpoint(self._client, self._name, 'height') + + def by_txindex(self) -> Endpoint[T]: + return Endpoint(self._client, self._name, 'txindex') + +class MetricPattern16(Generic[T]): + """Index accessor for metrics with 2 indexes.""" + + def __init__(self, client: BrkClientBase, name: str): + self._client = client + self._name = name + self.by: 
_MetricPattern16By[T] = _MetricPattern16By(client, name) + + @property + def name(self) -> str: + """Get the metric name.""" + return self._name + + def indexes(self) -> List[str]: + """Get the list of available indexes.""" + return ['height', 'txindex'] + + def get(self, index: str) -> Optional[Endpoint[T]]: + """Get an endpoint for a specific index, if supported.""" + if index == 'height': return self.by.by_height() + elif index == 'txindex': return self.by.by_txindex() + return None + +class _MetricPattern17By(Generic[T]): + """Index endpoint methods container.""" + + def __init__(self, client: BrkClientBase, name: str): + self._client = client + self._name = name + + def by_monthindex(self) -> Endpoint[T]: + return Endpoint(self._client, self._name, 'monthindex') + + def by_quarterindex(self) -> Endpoint[T]: + return Endpoint(self._client, self._name, 'quarterindex') + +class MetricPattern17(Generic[T]): + """Index accessor for metrics with 2 indexes.""" + + def __init__(self, client: BrkClientBase, name: str): + self._client = client + self._name = name + self.by: _MetricPattern17By[T] = _MetricPattern17By(client, name) + + @property + def name(self) -> str: + """Get the metric name.""" + return self._name + + def indexes(self) -> List[str]: + """Get the list of available indexes.""" + return ['monthindex', 'quarterindex'] + + def get(self, index: str) -> Optional[Endpoint[T]]: + """Get an endpoint for a specific index, if supported.""" + if index == 'monthindex': return self.by.by_monthindex() + elif index == 'quarterindex': return self.by.by_quarterindex() + return None + +class _MetricPattern18By(Generic[T]): + """Index endpoint methods container.""" + + def __init__(self, client: BrkClientBase, name: str): + self._client = client + self._name = name + + def by_monthindex(self) -> Endpoint[T]: + return Endpoint(self._client, self._name, 'monthindex') + + def by_semesterindex(self) -> Endpoint[T]: + return Endpoint(self._client, self._name, 'semesterindex') 
+ +class MetricPattern18(Generic[T]): + """Index accessor for metrics with 2 indexes.""" + + def __init__(self, client: BrkClientBase, name: str): + self._client = client + self._name = name + self.by: _MetricPattern18By[T] = _MetricPattern18By(client, name) + + @property + def name(self) -> str: + """Get the metric name.""" + return self._name + + def indexes(self) -> List[str]: + """Get the list of available indexes.""" + return ['monthindex', 'semesterindex'] + + def get(self, index: str) -> Optional[Endpoint[T]]: + """Get an endpoint for a specific index, if supported.""" + if index == 'monthindex': return self.by.by_monthindex() + elif index == 'semesterindex': return self.by.by_semesterindex() + return None + +class _MetricPattern19By(Generic[T]): + """Index endpoint methods container.""" + + def __init__(self, client: BrkClientBase, name: str): + self._client = client + self._name = name + + def by_monthindex(self) -> Endpoint[T]: + return Endpoint(self._client, self._name, 'monthindex') + + def by_weekindex(self) -> Endpoint[T]: + return Endpoint(self._client, self._name, 'weekindex') + +class MetricPattern19(Generic[T]): + """Index accessor for metrics with 2 indexes.""" + + def __init__(self, client: BrkClientBase, name: str): + self._client = client + self._name = name + self.by: _MetricPattern19By[T] = _MetricPattern19By(client, name) + + @property + def name(self) -> str: + """Get the metric name.""" + return self._name + + def indexes(self) -> List[str]: + """Get the list of available indexes.""" + return ['monthindex', 'weekindex'] + + def get(self, index: str) -> Optional[Endpoint[T]]: + """Get an endpoint for a specific index, if supported.""" + if index == 'monthindex': return self.by.by_monthindex() + elif index == 'weekindex': return self.by.by_weekindex() + return None + +class _MetricPattern20By(Generic[T]): + """Index endpoint methods container.""" + + def __init__(self, client: BrkClientBase, name: str): + self._client = client + self._name 
= name + + def by_monthindex(self) -> Endpoint[T]: + return Endpoint(self._client, self._name, 'monthindex') + + def by_yearindex(self) -> Endpoint[T]: + return Endpoint(self._client, self._name, 'yearindex') + +class MetricPattern20(Generic[T]): + """Index accessor for metrics with 2 indexes.""" + + def __init__(self, client: BrkClientBase, name: str): + self._client = client + self._name = name + self.by: _MetricPattern20By[T] = _MetricPattern20By(client, name) + + @property + def name(self) -> str: + """Get the metric name.""" + return self._name + + def indexes(self) -> List[str]: + """Get the list of available indexes.""" + return ['monthindex', 'yearindex'] + + def get(self, index: str) -> Optional[Endpoint[T]]: + """Get an endpoint for a specific index, if supported.""" + if index == 'monthindex': return self.by.by_monthindex() + elif index == 'yearindex': return self.by.by_yearindex() + return None + +class _MetricPattern21By(Generic[T]): + """Index endpoint methods container.""" + + def __init__(self, client: BrkClientBase, name: str): + self._client = client + self._name = name + + def by_dateindex(self) -> Endpoint[T]: + return Endpoint(self._client, self._name, 'dateindex') + +class MetricPattern21(Generic[T]): + """Index accessor for metrics with 1 indexes.""" + + def __init__(self, client: BrkClientBase, name: str): + self._client = client + self._name = name + self.by: _MetricPattern21By[T] = _MetricPattern21By(client, name) + + @property + def name(self) -> str: + """Get the metric name.""" + return self._name + + def indexes(self) -> List[str]: + """Get the list of available indexes.""" + return ['dateindex'] + + def get(self, index: str) -> Optional[Endpoint[T]]: + """Get an endpoint for a specific index, if supported.""" + if index == 'dateindex': return self.by.by_dateindex() + return None + +class _MetricPattern22By(Generic[T]): + """Index endpoint methods container.""" + + def __init__(self, client: BrkClientBase, name: str): + self._client = 
client + self._name = name + + def by_decadeindex(self) -> Endpoint[T]: + return Endpoint(self._client, self._name, 'decadeindex') + +class MetricPattern22(Generic[T]): + """Index accessor for metrics with 1 indexes.""" + + def __init__(self, client: BrkClientBase, name: str): + self._client = client + self._name = name + self.by: _MetricPattern22By[T] = _MetricPattern22By(client, name) + + @property + def name(self) -> str: + """Get the metric name.""" + return self._name + + def indexes(self) -> List[str]: + """Get the list of available indexes.""" + return ['decadeindex'] + + def get(self, index: str) -> Optional[Endpoint[T]]: + """Get an endpoint for a specific index, if supported.""" + if index == 'decadeindex': return self.by.by_decadeindex() + return None + +class _MetricPattern23By(Generic[T]): + """Index endpoint methods container.""" + + def __init__(self, client: BrkClientBase, name: str): + self._client = client + self._name = name + + def by_difficultyepoch(self) -> Endpoint[T]: + return Endpoint(self._client, self._name, 'difficultyepoch') + +class MetricPattern23(Generic[T]): + """Index accessor for metrics with 1 indexes.""" + + def __init__(self, client: BrkClientBase, name: str): + self._client = client + self._name = name + self.by: _MetricPattern23By[T] = _MetricPattern23By(client, name) + + @property + def name(self) -> str: + """Get the metric name.""" + return self._name + + def indexes(self) -> List[str]: + """Get the list of available indexes.""" + return ['difficultyepoch'] + + def get(self, index: str) -> Optional[Endpoint[T]]: + """Get an endpoint for a specific index, if supported.""" + if index == 'difficultyepoch': return self.by.by_difficultyepoch() + return None + +class _MetricPattern24By(Generic[T]): + """Index endpoint methods container.""" + + def __init__(self, client: BrkClientBase, name: str): + self._client = client + self._name = name + + def by_emptyoutputindex(self) -> Endpoint[T]: + return Endpoint(self._client, 
self._name, 'emptyoutputindex') + +class MetricPattern24(Generic[T]): + """Index accessor for metrics with 1 indexes.""" + + def __init__(self, client: BrkClientBase, name: str): + self._client = client + self._name = name + self.by: _MetricPattern24By[T] = _MetricPattern24By(client, name) + + @property + def name(self) -> str: + """Get the metric name.""" + return self._name + + def indexes(self) -> List[str]: + """Get the list of available indexes.""" + return ['emptyoutputindex'] + + def get(self, index: str) -> Optional[Endpoint[T]]: + """Get an endpoint for a specific index, if supported.""" + if index == 'emptyoutputindex': return self.by.by_emptyoutputindex() + return None + +class _MetricPattern25By(Generic[T]): + """Index endpoint methods container.""" + + def __init__(self, client: BrkClientBase, name: str): + self._client = client + self._name = name + + def by_height(self) -> Endpoint[T]: + return Endpoint(self._client, self._name, 'height') + +class MetricPattern25(Generic[T]): + """Index accessor for metrics with 1 indexes.""" + + def __init__(self, client: BrkClientBase, name: str): + self._client = client + self._name = name + self.by: _MetricPattern25By[T] = _MetricPattern25By(client, name) + + @property + def name(self) -> str: + """Get the metric name.""" + return self._name + + def indexes(self) -> List[str]: + """Get the list of available indexes.""" + return ['height'] + + def get(self, index: str) -> Optional[Endpoint[T]]: + """Get an endpoint for a specific index, if supported.""" + if index == 'height': return self.by.by_height() + return None + +class _MetricPattern26By(Generic[T]): + """Index endpoint methods container.""" + + def __init__(self, client: BrkClientBase, name: str): + self._client = client + self._name = name + + def by_txinindex(self) -> Endpoint[T]: + return Endpoint(self._client, self._name, 'txinindex') + +class MetricPattern26(Generic[T]): + """Index accessor for metrics with 1 indexes.""" + + def __init__(self, client: 
BrkClientBase, name: str): + self._client = client + self._name = name + self.by: _MetricPattern26By[T] = _MetricPattern26By(client, name) + + @property + def name(self) -> str: + """Get the metric name.""" + return self._name + + def indexes(self) -> List[str]: + """Get the list of available indexes.""" + return ['txinindex'] + + def get(self, index: str) -> Optional[Endpoint[T]]: + """Get an endpoint for a specific index, if supported.""" + if index == 'txinindex': return self.by.by_txinindex() + return None + +class _MetricPattern27By(Generic[T]): + """Index endpoint methods container.""" + + def __init__(self, client: BrkClientBase, name: str): + self._client = client + self._name = name + + def by_monthindex(self) -> Endpoint[T]: + return Endpoint(self._client, self._name, 'monthindex') + +class MetricPattern27(Generic[T]): + """Index accessor for metrics with 1 indexes.""" + + def __init__(self, client: BrkClientBase, name: str): + self._client = client + self._name = name + self.by: _MetricPattern27By[T] = _MetricPattern27By(client, name) + + @property + def name(self) -> str: + """Get the metric name.""" + return self._name + + def indexes(self) -> List[str]: + """Get the list of available indexes.""" + return ['monthindex'] + + def get(self, index: str) -> Optional[Endpoint[T]]: + """Get an endpoint for a specific index, if supported.""" + if index == 'monthindex': return self.by.by_monthindex() + return None + +class _MetricPattern28By(Generic[T]): + """Index endpoint methods container.""" + + def __init__(self, client: BrkClientBase, name: str): + self._client = client + self._name = name + + def by_opreturnindex(self) -> Endpoint[T]: + return Endpoint(self._client, self._name, 'opreturnindex') + +class MetricPattern28(Generic[T]): + """Index accessor for metrics with 1 indexes.""" + + def __init__(self, client: BrkClientBase, name: str): + self._client = client + self._name = name + self.by: _MetricPattern28By[T] = _MetricPattern28By(client, name) + + 
@property + def name(self) -> str: + """Get the metric name.""" + return self._name + + def indexes(self) -> List[str]: + """Get the list of available indexes.""" + return ['opreturnindex'] + + def get(self, index: str) -> Optional[Endpoint[T]]: + """Get an endpoint for a specific index, if supported.""" + if index == 'opreturnindex': return self.by.by_opreturnindex() + return None + +class _MetricPattern29By(Generic[T]): + """Index endpoint methods container.""" + + def __init__(self, client: BrkClientBase, name: str): + self._client = client + self._name = name + + def by_txoutindex(self) -> Endpoint[T]: + return Endpoint(self._client, self._name, 'txoutindex') + +class MetricPattern29(Generic[T]): + """Index accessor for metrics with 1 indexes.""" + + def __init__(self, client: BrkClientBase, name: str): + self._client = client + self._name = name + self.by: _MetricPattern29By[T] = _MetricPattern29By(client, name) + + @property + def name(self) -> str: + """Get the metric name.""" + return self._name + + def indexes(self) -> List[str]: + """Get the list of available indexes.""" + return ['txoutindex'] + + def get(self, index: str) -> Optional[Endpoint[T]]: + """Get an endpoint for a specific index, if supported.""" + if index == 'txoutindex': return self.by.by_txoutindex() + return None + +class _MetricPattern30By(Generic[T]): + """Index endpoint methods container.""" + + def __init__(self, client: BrkClientBase, name: str): + self._client = client + self._name = name + + def by_p2aaddressindex(self) -> Endpoint[T]: + return Endpoint(self._client, self._name, 'p2aaddressindex') + +class MetricPattern30(Generic[T]): + """Index accessor for metrics with 1 indexes.""" + + def __init__(self, client: BrkClientBase, name: str): + self._client = client + self._name = name + self.by: _MetricPattern30By[T] = _MetricPattern30By(client, name) + + @property + def name(self) -> str: + """Get the metric name.""" + return self._name + + def indexes(self) -> List[str]: + """Get 
the list of available indexes.""" + return ['p2aaddressindex'] + + def get(self, index: str) -> Optional[Endpoint[T]]: + """Get an endpoint for a specific index, if supported.""" + if index == 'p2aaddressindex': return self.by.by_p2aaddressindex() + return None + +class _MetricPattern31By(Generic[T]): + """Index endpoint methods container.""" + + def __init__(self, client: BrkClientBase, name: str): + self._client = client + self._name = name + + def by_p2msoutputindex(self) -> Endpoint[T]: + return Endpoint(self._client, self._name, 'p2msoutputindex') + +class MetricPattern31(Generic[T]): + """Index accessor for metrics with 1 indexes.""" + + def __init__(self, client: BrkClientBase, name: str): + self._client = client + self._name = name + self.by: _MetricPattern31By[T] = _MetricPattern31By(client, name) + + @property + def name(self) -> str: + """Get the metric name.""" + return self._name + + def indexes(self) -> List[str]: + """Get the list of available indexes.""" + return ['p2msoutputindex'] + + def get(self, index: str) -> Optional[Endpoint[T]]: + """Get an endpoint for a specific index, if supported.""" + if index == 'p2msoutputindex': return self.by.by_p2msoutputindex() + return None + +class _MetricPattern32By(Generic[T]): + """Index endpoint methods container.""" + + def __init__(self, client: BrkClientBase, name: str): + self._client = client + self._name = name + + def by_p2pk33addressindex(self) -> Endpoint[T]: + return Endpoint(self._client, self._name, 'p2pk33addressindex') + +class MetricPattern32(Generic[T]): + """Index accessor for metrics with 1 indexes.""" + + def __init__(self, client: BrkClientBase, name: str): + self._client = client + self._name = name + self.by: _MetricPattern32By[T] = _MetricPattern32By(client, name) + + @property + def name(self) -> str: + """Get the metric name.""" + return self._name + + def indexes(self) -> List[str]: + """Get the list of available indexes.""" + return ['p2pk33addressindex'] + + def get(self, index: 
str) -> Optional[Endpoint[T]]: + """Get an endpoint for a specific index, if supported.""" + if index == 'p2pk33addressindex': return self.by.by_p2pk33addressindex() + return None + +class _MetricPattern33By(Generic[T]): + """Index endpoint methods container.""" + + def __init__(self, client: BrkClientBase, name: str): + self._client = client + self._name = name + + def by_p2pk65addressindex(self) -> Endpoint[T]: + return Endpoint(self._client, self._name, 'p2pk65addressindex') + +class MetricPattern33(Generic[T]): + """Index accessor for metrics with 1 indexes.""" + + def __init__(self, client: BrkClientBase, name: str): + self._client = client + self._name = name + self.by: _MetricPattern33By[T] = _MetricPattern33By(client, name) + + @property + def name(self) -> str: + """Get the metric name.""" + return self._name + + def indexes(self) -> List[str]: + """Get the list of available indexes.""" + return ['p2pk65addressindex'] + + def get(self, index: str) -> Optional[Endpoint[T]]: + """Get an endpoint for a specific index, if supported.""" + if index == 'p2pk65addressindex': return self.by.by_p2pk65addressindex() + return None + +class _MetricPattern34By(Generic[T]): + """Index endpoint methods container.""" + + def __init__(self, client: BrkClientBase, name: str): + self._client = client + self._name = name + + def by_p2pkhaddressindex(self) -> Endpoint[T]: + return Endpoint(self._client, self._name, 'p2pkhaddressindex') + +class MetricPattern34(Generic[T]): + """Index accessor for metrics with 1 indexes.""" + + def __init__(self, client: BrkClientBase, name: str): + self._client = client + self._name = name + self.by: _MetricPattern34By[T] = _MetricPattern34By(client, name) + + @property + def name(self) -> str: + """Get the metric name.""" + return self._name + + def indexes(self) -> List[str]: + """Get the list of available indexes.""" + return ['p2pkhaddressindex'] + + def get(self, index: str) -> Optional[Endpoint[T]]: + """Get an endpoint for a specific 
index, if supported.""" + if index == 'p2pkhaddressindex': return self.by.by_p2pkhaddressindex() + return None + +class _MetricPattern35By(Generic[T]): + """Index endpoint methods container.""" + + def __init__(self, client: BrkClientBase, name: str): + self._client = client + self._name = name + + def by_p2shaddressindex(self) -> Endpoint[T]: + return Endpoint(self._client, self._name, 'p2shaddressindex') + +class MetricPattern35(Generic[T]): + """Index accessor for metrics with 1 indexes.""" + + def __init__(self, client: BrkClientBase, name: str): + self._client = client + self._name = name + self.by: _MetricPattern35By[T] = _MetricPattern35By(client, name) + + @property + def name(self) -> str: + """Get the metric name.""" + return self._name + + def indexes(self) -> List[str]: + """Get the list of available indexes.""" + return ['p2shaddressindex'] + + def get(self, index: str) -> Optional[Endpoint[T]]: + """Get an endpoint for a specific index, if supported.""" + if index == 'p2shaddressindex': return self.by.by_p2shaddressindex() + return None + +class _MetricPattern36By(Generic[T]): + """Index endpoint methods container.""" + + def __init__(self, client: BrkClientBase, name: str): + self._client = client + self._name = name + + def by_p2traddressindex(self) -> Endpoint[T]: + return Endpoint(self._client, self._name, 'p2traddressindex') + +class MetricPattern36(Generic[T]): + """Index accessor for metrics with 1 indexes.""" + + def __init__(self, client: BrkClientBase, name: str): + self._client = client + self._name = name + self.by: _MetricPattern36By[T] = _MetricPattern36By(client, name) + + @property + def name(self) -> str: + """Get the metric name.""" + return self._name + + def indexes(self) -> List[str]: + """Get the list of available indexes.""" + return ['p2traddressindex'] + + def get(self, index: str) -> Optional[Endpoint[T]]: + """Get an endpoint for a specific index, if supported.""" + if index == 'p2traddressindex': return 
self.by.by_p2traddressindex() + return None + +class _MetricPattern37By(Generic[T]): + """Index endpoint methods container.""" + + def __init__(self, client: BrkClientBase, name: str): + self._client = client + self._name = name + + def by_p2wpkhaddressindex(self) -> Endpoint[T]: + return Endpoint(self._client, self._name, 'p2wpkhaddressindex') + +class MetricPattern37(Generic[T]): + """Index accessor for metrics with 1 indexes.""" + + def __init__(self, client: BrkClientBase, name: str): + self._client = client + self._name = name + self.by: _MetricPattern37By[T] = _MetricPattern37By(client, name) + + @property + def name(self) -> str: + """Get the metric name.""" + return self._name + + def indexes(self) -> List[str]: + """Get the list of available indexes.""" + return ['p2wpkhaddressindex'] + + def get(self, index: str) -> Optional[Endpoint[T]]: + """Get an endpoint for a specific index, if supported.""" + if index == 'p2wpkhaddressindex': return self.by.by_p2wpkhaddressindex() + return None + +class _MetricPattern38By(Generic[T]): + """Index endpoint methods container.""" + + def __init__(self, client: BrkClientBase, name: str): + self._client = client + self._name = name + + def by_p2wshaddressindex(self) -> Endpoint[T]: + return Endpoint(self._client, self._name, 'p2wshaddressindex') + +class MetricPattern38(Generic[T]): + """Index accessor for metrics with 1 indexes.""" + + def __init__(self, client: BrkClientBase, name: str): + self._client = client + self._name = name + self.by: _MetricPattern38By[T] = _MetricPattern38By(client, name) + + @property + def name(self) -> str: + """Get the metric name.""" + return self._name + + def indexes(self) -> List[str]: + """Get the list of available indexes.""" + return ['p2wshaddressindex'] + + def get(self, index: str) -> Optional[Endpoint[T]]: + """Get an endpoint for a specific index, if supported.""" + if index == 'p2wshaddressindex': return self.by.by_p2wshaddressindex() + return None + +class 
_MetricPattern39By(Generic[T]): + """Index endpoint methods container.""" + + def __init__(self, client: BrkClientBase, name: str): + self._client = client + self._name = name + + def by_quarterindex(self) -> Endpoint[T]: + return Endpoint(self._client, self._name, 'quarterindex') + +class MetricPattern39(Generic[T]): + """Index accessor for metrics with 1 indexes.""" + + def __init__(self, client: BrkClientBase, name: str): + self._client = client + self._name = name + self.by: _MetricPattern39By[T] = _MetricPattern39By(client, name) + + @property + def name(self) -> str: + """Get the metric name.""" + return self._name + + def indexes(self) -> List[str]: + """Get the list of available indexes.""" + return ['quarterindex'] + + def get(self, index: str) -> Optional[Endpoint[T]]: + """Get an endpoint for a specific index, if supported.""" + if index == 'quarterindex': return self.by.by_quarterindex() + return None + +class _MetricPattern40By(Generic[T]): + """Index endpoint methods container.""" + + def __init__(self, client: BrkClientBase, name: str): + self._client = client + self._name = name + + def by_semesterindex(self) -> Endpoint[T]: + return Endpoint(self._client, self._name, 'semesterindex') + +class MetricPattern40(Generic[T]): + """Index accessor for metrics with 1 indexes.""" + + def __init__(self, client: BrkClientBase, name: str): + self._client = client + self._name = name + self.by: _MetricPattern40By[T] = _MetricPattern40By(client, name) + + @property + def name(self) -> str: + """Get the metric name.""" + return self._name + + def indexes(self) -> List[str]: + """Get the list of available indexes.""" + return ['semesterindex'] + + def get(self, index: str) -> Optional[Endpoint[T]]: + """Get an endpoint for a specific index, if supported.""" + if index == 'semesterindex': return self.by.by_semesterindex() + return None + +class _MetricPattern41By(Generic[T]): + """Index endpoint methods container.""" + + def __init__(self, client: BrkClientBase, 
name: str): + self._client = client + self._name = name + + def by_txindex(self) -> Endpoint[T]: + return Endpoint(self._client, self._name, 'txindex') + +class MetricPattern41(Generic[T]): + """Index accessor for metrics with 1 indexes.""" + + def __init__(self, client: BrkClientBase, name: str): + self._client = client + self._name = name + self.by: _MetricPattern41By[T] = _MetricPattern41By(client, name) + + @property + def name(self) -> str: + """Get the metric name.""" + return self._name + + def indexes(self) -> List[str]: + """Get the list of available indexes.""" + return ['txindex'] + + def get(self, index: str) -> Optional[Endpoint[T]]: + """Get an endpoint for a specific index, if supported.""" + if index == 'txindex': return self.by.by_txindex() + return None + +class _MetricPattern42By(Generic[T]): + """Index endpoint methods container.""" + + def __init__(self, client: BrkClientBase, name: str): + self._client = client + self._name = name + + def by_unknownoutputindex(self) -> Endpoint[T]: + return Endpoint(self._client, self._name, 'unknownoutputindex') + +class MetricPattern42(Generic[T]): + """Index accessor for metrics with 1 indexes.""" + + def __init__(self, client: BrkClientBase, name: str): + self._client = client + self._name = name + self.by: _MetricPattern42By[T] = _MetricPattern42By(client, name) + + @property + def name(self) -> str: + """Get the metric name.""" + return self._name + + def indexes(self) -> List[str]: + """Get the list of available indexes.""" + return ['unknownoutputindex'] + + def get(self, index: str) -> Optional[Endpoint[T]]: + """Get an endpoint for a specific index, if supported.""" + if index == 'unknownoutputindex': return self.by.by_unknownoutputindex() + return None + +class _MetricPattern43By(Generic[T]): + """Index endpoint methods container.""" + + def __init__(self, client: BrkClientBase, name: str): + self._client = client + self._name = name + + def by_weekindex(self) -> Endpoint[T]: + return 
Endpoint(self._client, self._name, 'weekindex') + +class MetricPattern43(Generic[T]): + """Index accessor for metrics with 1 indexes.""" + + def __init__(self, client: BrkClientBase, name: str): + self._client = client + self._name = name + self.by: _MetricPattern43By[T] = _MetricPattern43By(client, name) + + @property + def name(self) -> str: + """Get the metric name.""" + return self._name + + def indexes(self) -> List[str]: + """Get the list of available indexes.""" + return ['weekindex'] + + def get(self, index: str) -> Optional[Endpoint[T]]: + """Get an endpoint for a specific index, if supported.""" + if index == 'weekindex': return self.by.by_weekindex() + return None + +class _MetricPattern44By(Generic[T]): + """Index endpoint methods container.""" + + def __init__(self, client: BrkClientBase, name: str): + self._client = client + self._name = name + + def by_yearindex(self) -> Endpoint[T]: + return Endpoint(self._client, self._name, 'yearindex') + +class MetricPattern44(Generic[T]): + """Index accessor for metrics with 1 indexes.""" + + def __init__(self, client: BrkClientBase, name: str): + self._client = client + self._name = name + self.by: _MetricPattern44By[T] = _MetricPattern44By(client, name) + + @property + def name(self) -> str: + """Get the metric name.""" + return self._name + + def indexes(self) -> List[str]: + """Get the list of available indexes.""" + return ['yearindex'] + + def get(self, index: str) -> Optional[Endpoint[T]]: + """Get an endpoint for a specific index, if supported.""" + if index == 'yearindex': return self.by.by_yearindex() + return None + +class _MetricPattern45By(Generic[T]): + """Index endpoint methods container.""" + + def __init__(self, client: BrkClientBase, name: str): + self._client = client + self._name = name + + def by_loadedaddressindex(self) -> Endpoint[T]: + return Endpoint(self._client, self._name, 'loadedaddressindex') + +class MetricPattern45(Generic[T]): + """Index accessor for metrics with 1 indexes.""" + 
+ def __init__(self, client: BrkClientBase, name: str): + self._client = client + self._name = name + self.by: _MetricPattern45By[T] = _MetricPattern45By(client, name) + + @property + def name(self) -> str: + """Get the metric name.""" + return self._name + + def indexes(self) -> List[str]: + """Get the list of available indexes.""" + return ['loadedaddressindex'] + + def get(self, index: str) -> Optional[Endpoint[T]]: + """Get an endpoint for a specific index, if supported.""" + if index == 'loadedaddressindex': return self.by.by_loadedaddressindex() + return None + +class _MetricPattern46By(Generic[T]): + """Index endpoint methods container.""" + + def __init__(self, client: BrkClientBase, name: str): + self._client = client + self._name = name + + def by_emptyaddressindex(self) -> Endpoint[T]: + return Endpoint(self._client, self._name, 'emptyaddressindex') + +class MetricPattern46(Generic[T]): + """Index accessor for metrics with 1 indexes.""" + + def __init__(self, client: BrkClientBase, name: str): + self._client = client + self._name = name + self.by: _MetricPattern46By[T] = _MetricPattern46By(client, name) + + @property + def name(self) -> str: + """Get the metric name.""" + return self._name + + def indexes(self) -> List[str]: + """Get the list of available indexes.""" + return ['emptyaddressindex'] + + def get(self, index: str) -> Optional[Endpoint[T]]: + """Get an endpoint for a specific index, if supported.""" + if index == 'emptyaddressindex': return self.by.by_emptyaddressindex() + return None + +# Reusable structural pattern classes + +class RealizedPattern3: + """Pattern struct for repeated tree structure.""" + + def __init__(self, client: BrkClientBase, acc: str): + """Create pattern node with accumulated metric name.""" + self.adjusted_sopr: MetricPattern21[StoredF64] = MetricPattern21(client, _m(acc, 'adjusted_sopr')) + self.adjusted_sopr_30d_ema: MetricPattern21[StoredF64] = MetricPattern21(client, _m(acc, 'adjusted_sopr_30d_ema')) + 
self.adjusted_sopr_7d_ema: MetricPattern21[StoredF64] = MetricPattern21(client, _m(acc, 'adjusted_sopr_7d_ema')) + self.adjusted_value_created: MetricPattern1[Dollars] = MetricPattern1(client, _m(acc, 'adjusted_value_created')) + self.adjusted_value_destroyed: MetricPattern1[Dollars] = MetricPattern1(client, _m(acc, 'adjusted_value_destroyed')) + self.mvrv: MetricPattern4[StoredF32] = MetricPattern4(client, _m(acc, 'mvrv')) + self.neg_realized_loss: BlockCountPattern[Dollars] = BlockCountPattern(client, _m(acc, 'neg_realized_loss')) + self.net_realized_pnl: BlockCountPattern[Dollars] = BlockCountPattern(client, _m(acc, 'net_realized_pnl')) + self.net_realized_pnl_cumulative_30d_delta: MetricPattern4[Dollars] = MetricPattern4(client, _m(acc, 'net_realized_pnl_cumulative_30d_delta')) + self.net_realized_pnl_cumulative_30d_delta_rel_to_market_cap: MetricPattern4[StoredF32] = MetricPattern4(client, _m(acc, 'net_realized_pnl_cumulative_30d_delta_rel_to_market_cap')) + self.net_realized_pnl_cumulative_30d_delta_rel_to_realized_cap: MetricPattern4[StoredF32] = MetricPattern4(client, _m(acc, 'net_realized_pnl_cumulative_30d_delta_rel_to_realized_cap')) + self.net_realized_pnl_rel_to_realized_cap: MetricPattern25[StoredF32] = MetricPattern25(client, _m(acc, 'net_realized_pnl_rel_to_realized_cap')) + self.realized_cap: MetricPattern1[Dollars] = MetricPattern1(client, _m(acc, 'realized_cap')) + self.realized_cap_30d_delta: MetricPattern4[Dollars] = MetricPattern4(client, _m(acc, 'realized_cap_30d_delta')) + self.realized_cap_rel_to_own_market_cap: MetricPattern1[StoredF32] = MetricPattern1(client, _m(acc, 'realized_cap_rel_to_own_market_cap')) + self.realized_loss: BlockCountPattern[Dollars] = BlockCountPattern(client, _m(acc, 'realized_loss')) + self.realized_loss_rel_to_realized_cap: MetricPattern25[StoredF32] = MetricPattern25(client, _m(acc, 'realized_loss_rel_to_realized_cap')) + self.realized_price: MetricPattern1[Dollars] = MetricPattern1(client, _m(acc, 
'realized_price')) + self.realized_price_extra: ActivePriceRatioPattern = ActivePriceRatioPattern(client, _m(acc, 'realized_price_ratio')) + self.realized_profit: BlockCountPattern[Dollars] = BlockCountPattern(client, _m(acc, 'realized_profit')) + self.realized_profit_rel_to_realized_cap: MetricPattern25[StoredF32] = MetricPattern25(client, _m(acc, 'realized_profit_rel_to_realized_cap')) + self.realized_profit_to_loss_ratio: MetricPattern21[StoredF64] = MetricPattern21(client, _m(acc, 'realized_profit_to_loss_ratio')) + self.realized_value: MetricPattern1[Dollars] = MetricPattern1(client, _m(acc, 'realized_value')) + self.sell_side_risk_ratio: MetricPattern21[StoredF32] = MetricPattern21(client, _m(acc, 'sell_side_risk_ratio')) + self.sell_side_risk_ratio_30d_ema: MetricPattern21[StoredF32] = MetricPattern21(client, _m(acc, 'sell_side_risk_ratio_30d_ema')) + self.sell_side_risk_ratio_7d_ema: MetricPattern21[StoredF32] = MetricPattern21(client, _m(acc, 'sell_side_risk_ratio_7d_ema')) + self.sopr: MetricPattern21[StoredF64] = MetricPattern21(client, _m(acc, 'sopr')) + self.sopr_30d_ema: MetricPattern21[StoredF64] = MetricPattern21(client, _m(acc, 'sopr_30d_ema')) + self.sopr_7d_ema: MetricPattern21[StoredF64] = MetricPattern21(client, _m(acc, 'sopr_7d_ema')) + self.total_realized_pnl: TotalRealizedPnlPattern[Dollars] = TotalRealizedPnlPattern(client, _m(acc, 'total_realized_pnl')) + self.value_created: MetricPattern1[Dollars] = MetricPattern1(client, _m(acc, 'value_created')) + self.value_destroyed: MetricPattern1[Dollars] = MetricPattern1(client, _m(acc, 'value_destroyed')) + +class Ratio1ySdPattern: + """Pattern struct for repeated tree structure.""" + + def __init__(self, client: BrkClientBase, acc: str): + """Create pattern node with accumulated metric name.""" + self._0sd_usd: MetricPattern4[Dollars] = MetricPattern4(client, _m(acc, '0sd_usd')) + self.m0_5sd: MetricPattern4[StoredF32] = MetricPattern4(client, _m(acc, 'm0_5sd')) + self.m0_5sd_usd: 
MetricPattern4[Dollars] = MetricPattern4(client, _m(acc, 'm0_5sd_usd')) + self.m1_5sd: MetricPattern4[StoredF32] = MetricPattern4(client, _m(acc, 'm1_5sd')) + self.m1_5sd_usd: MetricPattern4[Dollars] = MetricPattern4(client, _m(acc, 'm1_5sd_usd')) + self.m1sd: MetricPattern4[StoredF32] = MetricPattern4(client, _m(acc, 'm1sd')) + self.m1sd_usd: MetricPattern4[Dollars] = MetricPattern4(client, _m(acc, 'm1sd_usd')) + self.m2_5sd: MetricPattern4[StoredF32] = MetricPattern4(client, _m(acc, 'm2_5sd')) + self.m2_5sd_usd: MetricPattern4[Dollars] = MetricPattern4(client, _m(acc, 'm2_5sd_usd')) + self.m2sd: MetricPattern4[StoredF32] = MetricPattern4(client, _m(acc, 'm2sd')) + self.m2sd_usd: MetricPattern4[Dollars] = MetricPattern4(client, _m(acc, 'm2sd_usd')) + self.m3sd: MetricPattern4[StoredF32] = MetricPattern4(client, _m(acc, 'm3sd')) + self.m3sd_usd: MetricPattern4[Dollars] = MetricPattern4(client, _m(acc, 'm3sd_usd')) + self.p0_5sd: MetricPattern4[StoredF32] = MetricPattern4(client, _m(acc, 'p0_5sd')) + self.p0_5sd_usd: MetricPattern4[Dollars] = MetricPattern4(client, _m(acc, 'p0_5sd_usd')) + self.p1_5sd: MetricPattern4[StoredF32] = MetricPattern4(client, _m(acc, 'p1_5sd')) + self.p1_5sd_usd: MetricPattern4[Dollars] = MetricPattern4(client, _m(acc, 'p1_5sd_usd')) + self.p1sd: MetricPattern4[StoredF32] = MetricPattern4(client, _m(acc, 'p1sd')) + self.p1sd_usd: MetricPattern4[Dollars] = MetricPattern4(client, _m(acc, 'p1sd_usd')) + self.p2_5sd: MetricPattern4[StoredF32] = MetricPattern4(client, _m(acc, 'p2_5sd')) + self.p2_5sd_usd: MetricPattern4[Dollars] = MetricPattern4(client, _m(acc, 'p2_5sd_usd')) + self.p2sd: MetricPattern4[StoredF32] = MetricPattern4(client, _m(acc, 'p2sd')) + self.p2sd_usd: MetricPattern4[Dollars] = MetricPattern4(client, _m(acc, 'p2sd_usd')) + self.p3sd: MetricPattern4[StoredF32] = MetricPattern4(client, _m(acc, 'p3sd')) + self.p3sd_usd: MetricPattern4[Dollars] = MetricPattern4(client, _m(acc, 'p3sd_usd')) + self.sd: MetricPattern4[StoredF32] = 
MetricPattern4(client, _m(acc, 'sd')) + self.sma: MetricPattern4[StoredF32] = MetricPattern4(client, _m(acc, 'sma')) + self.zscore: MetricPattern4[StoredF32] = MetricPattern4(client, _m(acc, 'zscore')) + +class RealizedPattern2: + """Pattern struct for repeated tree structure.""" + + def __init__(self, client: BrkClientBase, acc: str): + """Create pattern node with accumulated metric name.""" + self.mvrv: MetricPattern4[StoredF32] = MetricPattern4(client, _m(acc, 'mvrv')) + self.neg_realized_loss: BlockCountPattern[Dollars] = BlockCountPattern(client, _m(acc, 'neg_realized_loss')) + self.net_realized_pnl: BlockCountPattern[Dollars] = BlockCountPattern(client, _m(acc, 'net_realized_pnl')) + self.net_realized_pnl_cumulative_30d_delta: MetricPattern4[Dollars] = MetricPattern4(client, _m(acc, 'net_realized_pnl_cumulative_30d_delta')) + self.net_realized_pnl_cumulative_30d_delta_rel_to_market_cap: MetricPattern4[StoredF32] = MetricPattern4(client, _m(acc, 'net_realized_pnl_cumulative_30d_delta_rel_to_market_cap')) + self.net_realized_pnl_cumulative_30d_delta_rel_to_realized_cap: MetricPattern4[StoredF32] = MetricPattern4(client, _m(acc, 'net_realized_pnl_cumulative_30d_delta_rel_to_realized_cap')) + self.net_realized_pnl_rel_to_realized_cap: MetricPattern25[StoredF32] = MetricPattern25(client, _m(acc, 'net_realized_pnl_rel_to_realized_cap')) + self.realized_cap: MetricPattern1[Dollars] = MetricPattern1(client, _m(acc, 'realized_cap')) + self.realized_cap_30d_delta: MetricPattern4[Dollars] = MetricPattern4(client, _m(acc, 'realized_cap_30d_delta')) + self.realized_cap_rel_to_own_market_cap: MetricPattern1[StoredF32] = MetricPattern1(client, _m(acc, 'realized_cap_rel_to_own_market_cap')) + self.realized_loss: BlockCountPattern[Dollars] = BlockCountPattern(client, _m(acc, 'realized_loss')) + self.realized_loss_rel_to_realized_cap: MetricPattern25[StoredF32] = MetricPattern25(client, _m(acc, 'realized_loss_rel_to_realized_cap')) + self.realized_price: 
MetricPattern1[Dollars] = MetricPattern1(client, _m(acc, 'realized_price')) + self.realized_price_extra: ActivePriceRatioPattern = ActivePriceRatioPattern(client, _m(acc, 'realized_price_ratio')) + self.realized_profit: BlockCountPattern[Dollars] = BlockCountPattern(client, _m(acc, 'realized_profit')) + self.realized_profit_rel_to_realized_cap: MetricPattern25[StoredF32] = MetricPattern25(client, _m(acc, 'realized_profit_rel_to_realized_cap')) + self.realized_profit_to_loss_ratio: MetricPattern21[StoredF64] = MetricPattern21(client, _m(acc, 'realized_profit_to_loss_ratio')) + self.realized_value: MetricPattern1[Dollars] = MetricPattern1(client, _m(acc, 'realized_value')) + self.sell_side_risk_ratio: MetricPattern21[StoredF32] = MetricPattern21(client, _m(acc, 'sell_side_risk_ratio')) + self.sell_side_risk_ratio_30d_ema: MetricPattern21[StoredF32] = MetricPattern21(client, _m(acc, 'sell_side_risk_ratio_30d_ema')) + self.sell_side_risk_ratio_7d_ema: MetricPattern21[StoredF32] = MetricPattern21(client, _m(acc, 'sell_side_risk_ratio_7d_ema')) + self.sopr: MetricPattern21[StoredF64] = MetricPattern21(client, _m(acc, 'sopr')) + self.sopr_30d_ema: MetricPattern21[StoredF64] = MetricPattern21(client, _m(acc, 'sopr_30d_ema')) + self.sopr_7d_ema: MetricPattern21[StoredF64] = MetricPattern21(client, _m(acc, 'sopr_7d_ema')) + self.total_realized_pnl: TotalRealizedPnlPattern[Dollars] = TotalRealizedPnlPattern(client, _m(acc, 'total_realized_pnl')) + self.value_created: MetricPattern1[Dollars] = MetricPattern1(client, _m(acc, 'value_created')) + self.value_destroyed: MetricPattern1[Dollars] = MetricPattern1(client, _m(acc, 'value_destroyed')) + +class RealizedPattern: + """Pattern struct for repeated tree structure.""" + + def __init__(self, client: BrkClientBase, acc: str): + """Create pattern node with accumulated metric name.""" + self.mvrv: MetricPattern4[StoredF32] = MetricPattern4(client, _m(acc, 'mvrv')) + self.neg_realized_loss: BlockCountPattern[Dollars] = 
BlockCountPattern(client, _m(acc, 'neg_realized_loss')) + self.net_realized_pnl: BlockCountPattern[Dollars] = BlockCountPattern(client, _m(acc, 'net_realized_pnl')) + self.net_realized_pnl_cumulative_30d_delta: MetricPattern4[Dollars] = MetricPattern4(client, _m(acc, 'net_realized_pnl_cumulative_30d_delta')) + self.net_realized_pnl_cumulative_30d_delta_rel_to_market_cap: MetricPattern4[StoredF32] = MetricPattern4(client, _m(acc, 'net_realized_pnl_cumulative_30d_delta_rel_to_market_cap')) + self.net_realized_pnl_cumulative_30d_delta_rel_to_realized_cap: MetricPattern4[StoredF32] = MetricPattern4(client, _m(acc, 'net_realized_pnl_cumulative_30d_delta_rel_to_realized_cap')) + self.net_realized_pnl_rel_to_realized_cap: MetricPattern25[StoredF32] = MetricPattern25(client, _m(acc, 'net_realized_pnl_rel_to_realized_cap')) + self.realized_cap: MetricPattern1[Dollars] = MetricPattern1(client, _m(acc, 'realized_cap')) + self.realized_cap_30d_delta: MetricPattern4[Dollars] = MetricPattern4(client, _m(acc, 'realized_cap_30d_delta')) + self.realized_loss: BlockCountPattern[Dollars] = BlockCountPattern(client, _m(acc, 'realized_loss')) + self.realized_loss_rel_to_realized_cap: MetricPattern25[StoredF32] = MetricPattern25(client, _m(acc, 'realized_loss_rel_to_realized_cap')) + self.realized_price: MetricPattern1[Dollars] = MetricPattern1(client, _m(acc, 'realized_price')) + self.realized_price_extra: RealizedPriceExtraPattern = RealizedPriceExtraPattern(client, _m(acc, 'realized_price')) + self.realized_profit: BlockCountPattern[Dollars] = BlockCountPattern(client, _m(acc, 'realized_profit')) + self.realized_profit_rel_to_realized_cap: MetricPattern25[StoredF32] = MetricPattern25(client, _m(acc, 'realized_profit_rel_to_realized_cap')) + self.realized_value: MetricPattern1[Dollars] = MetricPattern1(client, _m(acc, 'realized_value')) + self.sell_side_risk_ratio: MetricPattern21[StoredF32] = MetricPattern21(client, _m(acc, 'sell_side_risk_ratio')) + 
self.sell_side_risk_ratio_30d_ema: MetricPattern21[StoredF32] = MetricPattern21(client, _m(acc, 'sell_side_risk_ratio_30d_ema')) + self.sell_side_risk_ratio_7d_ema: MetricPattern21[StoredF32] = MetricPattern21(client, _m(acc, 'sell_side_risk_ratio_7d_ema')) + self.sopr: MetricPattern21[StoredF64] = MetricPattern21(client, _m(acc, 'sopr')) + self.sopr_30d_ema: MetricPattern21[StoredF64] = MetricPattern21(client, _m(acc, 'sopr_30d_ema')) + self.sopr_7d_ema: MetricPattern21[StoredF64] = MetricPattern21(client, _m(acc, 'sopr_7d_ema')) + self.total_realized_pnl: TotalRealizedPnlPattern[Dollars] = TotalRealizedPnlPattern(client, _m(acc, 'total_realized_pnl')) + self.value_created: MetricPattern1[Dollars] = MetricPattern1(client, _m(acc, 'value_created')) + self.value_destroyed: MetricPattern1[Dollars] = MetricPattern1(client, _m(acc, 'value_destroyed')) + +class Price111dSmaPattern: + """Pattern struct for repeated tree structure.""" + + def __init__(self, client: BrkClientBase, acc: str): + """Create pattern node with accumulated metric name.""" + self.price: MetricPattern4[Dollars] = MetricPattern4(client, acc) + self.ratio: MetricPattern4[StoredF32] = MetricPattern4(client, _m(acc, 'ratio')) + self.ratio_1m_sma: MetricPattern4[StoredF32] = MetricPattern4(client, _m(acc, 'ratio_1m_sma')) + self.ratio_1w_sma: MetricPattern4[StoredF32] = MetricPattern4(client, _m(acc, 'ratio_1w_sma')) + self.ratio_1y_sd: Ratio1ySdPattern = Ratio1ySdPattern(client, _m(acc, 'ratio_1y')) + self.ratio_2y_sd: Ratio1ySdPattern = Ratio1ySdPattern(client, _m(acc, 'ratio_2y')) + self.ratio_4y_sd: Ratio1ySdPattern = Ratio1ySdPattern(client, _m(acc, 'ratio_4y')) + self.ratio_pct1: MetricPattern4[StoredF32] = MetricPattern4(client, _m(acc, 'ratio_pct1')) + self.ratio_pct1_usd: MetricPattern4[Dollars] = MetricPattern4(client, _m(acc, 'ratio_pct1_usd')) + self.ratio_pct2: MetricPattern4[StoredF32] = MetricPattern4(client, _m(acc, 'ratio_pct2')) + self.ratio_pct2_usd: MetricPattern4[Dollars] = 
MetricPattern4(client, _m(acc, 'ratio_pct2_usd')) + self.ratio_pct5: MetricPattern4[StoredF32] = MetricPattern4(client, _m(acc, 'ratio_pct5')) + self.ratio_pct5_usd: MetricPattern4[Dollars] = MetricPattern4(client, _m(acc, 'ratio_pct5_usd')) + self.ratio_pct95: MetricPattern4[StoredF32] = MetricPattern4(client, _m(acc, 'ratio_pct95')) + self.ratio_pct95_usd: MetricPattern4[Dollars] = MetricPattern4(client, _m(acc, 'ratio_pct95_usd')) + self.ratio_pct98: MetricPattern4[StoredF32] = MetricPattern4(client, _m(acc, 'ratio_pct98')) + self.ratio_pct98_usd: MetricPattern4[Dollars] = MetricPattern4(client, _m(acc, 'ratio_pct98_usd')) + self.ratio_pct99: MetricPattern4[StoredF32] = MetricPattern4(client, _m(acc, 'ratio_pct99')) + self.ratio_pct99_usd: MetricPattern4[Dollars] = MetricPattern4(client, _m(acc, 'ratio_pct99_usd')) + self.ratio_sd: Ratio1ySdPattern = Ratio1ySdPattern(client, _m(acc, 'ratio')) + +class PercentilesPattern: + """Pattern struct for repeated tree structure.""" + + def __init__(self, client: BrkClientBase, acc: str): + """Create pattern node with accumulated metric name.""" + self.cost_basis_pct05: MetricPattern4[Dollars] = MetricPattern4(client, _m(acc, 'pct05')) + self.cost_basis_pct10: MetricPattern4[Dollars] = MetricPattern4(client, _m(acc, 'pct10')) + self.cost_basis_pct15: MetricPattern4[Dollars] = MetricPattern4(client, _m(acc, 'pct15')) + self.cost_basis_pct20: MetricPattern4[Dollars] = MetricPattern4(client, _m(acc, 'pct20')) + self.cost_basis_pct25: MetricPattern4[Dollars] = MetricPattern4(client, _m(acc, 'pct25')) + self.cost_basis_pct30: MetricPattern4[Dollars] = MetricPattern4(client, _m(acc, 'pct30')) + self.cost_basis_pct35: MetricPattern4[Dollars] = MetricPattern4(client, _m(acc, 'pct35')) + self.cost_basis_pct40: MetricPattern4[Dollars] = MetricPattern4(client, _m(acc, 'pct40')) + self.cost_basis_pct45: MetricPattern4[Dollars] = MetricPattern4(client, _m(acc, 'pct45')) + self.cost_basis_pct50: MetricPattern4[Dollars] = 
MetricPattern4(client, _m(acc, 'pct50')) + self.cost_basis_pct55: MetricPattern4[Dollars] = MetricPattern4(client, _m(acc, 'pct55')) + self.cost_basis_pct60: MetricPattern4[Dollars] = MetricPattern4(client, _m(acc, 'pct60')) + self.cost_basis_pct65: MetricPattern4[Dollars] = MetricPattern4(client, _m(acc, 'pct65')) + self.cost_basis_pct70: MetricPattern4[Dollars] = MetricPattern4(client, _m(acc, 'pct70')) + self.cost_basis_pct75: MetricPattern4[Dollars] = MetricPattern4(client, _m(acc, 'pct75')) + self.cost_basis_pct80: MetricPattern4[Dollars] = MetricPattern4(client, _m(acc, 'pct80')) + self.cost_basis_pct85: MetricPattern4[Dollars] = MetricPattern4(client, _m(acc, 'pct85')) + self.cost_basis_pct90: MetricPattern4[Dollars] = MetricPattern4(client, _m(acc, 'pct90')) + self.cost_basis_pct95: MetricPattern4[Dollars] = MetricPattern4(client, _m(acc, 'pct95')) + +class ActivePriceRatioPattern: + """Pattern struct for repeated tree structure.""" + + def __init__(self, client: BrkClientBase, acc: str): + """Create pattern node with accumulated metric name.""" + self.ratio: MetricPattern4[StoredF32] = MetricPattern4(client, acc) + self.ratio_1m_sma: MetricPattern4[StoredF32] = MetricPattern4(client, _m(acc, '1m_sma')) + self.ratio_1w_sma: MetricPattern4[StoredF32] = MetricPattern4(client, _m(acc, '1w_sma')) + self.ratio_1y_sd: Ratio1ySdPattern = Ratio1ySdPattern(client, _m(acc, '1y')) + self.ratio_2y_sd: Ratio1ySdPattern = Ratio1ySdPattern(client, _m(acc, '2y')) + self.ratio_4y_sd: Ratio1ySdPattern = Ratio1ySdPattern(client, _m(acc, '4y')) + self.ratio_pct1: MetricPattern4[StoredF32] = MetricPattern4(client, _m(acc, 'pct1')) + self.ratio_pct1_usd: MetricPattern4[Dollars] = MetricPattern4(client, _m(acc, 'pct1_usd')) + self.ratio_pct2: MetricPattern4[StoredF32] = MetricPattern4(client, _m(acc, 'pct2')) + self.ratio_pct2_usd: MetricPattern4[Dollars] = MetricPattern4(client, _m(acc, 'pct2_usd')) + self.ratio_pct5: MetricPattern4[StoredF32] = MetricPattern4(client, _m(acc, 
'pct5')) + self.ratio_pct5_usd: MetricPattern4[Dollars] = MetricPattern4(client, _m(acc, 'pct5_usd')) + self.ratio_pct95: MetricPattern4[StoredF32] = MetricPattern4(client, _m(acc, 'pct95')) + self.ratio_pct95_usd: MetricPattern4[Dollars] = MetricPattern4(client, _m(acc, 'pct95_usd')) + self.ratio_pct98: MetricPattern4[StoredF32] = MetricPattern4(client, _m(acc, 'pct98')) + self.ratio_pct98_usd: MetricPattern4[Dollars] = MetricPattern4(client, _m(acc, 'pct98_usd')) + self.ratio_pct99: MetricPattern4[StoredF32] = MetricPattern4(client, _m(acc, 'pct99')) + self.ratio_pct99_usd: MetricPattern4[Dollars] = MetricPattern4(client, _m(acc, 'pct99_usd')) + self.ratio_sd: Ratio1ySdPattern = Ratio1ySdPattern(client, acc) + +class RelativePattern2: + """Pattern struct for repeated tree structure.""" + + def __init__(self, client: BrkClientBase, acc: str): + """Create pattern node with accumulated metric name.""" + self.neg_unrealized_loss_rel_to_market_cap: MetricPattern5[StoredF32] = MetricPattern5(client, _m(acc, 'neg_unrealized_loss_rel_to_market_cap')) + self.neg_unrealized_loss_rel_to_own_market_cap: MetricPattern5[StoredF32] = MetricPattern5(client, _m(acc, 'neg_unrealized_loss_rel_to_own_market_cap')) + self.neg_unrealized_loss_rel_to_own_total_unrealized_pnl: MetricPattern5[StoredF32] = MetricPattern5(client, _m(acc, 'neg_unrealized_loss_rel_to_own_total_unrealized_pnl')) + self.net_unrealized_pnl_rel_to_market_cap: MetricPattern3[StoredF32] = MetricPattern3(client, _m(acc, 'net_unrealized_pnl_rel_to_market_cap')) + self.net_unrealized_pnl_rel_to_own_market_cap: MetricPattern3[StoredF32] = MetricPattern3(client, _m(acc, 'net_unrealized_pnl_rel_to_own_market_cap')) + self.net_unrealized_pnl_rel_to_own_total_unrealized_pnl: MetricPattern3[StoredF32] = MetricPattern3(client, _m(acc, 'net_unrealized_pnl_rel_to_own_total_unrealized_pnl')) + self.nupl: MetricPattern4[StoredF32] = MetricPattern4(client, _m(acc, 'nupl')) + self.supply_in_loss_rel_to_circulating_supply: 
MetricPattern5[StoredF64] = MetricPattern5(client, _m(acc, 'supply_in_loss_rel_to_circulating_supply')) + self.supply_in_loss_rel_to_own_supply: MetricPattern5[StoredF64] = MetricPattern5(client, _m(acc, 'supply_in_loss_rel_to_own_supply')) + self.supply_in_profit_rel_to_circulating_supply: MetricPattern5[StoredF64] = MetricPattern5(client, _m(acc, 'supply_in_profit_rel_to_circulating_supply')) + self.supply_in_profit_rel_to_own_supply: MetricPattern5[StoredF64] = MetricPattern5(client, _m(acc, 'supply_in_profit_rel_to_own_supply')) + self.supply_rel_to_circulating_supply: MetricPattern4[StoredF64] = MetricPattern4(client, _m(acc, 'supply_rel_to_circulating_supply')) + self.unrealized_loss_rel_to_market_cap: MetricPattern5[StoredF32] = MetricPattern5(client, _m(acc, 'unrealized_loss_rel_to_market_cap')) + self.unrealized_loss_rel_to_own_market_cap: MetricPattern5[StoredF32] = MetricPattern5(client, _m(acc, 'unrealized_loss_rel_to_own_market_cap')) + self.unrealized_loss_rel_to_own_total_unrealized_pnl: MetricPattern5[StoredF32] = MetricPattern5(client, _m(acc, 'unrealized_loss_rel_to_own_total_unrealized_pnl')) + self.unrealized_profit_rel_to_market_cap: MetricPattern5[StoredF32] = MetricPattern5(client, _m(acc, 'unrealized_profit_rel_to_market_cap')) + self.unrealized_profit_rel_to_own_market_cap: MetricPattern5[StoredF32] = MetricPattern5(client, _m(acc, 'unrealized_profit_rel_to_own_market_cap')) + self.unrealized_profit_rel_to_own_total_unrealized_pnl: MetricPattern5[StoredF32] = MetricPattern5(client, _m(acc, 'unrealized_profit_rel_to_own_total_unrealized_pnl')) + +class AXbtPattern: + """Pattern struct for repeated tree structure.""" + + def __init__(self, client: BrkClientBase, acc: str): + """Create pattern node with accumulated metric name.""" + self._1d_dominance: BlockCountPattern[StoredF32] = BlockCountPattern(client, _m(acc, '1d_dominance')) + self._1m_blocks_mined: MetricPattern4[StoredU32] = MetricPattern4(client, _m(acc, '1m_blocks_mined')) + 
self._1m_dominance: MetricPattern4[StoredF32] = MetricPattern4(client, _m(acc, '1m_dominance')) + self._1w_blocks_mined: MetricPattern4[StoredU32] = MetricPattern4(client, _m(acc, '1w_blocks_mined')) + self._1w_dominance: MetricPattern4[StoredF32] = MetricPattern4(client, _m(acc, '1w_dominance')) + self._1y_blocks_mined: MetricPattern4[StoredU32] = MetricPattern4(client, _m(acc, '1y_blocks_mined')) + self._1y_dominance: MetricPattern4[StoredF32] = MetricPattern4(client, _m(acc, '1y_dominance')) + self.blocks_mined: BlockCountPattern[StoredU32] = BlockCountPattern(client, _m(acc, 'blocks_mined')) + self.coinbase: UnclaimedRewardsPattern = UnclaimedRewardsPattern(client, _m(acc, 'coinbase')) + self.days_since_block: MetricPattern4[StoredU16] = MetricPattern4(client, _m(acc, 'days_since_block')) + self.dominance: BlockCountPattern[StoredF32] = BlockCountPattern(client, _m(acc, 'dominance')) + self.fee: SentPattern = SentPattern(client, _m(acc, 'fee')) + self.subsidy: SentPattern = SentPattern(client, _m(acc, 'subsidy')) + +class BitcoinPattern(Generic[T]): + """Pattern struct for repeated tree structure.""" + + def __init__(self, client: BrkClientBase, acc: str): + """Create pattern node with accumulated metric name.""" + self.average: MetricPattern2[T] = MetricPattern2(client, _m(acc, 'avg')) + self.base: MetricPattern25[T] = MetricPattern25(client, acc) + self.cumulative: MetricPattern1[T] = MetricPattern1(client, _m(acc, 'cumulative')) + self.max: MetricPattern2[T] = MetricPattern2(client, _m(acc, 'max')) + self.median: MetricPattern21[T] = MetricPattern21(client, _m(acc, 'median')) + self.min: MetricPattern2[T] = MetricPattern2(client, _m(acc, 'min')) + self.pct10: MetricPattern21[T] = MetricPattern21(client, _m(acc, 'pct10')) + self.pct25: MetricPattern21[T] = MetricPattern21(client, _m(acc, 'pct25')) + self.pct75: MetricPattern21[T] = MetricPattern21(client, _m(acc, 'pct75')) + self.pct90: MetricPattern21[T] = MetricPattern21(client, _m(acc, 'pct90')) + 
self.sum: MetricPattern2[T] = MetricPattern2(client, _m(acc, 'sum')) + +class RelativePattern: + """Pattern struct for repeated tree structure.""" + + def __init__(self, client: BrkClientBase, acc: str): + """Create pattern node with accumulated metric name.""" + self.neg_unrealized_loss_rel_to_market_cap: MetricPattern5[StoredF32] = MetricPattern5(client, _m(acc, 'neg_unrealized_loss_rel_to_market_cap')) + self.net_unrealized_pnl_rel_to_market_cap: MetricPattern3[StoredF32] = MetricPattern3(client, _m(acc, 'net_unrealized_pnl_rel_to_market_cap')) + self.nupl: MetricPattern4[StoredF32] = MetricPattern4(client, _m(acc, 'nupl')) + self.supply_in_loss_rel_to_circulating_supply: MetricPattern5[StoredF64] = MetricPattern5(client, _m(acc, 'supply_in_loss_rel_to_circulating_supply')) + self.supply_in_loss_rel_to_own_supply: MetricPattern5[StoredF64] = MetricPattern5(client, _m(acc, 'supply_in_loss_rel_to_own_supply')) + self.supply_in_profit_rel_to_circulating_supply: MetricPattern5[StoredF64] = MetricPattern5(client, _m(acc, 'supply_in_profit_rel_to_circulating_supply')) + self.supply_in_profit_rel_to_own_supply: MetricPattern5[StoredF64] = MetricPattern5(client, _m(acc, 'supply_in_profit_rel_to_own_supply')) + self.supply_rel_to_circulating_supply: MetricPattern4[StoredF64] = MetricPattern4(client, _m(acc, 'supply_rel_to_circulating_supply')) + self.unrealized_loss_rel_to_market_cap: MetricPattern5[StoredF32] = MetricPattern5(client, _m(acc, 'unrealized_loss_rel_to_market_cap')) + self.unrealized_profit_rel_to_market_cap: MetricPattern5[StoredF32] = MetricPattern5(client, _m(acc, 'unrealized_profit_rel_to_market_cap')) + +class BlockSizePattern(Generic[T]): + """Pattern struct for repeated tree structure.""" + + def __init__(self, client: BrkClientBase, acc: str): + """Create pattern node with accumulated metric name.""" + self.average: MetricPattern1[T] = MetricPattern1(client, _m(acc, 'avg')) + self.cumulative: MetricPattern1[T] = MetricPattern1(client, _m(acc, 
'cumulative')) + self.max: MetricPattern1[T] = MetricPattern1(client, _m(acc, 'max')) + self.median: MetricPattern25[T] = MetricPattern25(client, _m(acc, 'median')) + self.min: MetricPattern1[T] = MetricPattern1(client, _m(acc, 'min')) + self.pct10: MetricPattern25[T] = MetricPattern25(client, _m(acc, 'pct10')) + self.pct25: MetricPattern25[T] = MetricPattern25(client, _m(acc, 'pct25')) + self.pct75: MetricPattern25[T] = MetricPattern25(client, _m(acc, 'pct75')) + self.pct90: MetricPattern25[T] = MetricPattern25(client, _m(acc, 'pct90')) + self.sum: MetricPattern1[T] = MetricPattern1(client, _m(acc, 'sum')) + +class UnrealizedPattern: + """Pattern struct for repeated tree structure.""" + + def __init__(self, client: BrkClientBase, acc: str): + """Create pattern node with accumulated metric name.""" + self.neg_unrealized_loss: MetricPattern3[Dollars] = MetricPattern3(client, _m(acc, 'neg_unrealized_loss')) + self.net_unrealized_pnl: MetricPattern3[Dollars] = MetricPattern3(client, _m(acc, 'net_unrealized_pnl')) + self.supply_in_loss: SupplyPattern2 = SupplyPattern2(client, _m(acc, 'supply_in_loss')) + self.supply_in_loss_value: SupplyValuePattern = SupplyValuePattern(client, _m(acc, 'supply_in_loss')) + self.supply_in_profit: SupplyPattern2 = SupplyPattern2(client, _m(acc, 'supply_in_profit')) + self.supply_in_profit_value: SupplyValuePattern = SupplyValuePattern(client, _m(acc, 'supply_in_profit')) + self.total_unrealized_pnl: MetricPattern3[Dollars] = MetricPattern3(client, _m(acc, 'total_unrealized_pnl')) + self.unrealized_loss: MetricPattern3[Dollars] = MetricPattern3(client, _m(acc, 'unrealized_loss')) + self.unrealized_profit: MetricPattern3[Dollars] = MetricPattern3(client, _m(acc, 'unrealized_profit')) + +class Constant0Pattern(Generic[T]): + """Pattern struct for repeated tree structure.""" + + def __init__(self, client: BrkClientBase, acc: str): + """Create pattern node with accumulated metric name.""" + self.dateindex: MetricPattern21[T] = 
MetricPattern21(client, acc) + self.decadeindex: MetricPattern22[T] = MetricPattern22(client, acc) + self.height: MetricPattern25[T] = MetricPattern25(client, acc) + self.monthindex: MetricPattern27[T] = MetricPattern27(client, acc) + self.quarterindex: MetricPattern39[T] = MetricPattern39(client, acc) + self.semesterindex: MetricPattern40[T] = MetricPattern40(client, acc) + self.weekindex: MetricPattern43[T] = MetricPattern43(client, acc) + self.yearindex: MetricPattern44[T] = MetricPattern44(client, acc) + +class AddresstypeToHeightToAddrCountPattern(Generic[T]): + """Pattern struct for repeated tree structure.""" + + def __init__(self, client: BrkClientBase, acc: str): + """Create pattern node with accumulated metric name.""" + self.p2a: MetricPattern25[T] = MetricPattern25(client, (f'p2a_{acc}' if acc else 'p2a')) + self.p2pk33: MetricPattern25[T] = MetricPattern25(client, (f'p2pk33_{acc}' if acc else 'p2pk33')) + self.p2pk65: MetricPattern25[T] = MetricPattern25(client, (f'p2pk65_{acc}' if acc else 'p2pk65')) + self.p2pkh: MetricPattern25[T] = MetricPattern25(client, (f'p2pkh_{acc}' if acc else 'p2pkh')) + self.p2sh: MetricPattern25[T] = MetricPattern25(client, (f'p2sh_{acc}' if acc else 'p2sh')) + self.p2tr: MetricPattern25[T] = MetricPattern25(client, (f'p2tr_{acc}' if acc else 'p2tr')) + self.p2wpkh: MetricPattern25[T] = MetricPattern25(client, (f'p2wpkh_{acc}' if acc else 'p2wpkh')) + self.p2wsh: MetricPattern25[T] = MetricPattern25(client, (f'p2wsh_{acc}' if acc else 'p2wsh')) + +class BlockIntervalPattern(Generic[T]): + """Pattern struct for repeated tree structure.""" + + def __init__(self, client: BrkClientBase, acc: str): + """Create pattern node with accumulated metric name.""" + self.average: MetricPattern1[T] = MetricPattern1(client, _m(acc, 'avg')) + self.max: MetricPattern1[T] = MetricPattern1(client, _m(acc, 'max')) + self.median: MetricPattern25[T] = MetricPattern25(client, _m(acc, 'median')) + self.min: MetricPattern1[T] = 
MetricPattern1(client, _m(acc, 'min')) + self.pct10: MetricPattern25[T] = MetricPattern25(client, _m(acc, 'pct10')) + self.pct25: MetricPattern25[T] = MetricPattern25(client, _m(acc, 'pct25')) + self.pct75: MetricPattern25[T] = MetricPattern25(client, _m(acc, 'pct75')) + self.pct90: MetricPattern25[T] = MetricPattern25(client, _m(acc, 'pct90')) + +class _0satsPattern: + """Pattern struct for repeated tree structure.""" + + def __init__(self, client: BrkClientBase, acc: str): + """Create pattern node with accumulated metric name.""" + self.activity: ActivityPattern2 = ActivityPattern2(client, acc) + self.addr_count: MetricPattern1[StoredU64] = MetricPattern1(client, _m(acc, 'addr_count')) + self.cost_basis: CostBasisPattern = CostBasisPattern(client, acc) + self.realized: RealizedPattern = RealizedPattern(client, acc) + self.relative: RelativePattern = RelativePattern(client, acc) + self.supply: SupplyPattern3 = SupplyPattern3(client, acc) + self.unrealized: UnrealizedPattern = UnrealizedPattern(client, acc) + +class _0satsPattern2: + """Pattern struct for repeated tree structure.""" + + def __init__(self, client: BrkClientBase, acc: str): + """Create pattern node with accumulated metric name.""" + self.activity: ActivityPattern2 = ActivityPattern2(client, acc) + self.cost_basis: CostBasisPattern = CostBasisPattern(client, acc) + self.realized: RealizedPattern = RealizedPattern(client, acc) + self.relative: RelativePattern = RelativePattern(client, acc) + self.supply: SupplyPattern3 = SupplyPattern3(client, acc) + self.unrealized: UnrealizedPattern = UnrealizedPattern(client, acc) + +class _10yTo12yPattern: + """Pattern struct for repeated tree structure.""" + + def __init__(self, client: BrkClientBase, acc: str): + """Create pattern node with accumulated metric name.""" + self.activity: ActivityPattern2 = ActivityPattern2(client, acc) + self.cost_basis: CostBasisPattern2 = CostBasisPattern2(client, acc) + self.realized: RealizedPattern2 = RealizedPattern2(client, 
acc) + self.relative: RelativePattern2 = RelativePattern2(client, acc) + self.supply: SupplyPattern3 = SupplyPattern3(client, acc) + self.unrealized: UnrealizedPattern = UnrealizedPattern(client, acc) + +class UpTo1dPattern: + """Pattern struct for repeated tree structure.""" + + def __init__(self, client: BrkClientBase, acc: str): + """Create pattern node with accumulated metric name.""" + self.activity: ActivityPattern2 = ActivityPattern2(client, acc) + self.cost_basis: CostBasisPattern2 = CostBasisPattern2(client, acc) + self.realized: RealizedPattern3 = RealizedPattern3(client, acc) + self.relative: RelativePattern2 = RelativePattern2(client, acc) + self.supply: SupplyPattern3 = SupplyPattern3(client, acc) + self.unrealized: UnrealizedPattern = UnrealizedPattern(client, acc) + +class SegwitAdoptionPattern(Generic[T]): + """Pattern struct for repeated tree structure.""" + + def __init__(self, client: BrkClientBase, acc: str): + """Create pattern node with accumulated metric name.""" + self.average: MetricPattern2[T] = MetricPattern2(client, _m(acc, 'avg')) + self.base: MetricPattern25[T] = MetricPattern25(client, acc) + self.cumulative: MetricPattern1[T] = MetricPattern1(client, _m(acc, 'cumulative')) + self.max: MetricPattern2[T] = MetricPattern2(client, _m(acc, 'max')) + self.min: MetricPattern2[T] = MetricPattern2(client, _m(acc, 'min')) + self.sum: MetricPattern2[T] = MetricPattern2(client, _m(acc, 'sum')) + +class ActivityPattern2: + """Pattern struct for repeated tree structure.""" + + def __init__(self, client: BrkClientBase, acc: str): + """Create pattern node with accumulated metric name.""" + self.coinblocks_destroyed: BlockCountPattern[StoredF64] = BlockCountPattern(client, _m(acc, 'coinblocks_destroyed')) + self.coindays_destroyed: BlockCountPattern[StoredF64] = BlockCountPattern(client, _m(acc, 'coindays_destroyed')) + self.satblocks_destroyed: MetricPattern25[Sats] = MetricPattern25(client, _m(acc, 'satblocks_destroyed')) + self.satdays_destroyed: 
MetricPattern25[Sats] = MetricPattern25(client, _m(acc, 'satdays_destroyed')) + self.sent: SentPattern = SentPattern(client, _m(acc, 'sent')) + +class SupplyPattern3: + """Pattern struct for repeated tree structure.""" + + def __init__(self, client: BrkClientBase, acc: str): + """Create pattern node with accumulated metric name.""" + self.supply: SupplyPattern2 = SupplyPattern2(client, _m(acc, 'supply')) + self.supply_half: ActiveSupplyPattern = ActiveSupplyPattern(client, _m(acc, 'supply_half')) + self.supply_half_value: ActiveSupplyPattern = ActiveSupplyPattern(client, _m(acc, 'supply_half')) + self.supply_value: SupplyValuePattern = SupplyValuePattern(client, _m(acc, 'supply')) + self.utxo_count: MetricPattern1[StoredU64] = MetricPattern1(client, _m(acc, 'utxo_count')) + +class SentPattern: + """Pattern struct for repeated tree structure.""" + + def __init__(self, client: BrkClientBase, acc: str): + """Create pattern node with accumulated metric name.""" + self.base: MetricPattern25[Sats] = MetricPattern25(client, acc) + self.bitcoin: BlockCountPattern[Bitcoin] = BlockCountPattern(client, _m(acc, 'btc')) + self.dollars: BlockCountPattern[Dollars] = BlockCountPattern(client, _m(acc, 'usd')) + self.sats: SatsPattern = SatsPattern(client, acc) + +class OpreturnPattern: + """Pattern struct for repeated tree structure.""" + + def __init__(self, client: BrkClientBase, acc: str): + """Create pattern node with accumulated metric name.""" + self.base: MetricPattern25[Sats] = MetricPattern25(client, acc) + self.bitcoin: BitcoinPattern2[Bitcoin] = BitcoinPattern2(client, _m(acc, 'btc')) + self.dollars: BitcoinPattern2[Dollars] = BitcoinPattern2(client, _m(acc, 'usd')) + self.sats: SatsPattern4 = SatsPattern4(client, acc) + +class SupplyPattern2: + """Pattern struct for repeated tree structure.""" + + def __init__(self, client: BrkClientBase, acc: str): + """Create pattern node with accumulated metric name.""" + self.base: MetricPattern25[Sats] = MetricPattern25(client, 
acc) + self.bitcoin: MetricPattern4[Bitcoin] = MetricPattern4(client, _m(acc, 'btc')) + self.dollars: MetricPattern4[Dollars] = MetricPattern4(client, _m(acc, 'usd')) + self.sats: MetricPattern4[Sats] = MetricPattern4(client, acc) + +class UnclaimedRewardsPattern: + """Pattern struct for repeated tree structure.""" + + def __init__(self, client: BrkClientBase, acc: str): + """Create pattern node with accumulated metric name.""" + self.bitcoin: BlockCountPattern[Bitcoin] = BlockCountPattern(client, _m(acc, 'btc')) + self.dollars: BlockCountPattern[Dollars] = BlockCountPattern(client, _m(acc, 'usd')) + self.sats: BlockCountPattern[Sats] = BlockCountPattern(client, acc) + +class CoinbasePattern: + """Pattern struct for repeated tree structure.""" + + def __init__(self, client: BrkClientBase, acc: str): + """Create pattern node with accumulated metric name.""" + self.bitcoin: BitcoinPattern[Bitcoin] = BitcoinPattern(client, _m(acc, 'btc')) + self.dollars: BitcoinPattern[Dollars] = BitcoinPattern(client, _m(acc, 'usd')) + self.sats: BitcoinPattern[Sats] = BitcoinPattern(client, acc) + +class ActiveSupplyPattern: + """Pattern struct for repeated tree structure.""" + + def __init__(self, client: BrkClientBase, acc: str): + """Create pattern node with accumulated metric name.""" + self.bitcoin: MetricPattern1[Bitcoin] = MetricPattern1(client, _m(acc, 'btc')) + self.dollars: MetricPattern1[Dollars] = MetricPattern1(client, _m(acc, 'usd')) + self.sats: MetricPattern1[Sats] = MetricPattern1(client, acc) + +class CostBasisPattern2: + """Pattern struct for repeated tree structure.""" + + def __init__(self, client: BrkClientBase, acc: str): + """Create pattern node with accumulated metric name.""" + self.max_cost_basis: MetricPattern1[Dollars] = MetricPattern1(client, _m(acc, 'max_cost_basis')) + self.min_cost_basis: MetricPattern1[Dollars] = MetricPattern1(client, _m(acc, 'min_cost_basis')) + self.percentiles: PercentilesPattern = PercentilesPattern(client, _m(acc, 
'cost_basis')) + +class BlockCountPattern(Generic[T]): + """Pattern struct for repeated tree structure.""" + + def __init__(self, client: BrkClientBase, acc: str): + """Create pattern node with accumulated metric name.""" + self.base: MetricPattern25[T] = MetricPattern25(client, acc) + self.cumulative: MetricPattern1[T] = MetricPattern1(client, _m(acc, 'cumulative')) + self.sum: MetricPattern2[T] = MetricPattern2(client, _m(acc, 'sum')) + +class BitcoinPattern2(Generic[T]): + """Pattern struct for repeated tree structure.""" + + def __init__(self, client: BrkClientBase, acc: str): + """Create pattern node with accumulated metric name.""" + self.base: MetricPattern25[T] = MetricPattern25(client, acc) + self.cumulative: MetricPattern1[T] = MetricPattern1(client, _m(acc, 'cumulative')) + self.last: MetricPattern2[T] = MetricPattern2(client, acc) + +class SatsPattern4: + """Pattern struct for repeated tree structure.""" + + def __init__(self, client: BrkClientBase, acc: str): + """Create pattern node with accumulated metric name.""" + self.cumulative: MetricPattern1[Sats] = MetricPattern1(client, _m(acc, 'cumulative')) + self.last: MetricPattern2[Sats] = MetricPattern2(client, acc) + +class CostBasisPattern: + """Pattern struct for repeated tree structure.""" + + def __init__(self, client: BrkClientBase, acc: str): + """Create pattern node with accumulated metric name.""" + self.max_cost_basis: MetricPattern1[Dollars] = MetricPattern1(client, _m(acc, 'max_cost_basis')) + self.min_cost_basis: MetricPattern1[Dollars] = MetricPattern1(client, _m(acc, 'min_cost_basis')) + +class SatsPattern: + """Pattern struct for repeated tree structure.""" + + def __init__(self, client: BrkClientBase, acc: str): + """Create pattern node with accumulated metric name.""" + self.cumulative: MetricPattern1[Sats] = MetricPattern1(client, _m(acc, 'cumulative')) + self.sum: MetricPattern2[Sats] = MetricPattern2(client, acc) + +class _1dReturns1mSdPattern: + """Pattern struct for repeated tree 
structure.""" + + def __init__(self, client: BrkClientBase, acc: str): + """Create pattern node with accumulated metric name.""" + self.sd: MetricPattern4[StoredF32] = MetricPattern4(client, _m(acc, 'sd')) + self.sma: MetricPattern4[StoredF32] = MetricPattern4(client, _m(acc, 'sma')) + +class SupplyValuePattern: + """Pattern struct for repeated tree structure.""" + + def __init__(self, client: BrkClientBase, acc: str): + """Create pattern node with accumulated metric name.""" + self.bitcoin: MetricPattern25[Bitcoin] = MetricPattern25(client, _m(acc, 'btc')) + self.dollars: MetricPattern25[Dollars] = MetricPattern25(client, _m(acc, 'usd')) + +class TotalRealizedPnlPattern(Generic[T]): + """Pattern struct for repeated tree structure.""" + + def __init__(self, client: BrkClientBase, acc: str): + """Create pattern node with accumulated metric name.""" + self.base: MetricPattern25[T] = MetricPattern25(client, acc) + self.sum: MetricPattern2[T] = MetricPattern2(client, _m(acc, 'sum')) + +class RealizedPriceExtraPattern: + """Pattern struct for repeated tree structure.""" + + def __init__(self, client: BrkClientBase, acc: str): + """Create pattern node with accumulated metric name.""" + self.ratio: MetricPattern4[StoredF32] = MetricPattern4(client, _m(acc, 'ratio')) + +# Catalog tree classes + +class CatalogTree: + """Catalog tree node.""" + + def __init__(self, client: BrkClientBase, base_path: str = ''): + self.computed: CatalogTree_Computed = CatalogTree_Computed(client, f'{base_path}_computed') + self.indexed: CatalogTree_Indexed = CatalogTree_Indexed(client, f'{base_path}_indexed') + +class CatalogTree_Computed: + """Catalog tree node.""" + + def __init__(self, client: BrkClientBase, base_path: str = ''): + self.blocks: CatalogTree_Computed_Blocks = CatalogTree_Computed_Blocks(client, f'{base_path}_blocks') + self.cointime: CatalogTree_Computed_Cointime = CatalogTree_Computed_Cointime(client, f'{base_path}_cointime') + self.constants: CatalogTree_Computed_Constants = 
CatalogTree_Computed_Constants(client, f'{base_path}_constants') + self.distribution: CatalogTree_Computed_Distribution = CatalogTree_Computed_Distribution(client, f'{base_path}_distribution') + self.indexes: CatalogTree_Computed_Indexes = CatalogTree_Computed_Indexes(client, f'{base_path}_indexes') + self.inputs: CatalogTree_Computed_Inputs = CatalogTree_Computed_Inputs(client, f'{base_path}_inputs') + self.market: CatalogTree_Computed_Market = CatalogTree_Computed_Market(client, f'{base_path}_market') + self.outputs: CatalogTree_Computed_Outputs = CatalogTree_Computed_Outputs(client, f'{base_path}_outputs') + self.pools: CatalogTree_Computed_Pools = CatalogTree_Computed_Pools(client, f'{base_path}_pools') + self.positions: CatalogTree_Computed_Positions = CatalogTree_Computed_Positions(client, f'{base_path}_positions') + self.price: CatalogTree_Computed_Price = CatalogTree_Computed_Price(client, f'{base_path}_price') + self.scripts: CatalogTree_Computed_Scripts = CatalogTree_Computed_Scripts(client, f'{base_path}_scripts') + self.supply: CatalogTree_Computed_Supply = CatalogTree_Computed_Supply(client, f'{base_path}_supply') + self.transactions: CatalogTree_Computed_Transactions = CatalogTree_Computed_Transactions(client, f'{base_path}_transactions') + +class CatalogTree_Computed_Blocks: + """Catalog tree node.""" + + def __init__(self, client: BrkClientBase, base_path: str = ''): + self.count: CatalogTree_Computed_Blocks_Count = CatalogTree_Computed_Blocks_Count(client, f'{base_path}_count') + self.difficulty: CatalogTree_Computed_Blocks_Difficulty = CatalogTree_Computed_Blocks_Difficulty(client, f'{base_path}_difficulty') + self.halving: CatalogTree_Computed_Blocks_Halving = CatalogTree_Computed_Blocks_Halving(client, f'{base_path}_halving') + self.interval: CatalogTree_Computed_Blocks_Interval = CatalogTree_Computed_Blocks_Interval(client, f'{base_path}_interval') + self.mining: CatalogTree_Computed_Blocks_Mining = CatalogTree_Computed_Blocks_Mining(client, 
f'{base_path}_mining') + self.rewards: CatalogTree_Computed_Blocks_Rewards = CatalogTree_Computed_Blocks_Rewards(client, f'{base_path}_rewards') + self.size: CatalogTree_Computed_Blocks_Size = CatalogTree_Computed_Blocks_Size(client, f'{base_path}_size') + self.time: CatalogTree_Computed_Blocks_Time = CatalogTree_Computed_Blocks_Time(client, f'{base_path}_time') + self.weight: CatalogTree_Computed_Blocks_Weight = CatalogTree_Computed_Blocks_Weight(client, f'{base_path}_weight') + +class CatalogTree_Computed_Blocks_Count: + """Catalog tree node.""" + + def __init__(self, client: BrkClientBase, base_path: str = ''): + self._1m_block_count: MetricPattern4[StoredU32] = MetricPattern4(client, f'{base_path}_1m_block_count') + self._1w_block_count: MetricPattern4[StoredU32] = MetricPattern4(client, f'{base_path}_1w_block_count') + self._1y_block_count: MetricPattern4[StoredU32] = MetricPattern4(client, f'{base_path}_1y_block_count') + self._24h_block_count: MetricPattern25[StoredU32] = MetricPattern25(client, f'{base_path}_24h_block_count') + self.block_count: BlockCountPattern[StoredU32] = BlockCountPattern(client, 'block_count') + self.block_count_target: MetricPattern4[StoredU64] = MetricPattern4(client, f'{base_path}_block_count_target') + +class CatalogTree_Computed_Blocks_Difficulty: + """Catalog tree node.""" + + def __init__(self, client: BrkClientBase, base_path: str = ''): + self.blocks_before_next_difficulty_adjustment: MetricPattern1[StoredU32] = MetricPattern1(client, f'{base_path}_blocks_before_next_difficulty_adjustment') + self.days_before_next_difficulty_adjustment: MetricPattern1[StoredF32] = MetricPattern1(client, f'{base_path}_days_before_next_difficulty_adjustment') + self.difficultyepoch: MetricPattern4[DifficultyEpoch] = MetricPattern4(client, f'{base_path}_difficultyepoch') + +class CatalogTree_Computed_Blocks_Halving: + """Catalog tree node.""" + + def __init__(self, client: BrkClientBase, base_path: str = ''): + self.blocks_before_next_halving: 
MetricPattern1[StoredU32] = MetricPattern1(client, f'{base_path}_blocks_before_next_halving') + self.days_before_next_halving: MetricPattern1[StoredF32] = MetricPattern1(client, f'{base_path}_days_before_next_halving') + self.halvingepoch: MetricPattern4[HalvingEpoch] = MetricPattern4(client, f'{base_path}_halvingepoch') + +class CatalogTree_Computed_Blocks_Interval: + """Catalog tree node.""" + + def __init__(self, client: BrkClientBase, base_path: str = ''): + self.block_interval: BlockIntervalPattern[Timestamp] = BlockIntervalPattern(client, 'block_interval') + self.interval: MetricPattern25[Timestamp] = MetricPattern25(client, f'{base_path}_interval') + +class CatalogTree_Computed_Blocks_Mining: + """Catalog tree node.""" + + def __init__(self, client: BrkClientBase, base_path: str = ''): + self.difficulty: MetricPattern2[StoredF64] = MetricPattern2(client, f'{base_path}_difficulty') + self.difficulty_adjustment: MetricPattern1[StoredF32] = MetricPattern1(client, f'{base_path}_difficulty_adjustment') + self.difficulty_as_hash: MetricPattern1[StoredF32] = MetricPattern1(client, f'{base_path}_difficulty_as_hash') + self.hash_price_phs: MetricPattern1[StoredF32] = MetricPattern1(client, f'{base_path}_hash_price_phs') + self.hash_price_phs_min: MetricPattern1[StoredF32] = MetricPattern1(client, f'{base_path}_hash_price_phs_min') + self.hash_price_rebound: MetricPattern1[StoredF32] = MetricPattern1(client, f'{base_path}_hash_price_rebound') + self.hash_price_ths: MetricPattern1[StoredF32] = MetricPattern1(client, f'{base_path}_hash_price_ths') + self.hash_price_ths_min: MetricPattern1[StoredF32] = MetricPattern1(client, f'{base_path}_hash_price_ths_min') + self.hash_rate: MetricPattern1[StoredF64] = MetricPattern1(client, f'{base_path}_hash_rate') + self.hash_rate_1m_sma: MetricPattern4[StoredF32] = MetricPattern4(client, f'{base_path}_hash_rate_1m_sma') + self.hash_rate_1w_sma: MetricPattern4[StoredF64] = MetricPattern4(client, f'{base_path}_hash_rate_1w_sma') + 
self.hash_rate_1y_sma: MetricPattern4[StoredF32] = MetricPattern4(client, f'{base_path}_hash_rate_1y_sma') + self.hash_rate_2m_sma: MetricPattern4[StoredF32] = MetricPattern4(client, f'{base_path}_hash_rate_2m_sma') + self.hash_value_phs: MetricPattern1[StoredF32] = MetricPattern1(client, f'{base_path}_hash_value_phs') + self.hash_value_phs_min: MetricPattern1[StoredF32] = MetricPattern1(client, f'{base_path}_hash_value_phs_min') + self.hash_value_rebound: MetricPattern1[StoredF32] = MetricPattern1(client, f'{base_path}_hash_value_rebound') + self.hash_value_ths: MetricPattern1[StoredF32] = MetricPattern1(client, f'{base_path}_hash_value_ths') + self.hash_value_ths_min: MetricPattern1[StoredF32] = MetricPattern1(client, f'{base_path}_hash_value_ths_min') + +class CatalogTree_Computed_Blocks_Rewards: + """Catalog tree node.""" + + def __init__(self, client: BrkClientBase, base_path: str = ''): + self._24h_coinbase_sum: MetricPattern25[Sats] = MetricPattern25(client, f'{base_path}_24h_coinbase_sum') + self._24h_coinbase_usd_sum: MetricPattern25[Dollars] = MetricPattern25(client, f'{base_path}_24h_coinbase_usd_sum') + self.coinbase: CoinbasePattern = CoinbasePattern(client, 'coinbase') + self.fee_dominance: MetricPattern21[StoredF32] = MetricPattern21(client, f'{base_path}_fee_dominance') + self.subsidy: CoinbasePattern = CoinbasePattern(client, 'subsidy') + self.subsidy_dominance: MetricPattern21[StoredF32] = MetricPattern21(client, f'{base_path}_subsidy_dominance') + self.subsidy_usd_1y_sma: MetricPattern4[Dollars] = MetricPattern4(client, f'{base_path}_subsidy_usd_1y_sma') + self.unclaimed_rewards: UnclaimedRewardsPattern = UnclaimedRewardsPattern(client, 'unclaimed_rewards') + +class CatalogTree_Computed_Blocks_Size: + """Catalog tree node.""" + + def __init__(self, client: BrkClientBase, base_path: str = ''): + self.block_size: BlockSizePattern[StoredU64] = BlockSizePattern(client, 'block_size') + self.block_vbytes: BlockSizePattern[StoredU64] = 
BlockSizePattern(client, 'block_vbytes') + self.vbytes: MetricPattern25[StoredU64] = MetricPattern25(client, f'{base_path}_vbytes') + +class CatalogTree_Computed_Blocks_Time: + """Catalog tree node.""" + + def __init__(self, client: BrkClientBase, base_path: str = ''): + self.date: MetricPattern25[Date] = MetricPattern25(client, f'{base_path}_date') + self.date_fixed: MetricPattern25[Date] = MetricPattern25(client, f'{base_path}_date_fixed') + self.timestamp: MetricPattern2[Timestamp] = MetricPattern2(client, f'{base_path}_timestamp') + self.timestamp_fixed: MetricPattern25[Timestamp] = MetricPattern25(client, f'{base_path}_timestamp_fixed') + +class CatalogTree_Computed_Blocks_Weight: + """Catalog tree node.""" + + def __init__(self, client: BrkClientBase, base_path: str = ''): + self.block_fullness: BitcoinPattern[StoredF32] = BitcoinPattern(client, 'block_fullness') + self.block_weight: BlockSizePattern[Weight] = BlockSizePattern(client, 'block_weight') + +class CatalogTree_Computed_Cointime: + """Catalog tree node.""" + + def __init__(self, client: BrkClientBase, base_path: str = ''): + self.activity: CatalogTree_Computed_Cointime_Activity = CatalogTree_Computed_Cointime_Activity(client, f'{base_path}_activity') + self.adjusted: CatalogTree_Computed_Cointime_Adjusted = CatalogTree_Computed_Cointime_Adjusted(client, f'{base_path}_adjusted') + self.cap: CatalogTree_Computed_Cointime_Cap = CatalogTree_Computed_Cointime_Cap(client, f'{base_path}_cap') + self.pricing: CatalogTree_Computed_Cointime_Pricing = CatalogTree_Computed_Cointime_Pricing(client, f'{base_path}_pricing') + self.supply: CatalogTree_Computed_Cointime_Supply = CatalogTree_Computed_Cointime_Supply(client, f'{base_path}_supply') + self.value: CatalogTree_Computed_Cointime_Value = CatalogTree_Computed_Cointime_Value(client, f'{base_path}_value') + +class CatalogTree_Computed_Cointime_Activity: + """Catalog tree node.""" + + def __init__(self, client: BrkClientBase, base_path: str = ''): + 
self.activity_to_vaultedness_ratio: MetricPattern1[StoredF64] = MetricPattern1(client, f'{base_path}_activity_to_vaultedness_ratio') + self.coinblocks_created: BlockCountPattern[StoredF64] = BlockCountPattern(client, 'coinblocks_created') + self.coinblocks_stored: BlockCountPattern[StoredF64] = BlockCountPattern(client, 'coinblocks_stored') + self.liveliness: MetricPattern1[StoredF64] = MetricPattern1(client, f'{base_path}_liveliness') + self.vaultedness: MetricPattern1[StoredF64] = MetricPattern1(client, f'{base_path}_vaultedness') + +class CatalogTree_Computed_Cointime_Adjusted: + """Catalog tree node.""" + + def __init__(self, client: BrkClientBase, base_path: str = ''): + self.cointime_adj_inflation_rate: MetricPattern4[StoredF32] = MetricPattern4(client, f'{base_path}_cointime_adj_inflation_rate') + self.cointime_adj_tx_btc_velocity: MetricPattern4[StoredF64] = MetricPattern4(client, f'{base_path}_cointime_adj_tx_btc_velocity') + self.cointime_adj_tx_usd_velocity: MetricPattern4[StoredF64] = MetricPattern4(client, f'{base_path}_cointime_adj_tx_usd_velocity') + +class CatalogTree_Computed_Cointime_Cap: + """Catalog tree node.""" + + def __init__(self, client: BrkClientBase, base_path: str = ''): + self.active_cap: MetricPattern1[Dollars] = MetricPattern1(client, f'{base_path}_active_cap') + self.cointime_cap: MetricPattern1[Dollars] = MetricPattern1(client, f'{base_path}_cointime_cap') + self.investor_cap: MetricPattern1[Dollars] = MetricPattern1(client, f'{base_path}_investor_cap') + self.thermo_cap: MetricPattern1[Dollars] = MetricPattern1(client, f'{base_path}_thermo_cap') + self.vaulted_cap: MetricPattern1[Dollars] = MetricPattern1(client, f'{base_path}_vaulted_cap') + +class CatalogTree_Computed_Cointime_Pricing: + """Catalog tree node.""" + + def __init__(self, client: BrkClientBase, base_path: str = ''): + self.active_price: MetricPattern1[Dollars] = MetricPattern1(client, f'{base_path}_active_price') + self.active_price_ratio: ActivePriceRatioPattern = 
ActivePriceRatioPattern(client, 'active_price_ratio') + self.cointime_price: MetricPattern1[Dollars] = MetricPattern1(client, f'{base_path}_cointime_price') + self.cointime_price_ratio: ActivePriceRatioPattern = ActivePriceRatioPattern(client, 'cointime_price_ratio') + self.true_market_mean: MetricPattern1[Dollars] = MetricPattern1(client, f'{base_path}_true_market_mean') + self.true_market_mean_ratio: ActivePriceRatioPattern = ActivePriceRatioPattern(client, 'true_market_mean_ratio') + self.vaulted_price: MetricPattern1[Dollars] = MetricPattern1(client, f'{base_path}_vaulted_price') + self.vaulted_price_ratio: ActivePriceRatioPattern = ActivePriceRatioPattern(client, 'vaulted_price_ratio') + +class CatalogTree_Computed_Cointime_Supply: + """Catalog tree node.""" + + def __init__(self, client: BrkClientBase, base_path: str = ''): + self.active_supply: ActiveSupplyPattern = ActiveSupplyPattern(client, 'active_supply') + self.vaulted_supply: ActiveSupplyPattern = ActiveSupplyPattern(client, 'vaulted_supply') + +class CatalogTree_Computed_Cointime_Value: + """Catalog tree node.""" + + def __init__(self, client: BrkClientBase, base_path: str = ''): + self.cointime_value_created: BlockCountPattern[StoredF64] = BlockCountPattern(client, 'cointime_value_created') + self.cointime_value_destroyed: BlockCountPattern[StoredF64] = BlockCountPattern(client, 'cointime_value_destroyed') + self.cointime_value_stored: BlockCountPattern[StoredF64] = BlockCountPattern(client, 'cointime_value_stored') + +class CatalogTree_Computed_Constants: + """Catalog tree node.""" + + def __init__(self, client: BrkClientBase, base_path: str = ''): + self.constant_0: Constant0Pattern[StoredU16] = Constant0Pattern(client, 'constant_0') + self.constant_1: Constant0Pattern[StoredU16] = Constant0Pattern(client, 'constant_1') + self.constant_100: Constant0Pattern[StoredU16] = Constant0Pattern(client, 'constant_100') + self.constant_2: Constant0Pattern[StoredU16] = Constant0Pattern(client, 'constant_2') 
+ self.constant_3: Constant0Pattern[StoredU16] = Constant0Pattern(client, 'constant_3') + self.constant_38_2: Constant0Pattern[StoredF32] = Constant0Pattern(client, 'constant_38_2') + self.constant_4: Constant0Pattern[StoredU16] = Constant0Pattern(client, 'constant_4') + self.constant_50: Constant0Pattern[StoredU16] = Constant0Pattern(client, 'constant_50') + self.constant_600: Constant0Pattern[StoredU16] = Constant0Pattern(client, 'constant_600') + self.constant_61_8: Constant0Pattern[StoredF32] = Constant0Pattern(client, 'constant_61_8') + self.constant_minus_1: Constant0Pattern[StoredI16] = Constant0Pattern(client, 'constant_minus_1') + self.constant_minus_2: Constant0Pattern[StoredI16] = Constant0Pattern(client, 'constant_minus_2') + self.constant_minus_3: Constant0Pattern[StoredI16] = Constant0Pattern(client, 'constant_minus_3') + self.constant_minus_4: Constant0Pattern[StoredI16] = Constant0Pattern(client, 'constant_minus_4') + +class CatalogTree_Computed_Distribution: + """Catalog tree node.""" + + def __init__(self, client: BrkClientBase, base_path: str = ''): + self.addr_count: MetricPattern1[StoredU64] = MetricPattern1(client, f'{base_path}_addr_count') + self.address_cohorts: CatalogTree_Computed_Distribution_AddressCohorts = CatalogTree_Computed_Distribution_AddressCohorts(client, f'{base_path}_address_cohorts') + self.addresses_data: CatalogTree_Computed_Distribution_AddressesData = CatalogTree_Computed_Distribution_AddressesData(client, f'{base_path}_addresses_data') + self.addresstype_to_height_to_addr_count: AddresstypeToHeightToAddrCountPattern[StoredU64] = AddresstypeToHeightToAddrCountPattern(client, '') + self.addresstype_to_height_to_empty_addr_count: AddresstypeToHeightToAddrCountPattern[StoredU64] = AddresstypeToHeightToAddrCountPattern(client, '') + self.addresstype_to_indexes_to_addr_count: AddresstypeToHeightToAddrCountPattern[StoredU64] = AddresstypeToHeightToAddrCountPattern(client, '') + self.addresstype_to_indexes_to_empty_addr_count: 
AddresstypeToHeightToAddrCountPattern[StoredU64] = AddresstypeToHeightToAddrCountPattern(client, '') + self.any_address_indexes: AddresstypeToHeightToAddrCountPattern[AnyAddressIndex] = AddresstypeToHeightToAddrCountPattern(client, 'anyaddressindex') + self.chain_state: MetricPattern25[SupplyState] = MetricPattern25(client, f'{base_path}_chain_state') + self.empty_addr_count: MetricPattern1[StoredU64] = MetricPattern1(client, f'{base_path}_empty_addr_count') + self.emptyaddressindex: MetricPattern46[EmptyAddressIndex] = MetricPattern46(client, f'{base_path}_emptyaddressindex') + self.loadedaddressindex: MetricPattern45[LoadedAddressIndex] = MetricPattern45(client, f'{base_path}_loadedaddressindex') + self.utxo_cohorts: CatalogTree_Computed_Distribution_UtxoCohorts = CatalogTree_Computed_Distribution_UtxoCohorts(client, f'{base_path}_utxo_cohorts') + +class CatalogTree_Computed_Distribution_AddressCohorts: + """Catalog tree node.""" + + def __init__(self, client: BrkClientBase, base_path: str = ''): + self.amount_range: CatalogTree_Computed_Distribution_AddressCohorts_AmountRange = CatalogTree_Computed_Distribution_AddressCohorts_AmountRange(client, f'{base_path}_amount_range') + self.ge_amount: CatalogTree_Computed_Distribution_AddressCohorts_GeAmount = CatalogTree_Computed_Distribution_AddressCohorts_GeAmount(client, f'{base_path}_ge_amount') + self.lt_amount: CatalogTree_Computed_Distribution_AddressCohorts_LtAmount = CatalogTree_Computed_Distribution_AddressCohorts_LtAmount(client, f'{base_path}_lt_amount') + +class CatalogTree_Computed_Distribution_AddressCohorts_AmountRange: + """Catalog tree node.""" + + def __init__(self, client: BrkClientBase, base_path: str = ''): + self._0sats: _0satsPattern = _0satsPattern(client, 'addrs_with_0sats') + self._100btc_to_1k_btc: _0satsPattern = _0satsPattern(client, 'addrs_above_100btc_under_1k_btc') + self._100k_btc_or_more: _0satsPattern = _0satsPattern(client, 'addrs_above_100k_btc') + self._100k_sats_to_1m_sats: 
_0satsPattern = _0satsPattern(client, 'addrs_above_100k_sats_under_1m_sats') + self._100sats_to_1k_sats: _0satsPattern = _0satsPattern(client, 'addrs_above_100sats_under_1k_sats') + self._10btc_to_100btc: _0satsPattern = _0satsPattern(client, 'addrs_above_10btc_under_100btc') + self._10k_btc_to_100k_btc: _0satsPattern = _0satsPattern(client, 'addrs_above_10k_btc_under_100k_btc') + self._10k_sats_to_100k_sats: _0satsPattern = _0satsPattern(client, 'addrs_above_10k_sats_under_100k_sats') + self._10m_sats_to_1btc: _0satsPattern = _0satsPattern(client, 'addrs_above_10m_sats_under_1btc') + self._10sats_to_100sats: _0satsPattern = _0satsPattern(client, 'addrs_above_10sats_under_100sats') + self._1btc_to_10btc: _0satsPattern = _0satsPattern(client, 'addrs_above_1btc_under_10btc') + self._1k_btc_to_10k_btc: _0satsPattern = _0satsPattern(client, 'addrs_above_1k_btc_under_10k_btc') + self._1k_sats_to_10k_sats: _0satsPattern = _0satsPattern(client, 'addrs_above_1k_sats_under_10k_sats') + self._1m_sats_to_10m_sats: _0satsPattern = _0satsPattern(client, 'addrs_above_1m_sats_under_10m_sats') + self._1sat_to_10sats: _0satsPattern = _0satsPattern(client, 'addrs_above_1sat_under_10sats') + +class CatalogTree_Computed_Distribution_AddressCohorts_GeAmount: + """Catalog tree node.""" + + def __init__(self, client: BrkClientBase, base_path: str = ''): + self._100btc: _0satsPattern = _0satsPattern(client, 'addrs_above_100btc') + self._100k_sats: _0satsPattern = _0satsPattern(client, 'addrs_above_100k_sats') + self._100sats: _0satsPattern = _0satsPattern(client, 'addrs_above_100sats') + self._10btc: _0satsPattern = _0satsPattern(client, 'addrs_above_10btc') + self._10k_btc: _0satsPattern = _0satsPattern(client, 'addrs_above_10k_btc') + self._10k_sats: _0satsPattern = _0satsPattern(client, 'addrs_above_10k_sats') + self._10m_sats: _0satsPattern = _0satsPattern(client, 'addrs_above_10m_sats') + self._10sats: _0satsPattern = _0satsPattern(client, 'addrs_above_10sats') + self._1btc: 
_0satsPattern = _0satsPattern(client, 'addrs_above_1btc') + self._1k_btc: _0satsPattern = _0satsPattern(client, 'addrs_above_1k_btc') + self._1k_sats: _0satsPattern = _0satsPattern(client, 'addrs_above_1k_sats') + self._1m_sats: _0satsPattern = _0satsPattern(client, 'addrs_above_1m_sats') + self._1sat: _0satsPattern = _0satsPattern(client, 'addrs_above_1sat') + +class CatalogTree_Computed_Distribution_AddressCohorts_LtAmount: + """Catalog tree node.""" + + def __init__(self, client: BrkClientBase, base_path: str = ''): + self._100btc: _0satsPattern = _0satsPattern(client, 'addrs_under_100btc') + self._100k_btc: _0satsPattern = _0satsPattern(client, 'addrs_under_100k_btc') + self._100k_sats: _0satsPattern = _0satsPattern(client, 'addrs_under_100k_sats') + self._100sats: _0satsPattern = _0satsPattern(client, 'addrs_under_100sats') + self._10btc: _0satsPattern = _0satsPattern(client, 'addrs_under_10btc') + self._10k_btc: _0satsPattern = _0satsPattern(client, 'addrs_under_10k_btc') + self._10k_sats: _0satsPattern = _0satsPattern(client, 'addrs_under_10k_sats') + self._10m_sats: _0satsPattern = _0satsPattern(client, 'addrs_under_10m_sats') + self._10sats: _0satsPattern = _0satsPattern(client, 'addrs_under_10sats') + self._1btc: _0satsPattern = _0satsPattern(client, 'addrs_under_1btc') + self._1k_btc: _0satsPattern = _0satsPattern(client, 'addrs_under_1k_btc') + self._1k_sats: _0satsPattern = _0satsPattern(client, 'addrs_under_1k_sats') + self._1m_sats: _0satsPattern = _0satsPattern(client, 'addrs_under_1m_sats') + +class CatalogTree_Computed_Distribution_AddressesData: + """Catalog tree node.""" + + def __init__(self, client: BrkClientBase, base_path: str = ''): + self.empty: MetricPattern46[EmptyAddressData] = MetricPattern46(client, f'{base_path}_empty') + self.loaded: MetricPattern45[LoadedAddressData] = MetricPattern45(client, f'{base_path}_loaded') + +class CatalogTree_Computed_Distribution_UtxoCohorts: + """Catalog tree node.""" + + def __init__(self, client: 
BrkClientBase, base_path: str = ''): + self.age_range: CatalogTree_Computed_Distribution_UtxoCohorts_AgeRange = CatalogTree_Computed_Distribution_UtxoCohorts_AgeRange(client, f'{base_path}_age_range') + self.all: CatalogTree_Computed_Distribution_UtxoCohorts_All = CatalogTree_Computed_Distribution_UtxoCohorts_All(client, f'{base_path}_all') + self.amount_range: CatalogTree_Computed_Distribution_UtxoCohorts_AmountRange = CatalogTree_Computed_Distribution_UtxoCohorts_AmountRange(client, f'{base_path}_amount_range') + self.epoch: CatalogTree_Computed_Distribution_UtxoCohorts_Epoch = CatalogTree_Computed_Distribution_UtxoCohorts_Epoch(client, f'{base_path}_epoch') + self.ge_amount: CatalogTree_Computed_Distribution_UtxoCohorts_GeAmount = CatalogTree_Computed_Distribution_UtxoCohorts_GeAmount(client, f'{base_path}_ge_amount') + self.lt_amount: CatalogTree_Computed_Distribution_UtxoCohorts_LtAmount = CatalogTree_Computed_Distribution_UtxoCohorts_LtAmount(client, f'{base_path}_lt_amount') + self.max_age: CatalogTree_Computed_Distribution_UtxoCohorts_MaxAge = CatalogTree_Computed_Distribution_UtxoCohorts_MaxAge(client, f'{base_path}_max_age') + self.min_age: CatalogTree_Computed_Distribution_UtxoCohorts_MinAge = CatalogTree_Computed_Distribution_UtxoCohorts_MinAge(client, f'{base_path}_min_age') + self.term: CatalogTree_Computed_Distribution_UtxoCohorts_Term = CatalogTree_Computed_Distribution_UtxoCohorts_Term(client, f'{base_path}_term') + self.type_: CatalogTree_Computed_Distribution_UtxoCohorts_Type = CatalogTree_Computed_Distribution_UtxoCohorts_Type(client, f'{base_path}_type_') + self.year: CatalogTree_Computed_Distribution_UtxoCohorts_Year = CatalogTree_Computed_Distribution_UtxoCohorts_Year(client, f'{base_path}_year') + +class CatalogTree_Computed_Distribution_UtxoCohorts_AgeRange: + """Catalog tree node.""" + + def __init__(self, client: BrkClientBase, base_path: str = ''): + self._10y_to_12y: _10yTo12yPattern = _10yTo12yPattern(client, 
'utxos_at_least_10y_up_to_12y_old') + self._12y_to_15y: _10yTo12yPattern = _10yTo12yPattern(client, 'utxos_at_least_12y_up_to_15y_old') + self._1d_to_1w: _10yTo12yPattern = _10yTo12yPattern(client, 'utxos_at_least_1d_up_to_1w_old') + self._1m_to_2m: _10yTo12yPattern = _10yTo12yPattern(client, 'utxos_at_least_1m_up_to_2m_old') + self._1w_to_1m: _10yTo12yPattern = _10yTo12yPattern(client, 'utxos_at_least_1w_up_to_1m_old') + self._1y_to_2y: _10yTo12yPattern = _10yTo12yPattern(client, 'utxos_at_least_1y_up_to_2y_old') + self._2m_to_3m: _10yTo12yPattern = _10yTo12yPattern(client, 'utxos_at_least_2m_up_to_3m_old') + self._2y_to_3y: _10yTo12yPattern = _10yTo12yPattern(client, 'utxos_at_least_2y_up_to_3y_old') + self._3m_to_4m: _10yTo12yPattern = _10yTo12yPattern(client, 'utxos_at_least_3m_up_to_4m_old') + self._3y_to_4y: _10yTo12yPattern = _10yTo12yPattern(client, 'utxos_at_least_3y_up_to_4y_old') + self._4m_to_5m: _10yTo12yPattern = _10yTo12yPattern(client, 'utxos_at_least_4m_up_to_5m_old') + self._4y_to_5y: _10yTo12yPattern = _10yTo12yPattern(client, 'utxos_at_least_4y_up_to_5y_old') + self._5m_to_6m: _10yTo12yPattern = _10yTo12yPattern(client, 'utxos_at_least_5m_up_to_6m_old') + self._5y_to_6y: _10yTo12yPattern = _10yTo12yPattern(client, 'utxos_at_least_5y_up_to_6y_old') + self._6m_to_1y: _10yTo12yPattern = _10yTo12yPattern(client, 'utxos_at_least_6m_up_to_1y_old') + self._6y_to_7y: _10yTo12yPattern = _10yTo12yPattern(client, 'utxos_at_least_6y_up_to_7y_old') + self._7y_to_8y: _10yTo12yPattern = _10yTo12yPattern(client, 'utxos_at_least_7y_up_to_8y_old') + self._8y_to_10y: _10yTo12yPattern = _10yTo12yPattern(client, 'utxos_at_least_8y_up_to_10y_old') + self.from_15y: _10yTo12yPattern = _10yTo12yPattern(client, 'utxos_at_least_15y_old') + self.up_to_1d: UpTo1dPattern = UpTo1dPattern(client, 'utxos_up_to_1d_old') + +class CatalogTree_Computed_Distribution_UtxoCohorts_All: + """Catalog tree node.""" + + def __init__(self, client: BrkClientBase, base_path: str = ''): + 
self.activity: ActivityPattern2 = ActivityPattern2(client, '') + self.cost_basis: CostBasisPattern2 = CostBasisPattern2(client, '') + self.realized: RealizedPattern3 = RealizedPattern3(client, '') + self.relative: CatalogTree_Computed_Distribution_UtxoCohorts_All_Relative = CatalogTree_Computed_Distribution_UtxoCohorts_All_Relative(client, f'{base_path}_relative') + self.supply: SupplyPattern3 = SupplyPattern3(client, '') + self.unrealized: UnrealizedPattern = UnrealizedPattern(client, '') + +class CatalogTree_Computed_Distribution_UtxoCohorts_All_Relative: + """Catalog tree node.""" + + def __init__(self, client: BrkClientBase, base_path: str = ''): + self.neg_unrealized_loss_rel_to_own_total_unrealized_pnl: MetricPattern5[StoredF32] = MetricPattern5(client, f'{base_path}_neg_unrealized_loss_rel_to_own_total_unrealized_pnl') + self.net_unrealized_pnl_rel_to_own_total_unrealized_pnl: MetricPattern3[StoredF32] = MetricPattern3(client, f'{base_path}_net_unrealized_pnl_rel_to_own_total_unrealized_pnl') + self.supply_in_loss_rel_to_own_supply: MetricPattern5[StoredF64] = MetricPattern5(client, f'{base_path}_supply_in_loss_rel_to_own_supply') + self.supply_in_profit_rel_to_own_supply: MetricPattern5[StoredF64] = MetricPattern5(client, f'{base_path}_supply_in_profit_rel_to_own_supply') + self.unrealized_loss_rel_to_own_total_unrealized_pnl: MetricPattern5[StoredF32] = MetricPattern5(client, f'{base_path}_unrealized_loss_rel_to_own_total_unrealized_pnl') + self.unrealized_profit_rel_to_own_total_unrealized_pnl: MetricPattern5[StoredF32] = MetricPattern5(client, f'{base_path}_unrealized_profit_rel_to_own_total_unrealized_pnl') + +class CatalogTree_Computed_Distribution_UtxoCohorts_AmountRange: + """Catalog tree node.""" + + def __init__(self, client: BrkClientBase, base_path: str = ''): + self._0sats: _0satsPattern2 = _0satsPattern2(client, 'utxos_with_0sats') + self._100btc_to_1k_btc: _0satsPattern2 = _0satsPattern2(client, 'utxos_above_100btc_under_1k_btc') + 
self._100k_btc_or_more: _0satsPattern2 = _0satsPattern2(client, 'utxos_above_100k_btc') + self._100k_sats_to_1m_sats: _0satsPattern2 = _0satsPattern2(client, 'utxos_above_100k_sats_under_1m_sats') + self._100sats_to_1k_sats: _0satsPattern2 = _0satsPattern2(client, 'utxos_above_100sats_under_1k_sats') + self._10btc_to_100btc: _0satsPattern2 = _0satsPattern2(client, 'utxos_above_10btc_under_100btc') + self._10k_btc_to_100k_btc: _0satsPattern2 = _0satsPattern2(client, 'utxos_above_10k_btc_under_100k_btc') + self._10k_sats_to_100k_sats: _0satsPattern2 = _0satsPattern2(client, 'utxos_above_10k_sats_under_100k_sats') + self._10m_sats_to_1btc: _0satsPattern2 = _0satsPattern2(client, 'utxos_above_10m_sats_under_1btc') + self._10sats_to_100sats: _0satsPattern2 = _0satsPattern2(client, 'utxos_above_10sats_under_100sats') + self._1btc_to_10btc: _0satsPattern2 = _0satsPattern2(client, 'utxos_above_1btc_under_10btc') + self._1k_btc_to_10k_btc: _0satsPattern2 = _0satsPattern2(client, 'utxos_above_1k_btc_under_10k_btc') + self._1k_sats_to_10k_sats: _0satsPattern2 = _0satsPattern2(client, 'utxos_above_1k_sats_under_10k_sats') + self._1m_sats_to_10m_sats: _0satsPattern2 = _0satsPattern2(client, 'utxos_above_1m_sats_under_10m_sats') + self._1sat_to_10sats: _0satsPattern2 = _0satsPattern2(client, 'utxos_above_1sat_under_10sats') + +class CatalogTree_Computed_Distribution_UtxoCohorts_Epoch: + """Catalog tree node.""" + + def __init__(self, client: BrkClientBase, base_path: str = ''): + self._0: _10yTo12yPattern = _10yTo12yPattern(client, 'epoch_0') + self._1: _10yTo12yPattern = _10yTo12yPattern(client, 'epoch_1') + self._2: _10yTo12yPattern = _10yTo12yPattern(client, 'epoch_2') + self._3: _10yTo12yPattern = _10yTo12yPattern(client, 'epoch_3') + self._4: _10yTo12yPattern = _10yTo12yPattern(client, 'epoch_4') + +class CatalogTree_Computed_Distribution_UtxoCohorts_GeAmount: + """Catalog tree node.""" + + def __init__(self, client: BrkClientBase, base_path: str = ''): + self._100btc: 
_0satsPattern2 = _0satsPattern2(client, 'utxos_above_100btc') + self._100k_sats: _0satsPattern2 = _0satsPattern2(client, 'utxos_above_100k_sats') + self._100sats: _0satsPattern2 = _0satsPattern2(client, 'utxos_above_100sats') + self._10btc: _0satsPattern2 = _0satsPattern2(client, 'utxos_above_10btc') + self._10k_btc: _0satsPattern2 = _0satsPattern2(client, 'utxos_above_10k_btc') + self._10k_sats: _0satsPattern2 = _0satsPattern2(client, 'utxos_above_10k_sats') + self._10m_sats: _0satsPattern2 = _0satsPattern2(client, 'utxos_above_10m_sats') + self._10sats: _0satsPattern2 = _0satsPattern2(client, 'utxos_above_10sats') + self._1btc: _0satsPattern2 = _0satsPattern2(client, 'utxos_above_1btc') + self._1k_btc: _0satsPattern2 = _0satsPattern2(client, 'utxos_above_1k_btc') + self._1k_sats: _0satsPattern2 = _0satsPattern2(client, 'utxos_above_1k_sats') + self._1m_sats: _0satsPattern2 = _0satsPattern2(client, 'utxos_above_1m_sats') + self._1sat: _0satsPattern2 = _0satsPattern2(client, 'utxos_above_1sat') + +class CatalogTree_Computed_Distribution_UtxoCohorts_LtAmount: + """Catalog tree node.""" + + def __init__(self, client: BrkClientBase, base_path: str = ''): + self._100btc: _0satsPattern2 = _0satsPattern2(client, 'utxos_under_100btc') + self._100k_btc: _0satsPattern2 = _0satsPattern2(client, 'utxos_under_100k_btc') + self._100k_sats: _0satsPattern2 = _0satsPattern2(client, 'utxos_under_100k_sats') + self._100sats: _0satsPattern2 = _0satsPattern2(client, 'utxos_under_100sats') + self._10btc: _0satsPattern2 = _0satsPattern2(client, 'utxos_under_10btc') + self._10k_btc: _0satsPattern2 = _0satsPattern2(client, 'utxos_under_10k_btc') + self._10k_sats: _0satsPattern2 = _0satsPattern2(client, 'utxos_under_10k_sats') + self._10m_sats: _0satsPattern2 = _0satsPattern2(client, 'utxos_under_10m_sats') + self._10sats: _0satsPattern2 = _0satsPattern2(client, 'utxos_under_10sats') + self._1btc: _0satsPattern2 = _0satsPattern2(client, 'utxos_under_1btc') + self._1k_btc: _0satsPattern2 = 
_0satsPattern2(client, 'utxos_under_1k_btc') + self._1k_sats: _0satsPattern2 = _0satsPattern2(client, 'utxos_under_1k_sats') + self._1m_sats: _0satsPattern2 = _0satsPattern2(client, 'utxos_under_1m_sats') + +class CatalogTree_Computed_Distribution_UtxoCohorts_MaxAge: + """Catalog tree node.""" + + def __init__(self, client: BrkClientBase, base_path: str = ''): + self._10y: UpTo1dPattern = UpTo1dPattern(client, 'utxos_up_to_10y_old') + self._12y: UpTo1dPattern = UpTo1dPattern(client, 'utxos_up_to_12y_old') + self._15y: UpTo1dPattern = UpTo1dPattern(client, 'utxos_up_to_15y_old') + self._1m: UpTo1dPattern = UpTo1dPattern(client, 'utxos_up_to_1m_old') + self._1w: UpTo1dPattern = UpTo1dPattern(client, 'utxos_up_to_1w_old') + self._1y: UpTo1dPattern = UpTo1dPattern(client, 'utxos_up_to_1y_old') + self._2m: UpTo1dPattern = UpTo1dPattern(client, 'utxos_up_to_2m_old') + self._2y: UpTo1dPattern = UpTo1dPattern(client, 'utxos_up_to_2y_old') + self._3m: UpTo1dPattern = UpTo1dPattern(client, 'utxos_up_to_3m_old') + self._3y: UpTo1dPattern = UpTo1dPattern(client, 'utxos_up_to_3y_old') + self._4m: UpTo1dPattern = UpTo1dPattern(client, 'utxos_up_to_4m_old') + self._4y: UpTo1dPattern = UpTo1dPattern(client, 'utxos_up_to_4y_old') + self._5m: UpTo1dPattern = UpTo1dPattern(client, 'utxos_up_to_5m_old') + self._5y: UpTo1dPattern = UpTo1dPattern(client, 'utxos_up_to_5y_old') + self._6m: UpTo1dPattern = UpTo1dPattern(client, 'utxos_up_to_6m_old') + self._6y: UpTo1dPattern = UpTo1dPattern(client, 'utxos_up_to_6y_old') + self._7y: UpTo1dPattern = UpTo1dPattern(client, 'utxos_up_to_7y_old') + self._8y: UpTo1dPattern = UpTo1dPattern(client, 'utxos_up_to_8y_old') + +class CatalogTree_Computed_Distribution_UtxoCohorts_MinAge: + """Catalog tree node.""" + + def __init__(self, client: BrkClientBase, base_path: str = ''): + self._10y: _10yTo12yPattern = _10yTo12yPattern(client, 'utxos_at_least_10y_old') + self._12y: _10yTo12yPattern = _10yTo12yPattern(client, 'utxos_at_least_12y_old') + 
self._1d: _10yTo12yPattern = _10yTo12yPattern(client, 'utxos_at_least_1d_old') + self._1m: _10yTo12yPattern = _10yTo12yPattern(client, 'utxos_at_least_1m_old') + self._1w: _10yTo12yPattern = _10yTo12yPattern(client, 'utxos_at_least_1w_old') + self._1y: _10yTo12yPattern = _10yTo12yPattern(client, 'utxos_at_least_1y_old') + self._2m: _10yTo12yPattern = _10yTo12yPattern(client, 'utxos_at_least_2m_old') + self._2y: _10yTo12yPattern = _10yTo12yPattern(client, 'utxos_at_least_2y_old') + self._3m: _10yTo12yPattern = _10yTo12yPattern(client, 'utxos_at_least_3m_old') + self._3y: _10yTo12yPattern = _10yTo12yPattern(client, 'utxos_at_least_3y_old') + self._4m: _10yTo12yPattern = _10yTo12yPattern(client, 'utxos_at_least_4m_old') + self._4y: _10yTo12yPattern = _10yTo12yPattern(client, 'utxos_at_least_4y_old') + self._5m: _10yTo12yPattern = _10yTo12yPattern(client, 'utxos_at_least_5m_old') + self._5y: _10yTo12yPattern = _10yTo12yPattern(client, 'utxos_at_least_5y_old') + self._6m: _10yTo12yPattern = _10yTo12yPattern(client, 'utxos_at_least_6m_old') + self._6y: _10yTo12yPattern = _10yTo12yPattern(client, 'utxos_at_least_6y_old') + self._7y: _10yTo12yPattern = _10yTo12yPattern(client, 'utxos_at_least_7y_old') + self._8y: _10yTo12yPattern = _10yTo12yPattern(client, 'utxos_at_least_8y_old') + +class CatalogTree_Computed_Distribution_UtxoCohorts_Term: + """Catalog tree node.""" + + def __init__(self, client: BrkClientBase, base_path: str = ''): + self.long: UpTo1dPattern = UpTo1dPattern(client, 'lth') + self.short: UpTo1dPattern = UpTo1dPattern(client, 'sth') + +class CatalogTree_Computed_Distribution_UtxoCohorts_Type: + """Catalog tree node.""" + + def __init__(self, client: BrkClientBase, base_path: str = ''): + self.empty: _0satsPattern2 = _0satsPattern2(client, 'empty_outputs') + self.p2a: _0satsPattern2 = _0satsPattern2(client, 'p2a') + self.p2ms: _0satsPattern2 = _0satsPattern2(client, 'p2ms') + self.p2pk33: _0satsPattern2 = _0satsPattern2(client, 'p2pk33') + self.p2pk65: 
_0satsPattern2 = _0satsPattern2(client, 'p2pk65') + self.p2pkh: _0satsPattern2 = _0satsPattern2(client, 'p2pkh') + self.p2sh: _0satsPattern2 = _0satsPattern2(client, 'p2sh') + self.p2tr: _0satsPattern2 = _0satsPattern2(client, 'p2tr') + self.p2wpkh: _0satsPattern2 = _0satsPattern2(client, 'p2wpkh') + self.p2wsh: _0satsPattern2 = _0satsPattern2(client, 'p2wsh') + self.unknown: _0satsPattern2 = _0satsPattern2(client, 'unknown_outputs') + +class CatalogTree_Computed_Distribution_UtxoCohorts_Year: + """Catalog tree node.""" + + def __init__(self, client: BrkClientBase, base_path: str = ''): + self._2009: _10yTo12yPattern = _10yTo12yPattern(client, 'year_2009') + self._2010: _10yTo12yPattern = _10yTo12yPattern(client, 'year_2010') + self._2011: _10yTo12yPattern = _10yTo12yPattern(client, 'year_2011') + self._2012: _10yTo12yPattern = _10yTo12yPattern(client, 'year_2012') + self._2013: _10yTo12yPattern = _10yTo12yPattern(client, 'year_2013') + self._2014: _10yTo12yPattern = _10yTo12yPattern(client, 'year_2014') + self._2015: _10yTo12yPattern = _10yTo12yPattern(client, 'year_2015') + self._2016: _10yTo12yPattern = _10yTo12yPattern(client, 'year_2016') + self._2017: _10yTo12yPattern = _10yTo12yPattern(client, 'year_2017') + self._2018: _10yTo12yPattern = _10yTo12yPattern(client, 'year_2018') + self._2019: _10yTo12yPattern = _10yTo12yPattern(client, 'year_2019') + self._2020: _10yTo12yPattern = _10yTo12yPattern(client, 'year_2020') + self._2021: _10yTo12yPattern = _10yTo12yPattern(client, 'year_2021') + self._2022: _10yTo12yPattern = _10yTo12yPattern(client, 'year_2022') + self._2023: _10yTo12yPattern = _10yTo12yPattern(client, 'year_2023') + self._2024: _10yTo12yPattern = _10yTo12yPattern(client, 'year_2024') + self._2025: _10yTo12yPattern = _10yTo12yPattern(client, 'year_2025') + self._2026: _10yTo12yPattern = _10yTo12yPattern(client, 'year_2026') + +class CatalogTree_Computed_Indexes: + """Catalog tree node.""" + + def __init__(self, client: BrkClientBase, base_path: str 
= ''): + self.address: CatalogTree_Computed_Indexes_Address = CatalogTree_Computed_Indexes_Address(client, f'{base_path}_address') + self.block: CatalogTree_Computed_Indexes_Block = CatalogTree_Computed_Indexes_Block(client, f'{base_path}_block') + self.time: CatalogTree_Computed_Indexes_Time = CatalogTree_Computed_Indexes_Time(client, f'{base_path}_time') + self.transaction: CatalogTree_Computed_Indexes_Transaction = CatalogTree_Computed_Indexes_Transaction(client, f'{base_path}_transaction') + +class CatalogTree_Computed_Indexes_Address: + """Catalog tree node.""" + + def __init__(self, client: BrkClientBase, base_path: str = ''): + self.emptyoutputindex: MetricPattern24[EmptyOutputIndex] = MetricPattern24(client, f'{base_path}_emptyoutputindex') + self.opreturnindex: MetricPattern28[OpReturnIndex] = MetricPattern28(client, f'{base_path}_opreturnindex') + self.p2aaddressindex: MetricPattern30[P2AAddressIndex] = MetricPattern30(client, f'{base_path}_p2aaddressindex') + self.p2msoutputindex: MetricPattern31[P2MSOutputIndex] = MetricPattern31(client, f'{base_path}_p2msoutputindex') + self.p2pk33addressindex: MetricPattern32[P2PK33AddressIndex] = MetricPattern32(client, f'{base_path}_p2pk33addressindex') + self.p2pk65addressindex: MetricPattern33[P2PK65AddressIndex] = MetricPattern33(client, f'{base_path}_p2pk65addressindex') + self.p2pkhaddressindex: MetricPattern34[P2PKHAddressIndex] = MetricPattern34(client, f'{base_path}_p2pkhaddressindex') + self.p2shaddressindex: MetricPattern35[P2SHAddressIndex] = MetricPattern35(client, f'{base_path}_p2shaddressindex') + self.p2traddressindex: MetricPattern36[P2TRAddressIndex] = MetricPattern36(client, f'{base_path}_p2traddressindex') + self.p2wpkhaddressindex: MetricPattern37[P2WPKHAddressIndex] = MetricPattern37(client, f'{base_path}_p2wpkhaddressindex') + self.p2wshaddressindex: MetricPattern38[P2WSHAddressIndex] = MetricPattern38(client, f'{base_path}_p2wshaddressindex') + self.unknownoutputindex: 
MetricPattern42[UnknownOutputIndex] = MetricPattern42(client, f'{base_path}_unknownoutputindex') + +class CatalogTree_Computed_Indexes_Block: + """Catalog tree node.""" + + def __init__(self, client: BrkClientBase, base_path: str = ''): + self.dateindex: MetricPattern25[DateIndex] = MetricPattern25(client, f'{base_path}_dateindex') + self.difficultyepoch: MetricPattern14[DifficultyEpoch] = MetricPattern14(client, f'{base_path}_difficultyepoch') + self.first_height: MetricPattern13[Height] = MetricPattern13(client, f'{base_path}_first_height') + self.halvingepoch: MetricPattern15[HalvingEpoch] = MetricPattern15(client, f'{base_path}_halvingepoch') + self.height: MetricPattern25[Height] = MetricPattern25(client, f'{base_path}_height') + self.height_count: MetricPattern23[StoredU64] = MetricPattern23(client, f'{base_path}_height_count') + self.txindex_count: MetricPattern25[StoredU64] = MetricPattern25(client, f'{base_path}_txindex_count') + +class CatalogTree_Computed_Indexes_Time: + """Catalog tree node.""" + + def __init__(self, client: BrkClientBase, base_path: str = ''): + self.date: MetricPattern21[Date] = MetricPattern21(client, f'{base_path}_date') + self.dateindex: MetricPattern21[DateIndex] = MetricPattern21(client, f'{base_path}_dateindex') + self.dateindex_count: MetricPattern19[StoredU64] = MetricPattern19(client, f'{base_path}_dateindex_count') + self.decadeindex: MetricPattern12[DecadeIndex] = MetricPattern12(client, f'{base_path}_decadeindex') + self.first_dateindex: MetricPattern19[DateIndex] = MetricPattern19(client, f'{base_path}_first_dateindex') + self.first_height: MetricPattern21[Height] = MetricPattern21(client, f'{base_path}_first_height') + self.first_monthindex: MetricPattern8[MonthIndex] = MetricPattern8(client, f'{base_path}_first_monthindex') + self.first_yearindex: MetricPattern22[YearIndex] = MetricPattern22(client, f'{base_path}_first_yearindex') + self.height_count: MetricPattern21[StoredU64] = MetricPattern21(client, 
f'{base_path}_height_count') + self.monthindex: MetricPattern10[MonthIndex] = MetricPattern10(client, f'{base_path}_monthindex') + self.monthindex_count: MetricPattern8[StoredU64] = MetricPattern8(client, f'{base_path}_monthindex_count') + self.quarterindex: MetricPattern17[QuarterIndex] = MetricPattern17(client, f'{base_path}_quarterindex') + self.semesterindex: MetricPattern18[SemesterIndex] = MetricPattern18(client, f'{base_path}_semesterindex') + self.weekindex: MetricPattern11[WeekIndex] = MetricPattern11(client, f'{base_path}_weekindex') + self.yearindex: MetricPattern20[YearIndex] = MetricPattern20(client, f'{base_path}_yearindex') + self.yearindex_count: MetricPattern22[StoredU64] = MetricPattern22(client, f'{base_path}_yearindex_count') + +class CatalogTree_Computed_Indexes_Transaction: + """Catalog tree node.""" + + def __init__(self, client: BrkClientBase, base_path: str = ''): + self.input_count: MetricPattern41[StoredU64] = MetricPattern41(client, f'{base_path}_input_count') + self.output_count: MetricPattern41[StoredU64] = MetricPattern41(client, f'{base_path}_output_count') + self.txindex: MetricPattern41[TxIndex] = MetricPattern41(client, f'{base_path}_txindex') + self.txinindex: MetricPattern26[TxInIndex] = MetricPattern26(client, f'{base_path}_txinindex') + self.txoutindex: MetricPattern29[TxOutIndex] = MetricPattern29(client, f'{base_path}_txoutindex') + +class CatalogTree_Computed_Inputs: + """Catalog tree node.""" + + def __init__(self, client: BrkClientBase, base_path: str = ''): + self.count: CatalogTree_Computed_Inputs_Count = CatalogTree_Computed_Inputs_Count(client, f'{base_path}_count') + self.spent: CatalogTree_Computed_Inputs_Spent = CatalogTree_Computed_Inputs_Spent(client, f'{base_path}_spent') + +class CatalogTree_Computed_Inputs_Count: + """Catalog tree node.""" + + def __init__(self, client: BrkClientBase, base_path: str = ''): + self.count: BlockSizePattern[StoredU64] = BlockSizePattern(client, 'input_count') + +class 
CatalogTree_Computed_Inputs_Spent: + """Catalog tree node.""" + + def __init__(self, client: BrkClientBase, base_path: str = ''): + self.txoutindex: MetricPattern26[TxOutIndex] = MetricPattern26(client, f'{base_path}_txoutindex') + self.value: MetricPattern26[Sats] = MetricPattern26(client, f'{base_path}_value') + +class CatalogTree_Computed_Market: + """Catalog tree node.""" + + def __init__(self, client: BrkClientBase, base_path: str = ''): + self.ath: CatalogTree_Computed_Market_Ath = CatalogTree_Computed_Market_Ath(client, f'{base_path}_ath') + self.dca: CatalogTree_Computed_Market_Dca = CatalogTree_Computed_Market_Dca(client, f'{base_path}_dca') + self.indicators: CatalogTree_Computed_Market_Indicators = CatalogTree_Computed_Market_Indicators(client, f'{base_path}_indicators') + self.lookback: CatalogTree_Computed_Market_Lookback = CatalogTree_Computed_Market_Lookback(client, f'{base_path}_lookback') + self.moving_average: CatalogTree_Computed_Market_MovingAverage = CatalogTree_Computed_Market_MovingAverage(client, f'{base_path}_moving_average') + self.range: CatalogTree_Computed_Market_Range = CatalogTree_Computed_Market_Range(client, f'{base_path}_range') + self.returns: CatalogTree_Computed_Market_Returns = CatalogTree_Computed_Market_Returns(client, f'{base_path}_returns') + self.volatility: CatalogTree_Computed_Market_Volatility = CatalogTree_Computed_Market_Volatility(client, f'{base_path}_volatility') + +class CatalogTree_Computed_Market_Ath: + """Catalog tree node.""" + + def __init__(self, client: BrkClientBase, base_path: str = ''): + self.days_since_price_ath: MetricPattern4[StoredU16] = MetricPattern4(client, f'{base_path}_days_since_price_ath') + self.max_days_between_price_aths: MetricPattern4[StoredU16] = MetricPattern4(client, f'{base_path}_max_days_between_price_aths') + self.max_years_between_price_aths: MetricPattern4[StoredF32] = MetricPattern4(client, f'{base_path}_max_years_between_price_aths') + self.price_ath: MetricPattern3[Dollars] = 
MetricPattern3(client, f'{base_path}_price_ath') + self.price_drawdown: MetricPattern3[StoredF32] = MetricPattern3(client, f'{base_path}_price_drawdown') + +class CatalogTree_Computed_Market_Dca: + """Catalog tree node.""" + + def __init__(self, client: BrkClientBase, base_path: str = ''): + self._10y_dca_avg_price: MetricPattern4[Dollars] = MetricPattern4(client, f'{base_path}__10y_dca_avg_price') + self._10y_dca_cagr: MetricPattern4[StoredF32] = MetricPattern4(client, f'{base_path}__10y_dca_cagr') + self._10y_dca_returns: MetricPattern4[StoredF32] = MetricPattern4(client, f'{base_path}__10y_dca_returns') + self._10y_dca_stack: MetricPattern4[Sats] = MetricPattern4(client, f'{base_path}__10y_dca_stack') + self._1m_dca_avg_price: MetricPattern4[Dollars] = MetricPattern4(client, f'{base_path}__1m_dca_avg_price') + self._1m_dca_returns: MetricPattern4[StoredF32] = MetricPattern4(client, f'{base_path}__1m_dca_returns') + self._1m_dca_stack: MetricPattern4[Sats] = MetricPattern4(client, f'{base_path}__1m_dca_stack') + self._1w_dca_avg_price: MetricPattern4[Dollars] = MetricPattern4(client, f'{base_path}__1w_dca_avg_price') + self._1w_dca_returns: MetricPattern4[StoredF32] = MetricPattern4(client, f'{base_path}__1w_dca_returns') + self._1w_dca_stack: MetricPattern4[Sats] = MetricPattern4(client, f'{base_path}__1w_dca_stack') + self._1y_dca_avg_price: MetricPattern4[Dollars] = MetricPattern4(client, f'{base_path}__1y_dca_avg_price') + self._1y_dca_returns: MetricPattern4[StoredF32] = MetricPattern4(client, f'{base_path}__1y_dca_returns') + self._1y_dca_stack: MetricPattern4[Sats] = MetricPattern4(client, f'{base_path}__1y_dca_stack') + self._2y_dca_avg_price: MetricPattern4[Dollars] = MetricPattern4(client, f'{base_path}__2y_dca_avg_price') + self._2y_dca_cagr: MetricPattern4[StoredF32] = MetricPattern4(client, f'{base_path}__2y_dca_cagr') + self._2y_dca_returns: MetricPattern4[StoredF32] = MetricPattern4(client, f'{base_path}__2y_dca_returns') + self._2y_dca_stack: 
MetricPattern4[Sats] = MetricPattern4(client, f'{base_path}__2y_dca_stack') + self._3m_dca_avg_price: MetricPattern4[Dollars] = MetricPattern4(client, f'{base_path}__3m_dca_avg_price') + self._3m_dca_returns: MetricPattern4[StoredF32] = MetricPattern4(client, f'{base_path}__3m_dca_returns') + self._3m_dca_stack: MetricPattern4[Sats] = MetricPattern4(client, f'{base_path}__3m_dca_stack') + self._3y_dca_avg_price: MetricPattern4[Dollars] = MetricPattern4(client, f'{base_path}__3y_dca_avg_price') + self._3y_dca_cagr: MetricPattern4[StoredF32] = MetricPattern4(client, f'{base_path}__3y_dca_cagr') + self._3y_dca_returns: MetricPattern4[StoredF32] = MetricPattern4(client, f'{base_path}__3y_dca_returns') + self._3y_dca_stack: MetricPattern4[Sats] = MetricPattern4(client, f'{base_path}__3y_dca_stack') + self._4y_dca_avg_price: MetricPattern4[Dollars] = MetricPattern4(client, f'{base_path}__4y_dca_avg_price') + self._4y_dca_cagr: MetricPattern4[StoredF32] = MetricPattern4(client, f'{base_path}__4y_dca_cagr') + self._4y_dca_returns: MetricPattern4[StoredF32] = MetricPattern4(client, f'{base_path}__4y_dca_returns') + self._4y_dca_stack: MetricPattern4[Sats] = MetricPattern4(client, f'{base_path}__4y_dca_stack') + self._5y_dca_avg_price: MetricPattern4[Dollars] = MetricPattern4(client, f'{base_path}__5y_dca_avg_price') + self._5y_dca_cagr: MetricPattern4[StoredF32] = MetricPattern4(client, f'{base_path}__5y_dca_cagr') + self._5y_dca_returns: MetricPattern4[StoredF32] = MetricPattern4(client, f'{base_path}__5y_dca_returns') + self._5y_dca_stack: MetricPattern4[Sats] = MetricPattern4(client, f'{base_path}__5y_dca_stack') + self._6m_dca_avg_price: MetricPattern4[Dollars] = MetricPattern4(client, f'{base_path}__6m_dca_avg_price') + self._6m_dca_returns: MetricPattern4[StoredF32] = MetricPattern4(client, f'{base_path}__6m_dca_returns') + self._6m_dca_stack: MetricPattern4[Sats] = MetricPattern4(client, f'{base_path}__6m_dca_stack') + self._6y_dca_avg_price: MetricPattern4[Dollars] 
= MetricPattern4(client, f'{base_path}__6y_dca_avg_price') + self._6y_dca_cagr: MetricPattern4[StoredF32] = MetricPattern4(client, f'{base_path}__6y_dca_cagr') + self._6y_dca_returns: MetricPattern4[StoredF32] = MetricPattern4(client, f'{base_path}__6y_dca_returns') + self._6y_dca_stack: MetricPattern4[Sats] = MetricPattern4(client, f'{base_path}__6y_dca_stack') + self._8y_dca_avg_price: MetricPattern4[Dollars] = MetricPattern4(client, f'{base_path}__8y_dca_avg_price') + self._8y_dca_cagr: MetricPattern4[StoredF32] = MetricPattern4(client, f'{base_path}__8y_dca_cagr') + self._8y_dca_returns: MetricPattern4[StoredF32] = MetricPattern4(client, f'{base_path}__8y_dca_returns') + self._8y_dca_stack: MetricPattern4[Sats] = MetricPattern4(client, f'{base_path}__8y_dca_stack') + self.dca_class_2015_avg_price: MetricPattern4[Dollars] = MetricPattern4(client, f'{base_path}_dca_class_2015_avg_price') + self.dca_class_2015_returns: MetricPattern4[StoredF32] = MetricPattern4(client, f'{base_path}_dca_class_2015_returns') + self.dca_class_2015_stack: MetricPattern4[Sats] = MetricPattern4(client, f'{base_path}_dca_class_2015_stack') + self.dca_class_2016_avg_price: MetricPattern4[Dollars] = MetricPattern4(client, f'{base_path}_dca_class_2016_avg_price') + self.dca_class_2016_returns: MetricPattern4[StoredF32] = MetricPattern4(client, f'{base_path}_dca_class_2016_returns') + self.dca_class_2016_stack: MetricPattern4[Sats] = MetricPattern4(client, f'{base_path}_dca_class_2016_stack') + self.dca_class_2017_avg_price: MetricPattern4[Dollars] = MetricPattern4(client, f'{base_path}_dca_class_2017_avg_price') + self.dca_class_2017_returns: MetricPattern4[StoredF32] = MetricPattern4(client, f'{base_path}_dca_class_2017_returns') + self.dca_class_2017_stack: MetricPattern4[Sats] = MetricPattern4(client, f'{base_path}_dca_class_2017_stack') + self.dca_class_2018_avg_price: MetricPattern4[Dollars] = MetricPattern4(client, f'{base_path}_dca_class_2018_avg_price') + 
self.dca_class_2018_returns: MetricPattern4[StoredF32] = MetricPattern4(client, f'{base_path}_dca_class_2018_returns') + self.dca_class_2018_stack: MetricPattern4[Sats] = MetricPattern4(client, f'{base_path}_dca_class_2018_stack') + self.dca_class_2019_avg_price: MetricPattern4[Dollars] = MetricPattern4(client, f'{base_path}_dca_class_2019_avg_price') + self.dca_class_2019_returns: MetricPattern4[StoredF32] = MetricPattern4(client, f'{base_path}_dca_class_2019_returns') + self.dca_class_2019_stack: MetricPattern4[Sats] = MetricPattern4(client, f'{base_path}_dca_class_2019_stack') + self.dca_class_2020_avg_price: MetricPattern4[Dollars] = MetricPattern4(client, f'{base_path}_dca_class_2020_avg_price') + self.dca_class_2020_returns: MetricPattern4[StoredF32] = MetricPattern4(client, f'{base_path}_dca_class_2020_returns') + self.dca_class_2020_stack: MetricPattern4[Sats] = MetricPattern4(client, f'{base_path}_dca_class_2020_stack') + self.dca_class_2021_avg_price: MetricPattern4[Dollars] = MetricPattern4(client, f'{base_path}_dca_class_2021_avg_price') + self.dca_class_2021_returns: MetricPattern4[StoredF32] = MetricPattern4(client, f'{base_path}_dca_class_2021_returns') + self.dca_class_2021_stack: MetricPattern4[Sats] = MetricPattern4(client, f'{base_path}_dca_class_2021_stack') + self.dca_class_2022_avg_price: MetricPattern4[Dollars] = MetricPattern4(client, f'{base_path}_dca_class_2022_avg_price') + self.dca_class_2022_returns: MetricPattern4[StoredF32] = MetricPattern4(client, f'{base_path}_dca_class_2022_returns') + self.dca_class_2022_stack: MetricPattern4[Sats] = MetricPattern4(client, f'{base_path}_dca_class_2022_stack') + self.dca_class_2023_avg_price: MetricPattern4[Dollars] = MetricPattern4(client, f'{base_path}_dca_class_2023_avg_price') + self.dca_class_2023_returns: MetricPattern4[StoredF32] = MetricPattern4(client, f'{base_path}_dca_class_2023_returns') + self.dca_class_2023_stack: MetricPattern4[Sats] = MetricPattern4(client, 
f'{base_path}_dca_class_2023_stack') + self.dca_class_2024_avg_price: MetricPattern4[Dollars] = MetricPattern4(client, f'{base_path}_dca_class_2024_avg_price') + self.dca_class_2024_returns: MetricPattern4[StoredF32] = MetricPattern4(client, f'{base_path}_dca_class_2024_returns') + self.dca_class_2024_stack: MetricPattern4[Sats] = MetricPattern4(client, f'{base_path}_dca_class_2024_stack') + self.dca_class_2025_avg_price: MetricPattern4[Dollars] = MetricPattern4(client, f'{base_path}_dca_class_2025_avg_price') + self.dca_class_2025_returns: MetricPattern4[StoredF32] = MetricPattern4(client, f'{base_path}_dca_class_2025_returns') + self.dca_class_2025_stack: MetricPattern4[Sats] = MetricPattern4(client, f'{base_path}_dca_class_2025_stack') + +class CatalogTree_Computed_Market_Indicators: + """Catalog tree node.""" + + def __init__(self, client: BrkClientBase, base_path: str = ''): + self.gini: MetricPattern21[StoredF32] = MetricPattern21(client, f'{base_path}_gini') + self.macd_histogram: MetricPattern21[StoredF32] = MetricPattern21(client, f'{base_path}_macd_histogram') + self.macd_line: MetricPattern21[StoredF32] = MetricPattern21(client, f'{base_path}_macd_line') + self.macd_signal: MetricPattern21[StoredF32] = MetricPattern21(client, f'{base_path}_macd_signal') + self.nvt: MetricPattern21[StoredF32] = MetricPattern21(client, f'{base_path}_nvt') + self.pi_cycle: MetricPattern21[StoredF32] = MetricPattern21(client, f'{base_path}_pi_cycle') + self.puell_multiple: MetricPattern4[StoredF32] = MetricPattern4(client, f'{base_path}_puell_multiple') + self.rsi_14d: MetricPattern21[StoredF32] = MetricPattern21(client, f'{base_path}_rsi_14d') + self.rsi_14d_max: MetricPattern21[StoredF32] = MetricPattern21(client, f'{base_path}_rsi_14d_max') + self.rsi_14d_min: MetricPattern21[StoredF32] = MetricPattern21(client, f'{base_path}_rsi_14d_min') + self.rsi_avg_gain_14d: MetricPattern21[StoredF32] = MetricPattern21(client, f'{base_path}_rsi_avg_gain_14d') + 
self.rsi_avg_loss_14d: MetricPattern21[StoredF32] = MetricPattern21(client, f'{base_path}_rsi_avg_loss_14d') + self.rsi_gains: MetricPattern21[StoredF32] = MetricPattern21(client, f'{base_path}_rsi_gains') + self.rsi_losses: MetricPattern21[StoredF32] = MetricPattern21(client, f'{base_path}_rsi_losses') + self.stoch_d: MetricPattern21[StoredF32] = MetricPattern21(client, f'{base_path}_stoch_d') + self.stoch_k: MetricPattern21[StoredF32] = MetricPattern21(client, f'{base_path}_stoch_k') + self.stoch_rsi: MetricPattern21[StoredF32] = MetricPattern21(client, f'{base_path}_stoch_rsi') + self.stoch_rsi_d: MetricPattern21[StoredF32] = MetricPattern21(client, f'{base_path}_stoch_rsi_d') + self.stoch_rsi_k: MetricPattern21[StoredF32] = MetricPattern21(client, f'{base_path}_stoch_rsi_k') + +class CatalogTree_Computed_Market_Lookback: + """Catalog tree node.""" + + def __init__(self, client: BrkClientBase, base_path: str = ''): + self.price_10y_ago: MetricPattern4[Dollars] = MetricPattern4(client, f'{base_path}_price_10y_ago') + self.price_1d_ago: MetricPattern4[Dollars] = MetricPattern4(client, f'{base_path}_price_1d_ago') + self.price_1m_ago: MetricPattern4[Dollars] = MetricPattern4(client, f'{base_path}_price_1m_ago') + self.price_1w_ago: MetricPattern4[Dollars] = MetricPattern4(client, f'{base_path}_price_1w_ago') + self.price_1y_ago: MetricPattern4[Dollars] = MetricPattern4(client, f'{base_path}_price_1y_ago') + self.price_2y_ago: MetricPattern4[Dollars] = MetricPattern4(client, f'{base_path}_price_2y_ago') + self.price_3m_ago: MetricPattern4[Dollars] = MetricPattern4(client, f'{base_path}_price_3m_ago') + self.price_3y_ago: MetricPattern4[Dollars] = MetricPattern4(client, f'{base_path}_price_3y_ago') + self.price_4y_ago: MetricPattern4[Dollars] = MetricPattern4(client, f'{base_path}_price_4y_ago') + self.price_5y_ago: MetricPattern4[Dollars] = MetricPattern4(client, f'{base_path}_price_5y_ago') + self.price_6m_ago: MetricPattern4[Dollars] = MetricPattern4(client, 
f'{base_path}_price_6m_ago') + self.price_6y_ago: MetricPattern4[Dollars] = MetricPattern4(client, f'{base_path}_price_6y_ago') + self.price_8y_ago: MetricPattern4[Dollars] = MetricPattern4(client, f'{base_path}_price_8y_ago') + +class CatalogTree_Computed_Market_MovingAverage: + """Catalog tree node.""" + + def __init__(self, client: BrkClientBase, base_path: str = ''): + self.price_111d_sma: Price111dSmaPattern = Price111dSmaPattern(client, 'price_111d_sma') + self.price_12d_ema: Price111dSmaPattern = Price111dSmaPattern(client, 'price_12d_ema') + self.price_13d_ema: Price111dSmaPattern = Price111dSmaPattern(client, 'price_13d_ema') + self.price_13d_sma: Price111dSmaPattern = Price111dSmaPattern(client, 'price_13d_sma') + self.price_144d_ema: Price111dSmaPattern = Price111dSmaPattern(client, 'price_144d_ema') + self.price_144d_sma: Price111dSmaPattern = Price111dSmaPattern(client, 'price_144d_sma') + self.price_1m_ema: Price111dSmaPattern = Price111dSmaPattern(client, 'price_1m_ema') + self.price_1m_sma: Price111dSmaPattern = Price111dSmaPattern(client, 'price_1m_sma') + self.price_1w_ema: Price111dSmaPattern = Price111dSmaPattern(client, 'price_1w_ema') + self.price_1w_sma: Price111dSmaPattern = Price111dSmaPattern(client, 'price_1w_sma') + self.price_1y_ema: Price111dSmaPattern = Price111dSmaPattern(client, 'price_1y_ema') + self.price_1y_sma: Price111dSmaPattern = Price111dSmaPattern(client, 'price_1y_sma') + self.price_200d_ema: Price111dSmaPattern = Price111dSmaPattern(client, 'price_200d_ema') + self.price_200d_sma: Price111dSmaPattern = Price111dSmaPattern(client, 'price_200d_sma') + self.price_200d_sma_x0_8: MetricPattern4[Dollars] = MetricPattern4(client, f'{base_path}_price_200d_sma_x0_8') + self.price_200d_sma_x2_4: MetricPattern4[Dollars] = MetricPattern4(client, f'{base_path}_price_200d_sma_x2_4') + self.price_200w_ema: Price111dSmaPattern = Price111dSmaPattern(client, 'price_200w_ema') + self.price_200w_sma: Price111dSmaPattern = 
Price111dSmaPattern(client, 'price_200w_sma') + self.price_21d_ema: Price111dSmaPattern = Price111dSmaPattern(client, 'price_21d_ema') + self.price_21d_sma: Price111dSmaPattern = Price111dSmaPattern(client, 'price_21d_sma') + self.price_26d_ema: Price111dSmaPattern = Price111dSmaPattern(client, 'price_26d_ema') + self.price_2y_ema: Price111dSmaPattern = Price111dSmaPattern(client, 'price_2y_ema') + self.price_2y_sma: Price111dSmaPattern = Price111dSmaPattern(client, 'price_2y_sma') + self.price_34d_ema: Price111dSmaPattern = Price111dSmaPattern(client, 'price_34d_ema') + self.price_34d_sma: Price111dSmaPattern = Price111dSmaPattern(client, 'price_34d_sma') + self.price_350d_sma: Price111dSmaPattern = Price111dSmaPattern(client, 'price_350d_sma') + self.price_350d_sma_x2: MetricPattern4[Dollars] = MetricPattern4(client, f'{base_path}_price_350d_sma_x2') + self.price_4y_ema: Price111dSmaPattern = Price111dSmaPattern(client, 'price_4y_ema') + self.price_4y_sma: Price111dSmaPattern = Price111dSmaPattern(client, 'price_4y_sma') + self.price_55d_ema: Price111dSmaPattern = Price111dSmaPattern(client, 'price_55d_ema') + self.price_55d_sma: Price111dSmaPattern = Price111dSmaPattern(client, 'price_55d_sma') + self.price_89d_ema: Price111dSmaPattern = Price111dSmaPattern(client, 'price_89d_ema') + self.price_89d_sma: Price111dSmaPattern = Price111dSmaPattern(client, 'price_89d_sma') + self.price_8d_ema: Price111dSmaPattern = Price111dSmaPattern(client, 'price_8d_ema') + self.price_8d_sma: Price111dSmaPattern = Price111dSmaPattern(client, 'price_8d_sma') + +class CatalogTree_Computed_Market_Range: + """Catalog tree node.""" + + def __init__(self, client: BrkClientBase, base_path: str = ''): + self.price_1m_max: MetricPattern4[Dollars] = MetricPattern4(client, f'{base_path}_price_1m_max') + self.price_1m_min: MetricPattern4[Dollars] = MetricPattern4(client, f'{base_path}_price_1m_min') + self.price_1w_max: MetricPattern4[Dollars] = MetricPattern4(client, 
f'{base_path}_price_1w_max') + self.price_1w_min: MetricPattern4[Dollars] = MetricPattern4(client, f'{base_path}_price_1w_min') + self.price_1y_max: MetricPattern4[Dollars] = MetricPattern4(client, f'{base_path}_price_1y_max') + self.price_1y_min: MetricPattern4[Dollars] = MetricPattern4(client, f'{base_path}_price_1y_min') + self.price_2w_choppiness_index: MetricPattern4[StoredF32] = MetricPattern4(client, f'{base_path}_price_2w_choppiness_index') + self.price_2w_max: MetricPattern4[Dollars] = MetricPattern4(client, f'{base_path}_price_2w_max') + self.price_2w_min: MetricPattern4[Dollars] = MetricPattern4(client, f'{base_path}_price_2w_min') + self.price_true_range: MetricPattern21[StoredF32] = MetricPattern21(client, f'{base_path}_price_true_range') + self.price_true_range_2w_sum: MetricPattern21[StoredF32] = MetricPattern21(client, f'{base_path}_price_true_range_2w_sum') + +class CatalogTree_Computed_Market_Returns: + """Catalog tree node.""" + + def __init__(self, client: BrkClientBase, base_path: str = ''): + self._1d_returns_1m_sd: _1dReturns1mSdPattern = _1dReturns1mSdPattern(client, '1d_returns_1m_sd') + self._1d_returns_1w_sd: _1dReturns1mSdPattern = _1dReturns1mSdPattern(client, '1d_returns_1w_sd') + self._1d_returns_1y_sd: _1dReturns1mSdPattern = _1dReturns1mSdPattern(client, '1d_returns_1y_sd') + self._10y_cagr: MetricPattern4[StoredF32] = MetricPattern4(client, f'{base_path}__10y_cagr') + self._10y_price_returns: MetricPattern4[StoredF32] = MetricPattern4(client, f'{base_path}__10y_price_returns') + self._1d_price_returns: MetricPattern4[StoredF32] = MetricPattern4(client, f'{base_path}__1d_price_returns') + self._1m_price_returns: MetricPattern4[StoredF32] = MetricPattern4(client, f'{base_path}__1m_price_returns') + self._1w_price_returns: MetricPattern4[StoredF32] = MetricPattern4(client, f'{base_path}__1w_price_returns') + self._1y_price_returns: MetricPattern4[StoredF32] = MetricPattern4(client, f'{base_path}__1y_price_returns') + self._2y_cagr: 
MetricPattern4[StoredF32] = MetricPattern4(client, f'{base_path}__2y_cagr') + self._2y_price_returns: MetricPattern4[StoredF32] = MetricPattern4(client, f'{base_path}__2y_price_returns') + self._3m_price_returns: MetricPattern4[StoredF32] = MetricPattern4(client, f'{base_path}__3m_price_returns') + self._3y_cagr: MetricPattern4[StoredF32] = MetricPattern4(client, f'{base_path}__3y_cagr') + self._3y_price_returns: MetricPattern4[StoredF32] = MetricPattern4(client, f'{base_path}__3y_price_returns') + self._4y_cagr: MetricPattern4[StoredF32] = MetricPattern4(client, f'{base_path}__4y_cagr') + self._4y_price_returns: MetricPattern4[StoredF32] = MetricPattern4(client, f'{base_path}__4y_price_returns') + self._5y_cagr: MetricPattern4[StoredF32] = MetricPattern4(client, f'{base_path}__5y_cagr') + self._5y_price_returns: MetricPattern4[StoredF32] = MetricPattern4(client, f'{base_path}__5y_price_returns') + self._6m_price_returns: MetricPattern4[StoredF32] = MetricPattern4(client, f'{base_path}__6m_price_returns') + self._6y_cagr: MetricPattern4[StoredF32] = MetricPattern4(client, f'{base_path}__6y_cagr') + self._6y_price_returns: MetricPattern4[StoredF32] = MetricPattern4(client, f'{base_path}__6y_price_returns') + self._8y_cagr: MetricPattern4[StoredF32] = MetricPattern4(client, f'{base_path}__8y_cagr') + self._8y_price_returns: MetricPattern4[StoredF32] = MetricPattern4(client, f'{base_path}__8y_price_returns') + self.downside_1m_sd: _1dReturns1mSdPattern = _1dReturns1mSdPattern(client, 'downside_1m_sd') + self.downside_1w_sd: _1dReturns1mSdPattern = _1dReturns1mSdPattern(client, 'downside_1w_sd') + self.downside_1y_sd: _1dReturns1mSdPattern = _1dReturns1mSdPattern(client, 'downside_1y_sd') + self.downside_returns: MetricPattern21[StoredF32] = MetricPattern21(client, f'{base_path}_downside_returns') + +class CatalogTree_Computed_Market_Volatility: + """Catalog tree node.""" + + def __init__(self, client: BrkClientBase, base_path: str = ''): + self.price_1m_volatility: 
MetricPattern4[StoredF32] = MetricPattern4(client, f'{base_path}_price_1m_volatility') + self.price_1w_volatility: MetricPattern4[StoredF32] = MetricPattern4(client, f'{base_path}_price_1w_volatility') + self.price_1y_volatility: MetricPattern4[StoredF32] = MetricPattern4(client, f'{base_path}_price_1y_volatility') + self.sharpe_1m: MetricPattern21[StoredF32] = MetricPattern21(client, f'{base_path}_sharpe_1m') + self.sharpe_1w: MetricPattern21[StoredF32] = MetricPattern21(client, f'{base_path}_sharpe_1w') + self.sharpe_1y: MetricPattern21[StoredF32] = MetricPattern21(client, f'{base_path}_sharpe_1y') + self.sortino_1m: MetricPattern21[StoredF32] = MetricPattern21(client, f'{base_path}_sortino_1m') + self.sortino_1w: MetricPattern21[StoredF32] = MetricPattern21(client, f'{base_path}_sortino_1w') + self.sortino_1y: MetricPattern21[StoredF32] = MetricPattern21(client, f'{base_path}_sortino_1y') + +class CatalogTree_Computed_Outputs: + """Catalog tree node.""" + + def __init__(self, client: BrkClientBase, base_path: str = ''): + self.count: CatalogTree_Computed_Outputs_Count = CatalogTree_Computed_Outputs_Count(client, f'{base_path}_count') + self.spent: CatalogTree_Computed_Outputs_Spent = CatalogTree_Computed_Outputs_Spent(client, f'{base_path}_spent') + +class CatalogTree_Computed_Outputs_Count: + """Catalog tree node.""" + + def __init__(self, client: BrkClientBase, base_path: str = ''): + self.count: BlockSizePattern[StoredU64] = BlockSizePattern(client, 'output_count') + self.utxo_count: BitcoinPattern[StoredU64] = BitcoinPattern(client, 'exact_utxo_count') + +class CatalogTree_Computed_Outputs_Spent: + """Catalog tree node.""" + + def __init__(self, client: BrkClientBase, base_path: str = ''): + self.txinindex: MetricPattern29[TxInIndex] = MetricPattern29(client, f'{base_path}_txinindex') + +class CatalogTree_Computed_Pools: + """Catalog tree node.""" + + def __init__(self, client: BrkClientBase, base_path: str = ''): + self.pool: MetricPattern25[PoolSlug] = 
MetricPattern25(client, f'{base_path}_pool') + self.vecs: CatalogTree_Computed_Pools_Vecs = CatalogTree_Computed_Pools_Vecs(client, f'{base_path}_vecs') + +class CatalogTree_Computed_Pools_Vecs: + """Catalog tree node.""" + + def __init__(self, client: BrkClientBase, base_path: str = ''): + self.axbt: AXbtPattern = AXbtPattern(client, 'axbt') + self.aaopool: AXbtPattern = AXbtPattern(client, 'aaopool') + self.antpool: AXbtPattern = AXbtPattern(client, 'antpool') + self.arkpool: AXbtPattern = AXbtPattern(client, 'arkpool') + self.asicminer: AXbtPattern = AXbtPattern(client, 'asicminer') + self.batpool: AXbtPattern = AXbtPattern(client, 'batpool') + self.bcmonster: AXbtPattern = AXbtPattern(client, 'bcmonster') + self.bcpoolio: AXbtPattern = AXbtPattern(client, 'bcpoolio') + self.binancepool: AXbtPattern = AXbtPattern(client, 'binancepool') + self.bitclub: AXbtPattern = AXbtPattern(client, 'bitclub') + self.bitfufupool: AXbtPattern = AXbtPattern(client, 'bitfufupool') + self.bitfury: AXbtPattern = AXbtPattern(client, 'bitfury') + self.bitminter: AXbtPattern = AXbtPattern(client, 'bitminter') + self.bitalo: AXbtPattern = AXbtPattern(client, 'bitalo') + self.bitcoinaffiliatenetwork: AXbtPattern = AXbtPattern(client, 'bitcoinaffiliatenetwork') + self.bitcoincom: AXbtPattern = AXbtPattern(client, 'bitcoincom') + self.bitcoinindia: AXbtPattern = AXbtPattern(client, 'bitcoinindia') + self.bitcoinrussia: AXbtPattern = AXbtPattern(client, 'bitcoinrussia') + self.bitcoinukraine: AXbtPattern = AXbtPattern(client, 'bitcoinukraine') + self.bitfarms: AXbtPattern = AXbtPattern(client, 'bitfarms') + self.bitparking: AXbtPattern = AXbtPattern(client, 'bitparking') + self.bitsolo: AXbtPattern = AXbtPattern(client, 'bitsolo') + self.bixin: AXbtPattern = AXbtPattern(client, 'bixin') + self.blockfills: AXbtPattern = AXbtPattern(client, 'blockfills') + self.braiinspool: AXbtPattern = AXbtPattern(client, 'braiinspool') + self.bravomining: AXbtPattern = AXbtPattern(client, 'bravomining') + 
self.btpool: AXbtPattern = AXbtPattern(client, 'btpool') + self.btccom: AXbtPattern = AXbtPattern(client, 'btccom') + self.btcdig: AXbtPattern = AXbtPattern(client, 'btcdig') + self.btcguild: AXbtPattern = AXbtPattern(client, 'btcguild') + self.btclab: AXbtPattern = AXbtPattern(client, 'btclab') + self.btcmp: AXbtPattern = AXbtPattern(client, 'btcmp') + self.btcnuggets: AXbtPattern = AXbtPattern(client, 'btcnuggets') + self.btcpoolparty: AXbtPattern = AXbtPattern(client, 'btcpoolparty') + self.btcserv: AXbtPattern = AXbtPattern(client, 'btcserv') + self.btctop: AXbtPattern = AXbtPattern(client, 'btctop') + self.btcc: AXbtPattern = AXbtPattern(client, 'btcc') + self.bwpool: AXbtPattern = AXbtPattern(client, 'bwpool') + self.bytepool: AXbtPattern = AXbtPattern(client, 'bytepool') + self.canoe: AXbtPattern = AXbtPattern(client, 'canoe') + self.canoepool: AXbtPattern = AXbtPattern(client, 'canoepool') + self.carbonnegative: AXbtPattern = AXbtPattern(client, 'carbonnegative') + self.ckpool: AXbtPattern = AXbtPattern(client, 'ckpool') + self.cloudhashing: AXbtPattern = AXbtPattern(client, 'cloudhashing') + self.coinlab: AXbtPattern = AXbtPattern(client, 'coinlab') + self.cointerra: AXbtPattern = AXbtPattern(client, 'cointerra') + self.connectbtc: AXbtPattern = AXbtPattern(client, 'connectbtc') + self.dpool: AXbtPattern = AXbtPattern(client, 'dpool') + self.dcexploration: AXbtPattern = AXbtPattern(client, 'dcexploration') + self.dcex: AXbtPattern = AXbtPattern(client, 'dcex') + self.digitalbtc: AXbtPattern = AXbtPattern(client, 'digitalbtc') + self.digitalxmintsy: AXbtPattern = AXbtPattern(client, 'digitalxmintsy') + self.eclipsemc: AXbtPattern = AXbtPattern(client, 'eclipsemc') + self.eightbaochi: AXbtPattern = AXbtPattern(client, 'eightbaochi') + self.ekanembtc: AXbtPattern = AXbtPattern(client, 'ekanembtc') + self.eligius: AXbtPattern = AXbtPattern(client, 'eligius') + self.emcdpool: AXbtPattern = AXbtPattern(client, 'emcdpool') + self.entrustcharitypool: AXbtPattern = 
AXbtPattern(client, 'entrustcharitypool') + self.eobot: AXbtPattern = AXbtPattern(client, 'eobot') + self.exxbw: AXbtPattern = AXbtPattern(client, 'exxbw') + self.f2pool: AXbtPattern = AXbtPattern(client, 'f2pool') + self.fiftyeightcoin: AXbtPattern = AXbtPattern(client, 'fiftyeightcoin') + self.foundryusa: AXbtPattern = AXbtPattern(client, 'foundryusa') + self.futurebitapollosolo: AXbtPattern = AXbtPattern(client, 'futurebitapollosolo') + self.gbminers: AXbtPattern = AXbtPattern(client, 'gbminers') + self.ghashio: AXbtPattern = AXbtPattern(client, 'ghashio') + self.givemecoins: AXbtPattern = AXbtPattern(client, 'givemecoins') + self.gogreenlight: AXbtPattern = AXbtPattern(client, 'gogreenlight') + self.haozhuzhu: AXbtPattern = AXbtPattern(client, 'haozhuzhu') + self.haominer: AXbtPattern = AXbtPattern(client, 'haominer') + self.hashbx: AXbtPattern = AXbtPattern(client, 'hashbx') + self.hashpool: AXbtPattern = AXbtPattern(client, 'hashpool') + self.helix: AXbtPattern = AXbtPattern(client, 'helix') + self.hhtt: AXbtPattern = AXbtPattern(client, 'hhtt') + self.hotpool: AXbtPattern = AXbtPattern(client, 'hotpool') + self.hummerpool: AXbtPattern = AXbtPattern(client, 'hummerpool') + self.huobipool: AXbtPattern = AXbtPattern(client, 'huobipool') + self.innopolistech: AXbtPattern = AXbtPattern(client, 'innopolistech') + self.kanopool: AXbtPattern = AXbtPattern(client, 'kanopool') + self.kncminer: AXbtPattern = AXbtPattern(client, 'kncminer') + self.kucoinpool: AXbtPattern = AXbtPattern(client, 'kucoinpool') + self.lubiancom: AXbtPattern = AXbtPattern(client, 'lubiancom') + self.luckypool: AXbtPattern = AXbtPattern(client, 'luckypool') + self.luxor: AXbtPattern = AXbtPattern(client, 'luxor') + self.marapool: AXbtPattern = AXbtPattern(client, 'marapool') + self.maxbtc: AXbtPattern = AXbtPattern(client, 'maxbtc') + self.maxipool: AXbtPattern = AXbtPattern(client, 'maxipool') + self.megabigpower: AXbtPattern = AXbtPattern(client, 'megabigpower') + self.minerium: AXbtPattern 
= AXbtPattern(client, 'minerium') + self.miningcity: AXbtPattern = AXbtPattern(client, 'miningcity') + self.miningdutch: AXbtPattern = AXbtPattern(client, 'miningdutch') + self.miningkings: AXbtPattern = AXbtPattern(client, 'miningkings') + self.miningsquared: AXbtPattern = AXbtPattern(client, 'miningsquared') + self.mmpool: AXbtPattern = AXbtPattern(client, 'mmpool') + self.mtred: AXbtPattern = AXbtPattern(client, 'mtred') + self.multicoinco: AXbtPattern = AXbtPattern(client, 'multicoinco') + self.multipool: AXbtPattern = AXbtPattern(client, 'multipool') + self.mybtccoinpool: AXbtPattern = AXbtPattern(client, 'mybtccoinpool') + self.neopool: AXbtPattern = AXbtPattern(client, 'neopool') + self.nexious: AXbtPattern = AXbtPattern(client, 'nexious') + self.nicehash: AXbtPattern = AXbtPattern(client, 'nicehash') + self.nmcbit: AXbtPattern = AXbtPattern(client, 'nmcbit') + self.novablock: AXbtPattern = AXbtPattern(client, 'novablock') + self.ocean: AXbtPattern = AXbtPattern(client, 'ocean') + self.okexpool: AXbtPattern = AXbtPattern(client, 'okexpool') + self.okminer: AXbtPattern = AXbtPattern(client, 'okminer') + self.okkong: AXbtPattern = AXbtPattern(client, 'okkong') + self.okpooltop: AXbtPattern = AXbtPattern(client, 'okpooltop') + self.onehash: AXbtPattern = AXbtPattern(client, 'onehash') + self.onem1x: AXbtPattern = AXbtPattern(client, 'onem1x') + self.onethash: AXbtPattern = AXbtPattern(client, 'onethash') + self.ozcoin: AXbtPattern = AXbtPattern(client, 'ozcoin') + self.phashio: AXbtPattern = AXbtPattern(client, 'phashio') + self.parasite: AXbtPattern = AXbtPattern(client, 'parasite') + self.patels: AXbtPattern = AXbtPattern(client, 'patels') + self.pegapool: AXbtPattern = AXbtPattern(client, 'pegapool') + self.phoenix: AXbtPattern = AXbtPattern(client, 'phoenix') + self.polmine: AXbtPattern = AXbtPattern(client, 'polmine') + self.pool175btc: AXbtPattern = AXbtPattern(client, 'pool175btc') + self.pool50btc: AXbtPattern = AXbtPattern(client, 'pool50btc') + 
self.poolin: AXbtPattern = AXbtPattern(client, 'poolin') + self.portlandhodl: AXbtPattern = AXbtPattern(client, 'portlandhodl') + self.publicpool: AXbtPattern = AXbtPattern(client, 'publicpool') + self.purebtccom: AXbtPattern = AXbtPattern(client, 'purebtccom') + self.rawpool: AXbtPattern = AXbtPattern(client, 'rawpool') + self.rigpool: AXbtPattern = AXbtPattern(client, 'rigpool') + self.sbicrypto: AXbtPattern = AXbtPattern(client, 'sbicrypto') + self.secpool: AXbtPattern = AXbtPattern(client, 'secpool') + self.secretsuperstar: AXbtPattern = AXbtPattern(client, 'secretsuperstar') + self.sevenpool: AXbtPattern = AXbtPattern(client, 'sevenpool') + self.shawnp0wers: AXbtPattern = AXbtPattern(client, 'shawnp0wers') + self.sigmapoolcom: AXbtPattern = AXbtPattern(client, 'sigmapoolcom') + self.simplecoinus: AXbtPattern = AXbtPattern(client, 'simplecoinus') + self.solock: AXbtPattern = AXbtPattern(client, 'solock') + self.spiderpool: AXbtPattern = AXbtPattern(client, 'spiderpool') + self.stminingcorp: AXbtPattern = AXbtPattern(client, 'stminingcorp') + self.tangpool: AXbtPattern = AXbtPattern(client, 'tangpool') + self.tatmaspool: AXbtPattern = AXbtPattern(client, 'tatmaspool') + self.tbdice: AXbtPattern = AXbtPattern(client, 'tbdice') + self.telco214: AXbtPattern = AXbtPattern(client, 'telco214') + self.terrapool: AXbtPattern = AXbtPattern(client, 'terrapool') + self.tiger: AXbtPattern = AXbtPattern(client, 'tiger') + self.tigerpoolnet: AXbtPattern = AXbtPattern(client, 'tigerpoolnet') + self.titan: AXbtPattern = AXbtPattern(client, 'titan') + self.transactioncoinmining: AXbtPattern = AXbtPattern(client, 'transactioncoinmining') + self.trickysbtcpool: AXbtPattern = AXbtPattern(client, 'trickysbtcpool') + self.triplemining: AXbtPattern = AXbtPattern(client, 'triplemining') + self.twentyoneinc: AXbtPattern = AXbtPattern(client, 'twentyoneinc') + self.ultimuspool: AXbtPattern = AXbtPattern(client, 'ultimuspool') + self.unknown: AXbtPattern = AXbtPattern(client, 'unknown') + 
self.unomp: AXbtPattern = AXbtPattern(client, 'unomp') + self.viabtc: AXbtPattern = AXbtPattern(client, 'viabtc') + self.waterhole: AXbtPattern = AXbtPattern(client, 'waterhole') + self.wayicn: AXbtPattern = AXbtPattern(client, 'wayicn') + self.whitepool: AXbtPattern = AXbtPattern(client, 'whitepool') + self.wk057: AXbtPattern = AXbtPattern(client, 'wk057') + self.yourbtcnet: AXbtPattern = AXbtPattern(client, 'yourbtcnet') + self.zulupool: AXbtPattern = AXbtPattern(client, 'zulupool') + +class CatalogTree_Computed_Positions: + """Catalog tree node.""" + + def __init__(self, client: BrkClientBase, base_path: str = ''): + self.position: MetricPattern16[BlkPosition] = MetricPattern16(client, f'{base_path}_position') + +class CatalogTree_Computed_Price: + """Catalog tree node.""" + + def __init__(self, client: BrkClientBase, base_path: str = ''): + self.ohlc: CatalogTree_Computed_Price_Ohlc = CatalogTree_Computed_Price_Ohlc(client, f'{base_path}_ohlc') + self.sats: CatalogTree_Computed_Price_Sats = CatalogTree_Computed_Price_Sats(client, f'{base_path}_sats') + self.usd: CatalogTree_Computed_Price_Usd = CatalogTree_Computed_Price_Usd(client, f'{base_path}_usd') + +class CatalogTree_Computed_Price_Ohlc: + """Catalog tree node.""" + + def __init__(self, client: BrkClientBase, base_path: str = ''): + self.ohlc_in_cents: MetricPattern9[OHLCCents] = MetricPattern9(client, f'{base_path}_ohlc_in_cents') + +class CatalogTree_Computed_Price_Sats: + """Catalog tree node.""" + + def __init__(self, client: BrkClientBase, base_path: str = ''): + self.price_close_in_sats: MetricPattern1[Sats] = MetricPattern1(client, f'{base_path}_price_close_in_sats') + self.price_high_in_sats: MetricPattern1[Sats] = MetricPattern1(client, f'{base_path}_price_high_in_sats') + self.price_low_in_sats: MetricPattern1[Sats] = MetricPattern1(client, f'{base_path}_price_low_in_sats') + self.price_ohlc_in_sats: MetricPattern1[OHLCSats] = MetricPattern1(client, f'{base_path}_price_ohlc_in_sats') + 
self.price_open_in_sats: MetricPattern1[Sats] = MetricPattern1(client, f'{base_path}_price_open_in_sats') + +class CatalogTree_Computed_Price_Usd: + """Catalog tree node.""" + + def __init__(self, client: BrkClientBase, base_path: str = ''): + self.price_close: MetricPattern1[Dollars] = MetricPattern1(client, f'{base_path}_price_close') + self.price_close_in_cents: MetricPattern9[Cents] = MetricPattern9(client, f'{base_path}_price_close_in_cents') + self.price_high: MetricPattern1[Dollars] = MetricPattern1(client, f'{base_path}_price_high') + self.price_high_in_cents: MetricPattern9[Cents] = MetricPattern9(client, f'{base_path}_price_high_in_cents') + self.price_low: MetricPattern1[Dollars] = MetricPattern1(client, f'{base_path}_price_low') + self.price_low_in_cents: MetricPattern9[Cents] = MetricPattern9(client, f'{base_path}_price_low_in_cents') + self.price_ohlc: MetricPattern1[OHLCDollars] = MetricPattern1(client, f'{base_path}_price_ohlc') + self.price_open: MetricPattern1[Dollars] = MetricPattern1(client, f'{base_path}_price_open') + self.price_open_in_cents: MetricPattern9[Cents] = MetricPattern9(client, f'{base_path}_price_open_in_cents') + +class CatalogTree_Computed_Scripts: + """Catalog tree node.""" + + def __init__(self, client: BrkClientBase, base_path: str = ''): + self.count: CatalogTree_Computed_Scripts_Count = CatalogTree_Computed_Scripts_Count(client, f'{base_path}_count') + self.value: CatalogTree_Computed_Scripts_Value = CatalogTree_Computed_Scripts_Value(client, f'{base_path}_value') + +class CatalogTree_Computed_Scripts_Count: + """Catalog tree node.""" + + def __init__(self, client: BrkClientBase, base_path: str = ''): + self.emptyoutput_count: BitcoinPattern[StoredU64] = BitcoinPattern(client, 'emptyoutput_count') + self.opreturn_count: BitcoinPattern[StoredU64] = BitcoinPattern(client, 'opreturn_count') + self.p2a_count: BitcoinPattern[StoredU64] = BitcoinPattern(client, 'p2a_count') + self.p2ms_count: BitcoinPattern[StoredU64] = 
BitcoinPattern(client, 'p2ms_count') + self.p2pk33_count: BitcoinPattern[StoredU64] = BitcoinPattern(client, 'p2pk33_count') + self.p2pk65_count: BitcoinPattern[StoredU64] = BitcoinPattern(client, 'p2pk65_count') + self.p2pkh_count: BitcoinPattern[StoredU64] = BitcoinPattern(client, 'p2pkh_count') + self.p2sh_count: BitcoinPattern[StoredU64] = BitcoinPattern(client, 'p2sh_count') + self.p2tr_count: BitcoinPattern[StoredU64] = BitcoinPattern(client, 'p2tr_count') + self.p2wpkh_count: BitcoinPattern[StoredU64] = BitcoinPattern(client, 'p2wpkh_count') + self.p2wsh_count: BitcoinPattern[StoredU64] = BitcoinPattern(client, 'p2wsh_count') + self.segwit_adoption: SegwitAdoptionPattern[StoredF32] = SegwitAdoptionPattern(client, 'segwit_adoption') + self.segwit_count: BitcoinPattern[StoredU64] = BitcoinPattern(client, 'segwit_count') + self.taproot_adoption: SegwitAdoptionPattern[StoredF32] = SegwitAdoptionPattern(client, 'taproot_adoption') + self.unknownoutput_count: BitcoinPattern[StoredU64] = BitcoinPattern(client, 'unknownoutput_count') + +class CatalogTree_Computed_Scripts_Value: + """Catalog tree node.""" + + def __init__(self, client: BrkClientBase, base_path: str = ''): + self.opreturn_value: CatalogTree_Computed_Scripts_Value_OpreturnValue = CatalogTree_Computed_Scripts_Value_OpreturnValue(client, f'{base_path}_opreturn_value') + +class CatalogTree_Computed_Scripts_Value_OpreturnValue: + """Catalog tree node.""" + + def __init__(self, client: BrkClientBase, base_path: str = ''): + self.base: MetricPattern25[Sats] = MetricPattern25(client, f'{base_path}_base') + self.bitcoin: SegwitAdoptionPattern[Bitcoin] = SegwitAdoptionPattern(client, 'opreturn_value_btc') + self.dollars: SegwitAdoptionPattern[Dollars] = SegwitAdoptionPattern(client, 'opreturn_value_usd') + self.sats: CatalogTree_Computed_Scripts_Value_OpreturnValue_Sats = CatalogTree_Computed_Scripts_Value_OpreturnValue_Sats(client, f'{base_path}_sats') + +class 
CatalogTree_Computed_Scripts_Value_OpreturnValue_Sats: + """Catalog tree node.""" + + def __init__(self, client: BrkClientBase, base_path: str = ''): + self.average: MetricPattern2[Sats] = MetricPattern2(client, f'{base_path}_average') + self.cumulative: MetricPattern1[Sats] = MetricPattern1(client, f'{base_path}_cumulative') + self.max: MetricPattern2[Sats] = MetricPattern2(client, f'{base_path}_max') + self.min: MetricPattern2[Sats] = MetricPattern2(client, f'{base_path}_min') + self.sum: MetricPattern2[Sats] = MetricPattern2(client, f'{base_path}_sum') + +class CatalogTree_Computed_Supply: + """Catalog tree node.""" + + def __init__(self, client: BrkClientBase, base_path: str = ''): + self.burned: CatalogTree_Computed_Supply_Burned = CatalogTree_Computed_Supply_Burned(client, f'{base_path}_burned') + self.circulating: CatalogTree_Computed_Supply_Circulating = CatalogTree_Computed_Supply_Circulating(client, f'{base_path}_circulating') + self.inflation: CatalogTree_Computed_Supply_Inflation = CatalogTree_Computed_Supply_Inflation(client, f'{base_path}_inflation') + self.market_cap: CatalogTree_Computed_Supply_MarketCap = CatalogTree_Computed_Supply_MarketCap(client, f'{base_path}_market_cap') + self.velocity: CatalogTree_Computed_Supply_Velocity = CatalogTree_Computed_Supply_Velocity(client, f'{base_path}_velocity') + +class CatalogTree_Computed_Supply_Burned: + """Catalog tree node.""" + + def __init__(self, client: BrkClientBase, base_path: str = ''): + self.opreturn: OpreturnPattern = OpreturnPattern(client, 'opreturn_supply') + self.unspendable: OpreturnPattern = OpreturnPattern(client, 'unspendable_supply') + +class CatalogTree_Computed_Supply_Circulating: + """Catalog tree node.""" + + def __init__(self, client: BrkClientBase, base_path: str = ''): + self.btc: MetricPattern25[Bitcoin] = MetricPattern25(client, f'{base_path}_btc') + self.indexes: ActiveSupplyPattern = ActiveSupplyPattern(client, 'circulating') + self.sats: MetricPattern25[Sats] = 
MetricPattern25(client, f'{base_path}_sats') + self.usd: MetricPattern25[Dollars] = MetricPattern25(client, f'{base_path}_usd') + +class CatalogTree_Computed_Supply_Inflation: + """Catalog tree node.""" + + def __init__(self, client: BrkClientBase, base_path: str = ''): + self.indexes: MetricPattern4[StoredF32] = MetricPattern4(client, f'{base_path}_indexes') + +class CatalogTree_Computed_Supply_MarketCap: + """Catalog tree node.""" + + def __init__(self, client: BrkClientBase, base_path: str = ''): + self.height: MetricPattern25[Dollars] = MetricPattern25(client, f'{base_path}_height') + self.indexes: MetricPattern4[Dollars] = MetricPattern4(client, f'{base_path}_indexes') + +class CatalogTree_Computed_Supply_Velocity: + """Catalog tree node.""" + + def __init__(self, client: BrkClientBase, base_path: str = ''): + self.btc: MetricPattern4[StoredF64] = MetricPattern4(client, f'{base_path}_btc') + self.usd: MetricPattern4[StoredF64] = MetricPattern4(client, f'{base_path}_usd') + +class CatalogTree_Computed_Transactions: + """Catalog tree node.""" + + def __init__(self, client: BrkClientBase, base_path: str = ''): + self.count: CatalogTree_Computed_Transactions_Count = CatalogTree_Computed_Transactions_Count(client, f'{base_path}_count') + self.fees: CatalogTree_Computed_Transactions_Fees = CatalogTree_Computed_Transactions_Fees(client, f'{base_path}_fees') + self.size: CatalogTree_Computed_Transactions_Size = CatalogTree_Computed_Transactions_Size(client, f'{base_path}_size') + self.versions: CatalogTree_Computed_Transactions_Versions = CatalogTree_Computed_Transactions_Versions(client, f'{base_path}_versions') + self.volume: CatalogTree_Computed_Transactions_Volume = CatalogTree_Computed_Transactions_Volume(client, f'{base_path}_volume') + +class CatalogTree_Computed_Transactions_Count: + """Catalog tree node.""" + + def __init__(self, client: BrkClientBase, base_path: str = ''): + self.is_coinbase: MetricPattern41[StoredBool] = MetricPattern41(client, 
f'{base_path}_is_coinbase') + self.tx_count: BitcoinPattern[StoredU64] = BitcoinPattern(client, 'tx_count') + +class CatalogTree_Computed_Transactions_Fees: + """Catalog tree node.""" + + def __init__(self, client: BrkClientBase, base_path: str = ''): + self.fee: CatalogTree_Computed_Transactions_Fees_Fee = CatalogTree_Computed_Transactions_Fees_Fee(client, f'{base_path}_fee') + self.fee_rate: CatalogTree_Computed_Transactions_Fees_FeeRate = CatalogTree_Computed_Transactions_Fees_FeeRate(client, f'{base_path}_fee_rate') + self.input_value: MetricPattern41[Sats] = MetricPattern41(client, f'{base_path}_input_value') + self.output_value: MetricPattern41[Sats] = MetricPattern41(client, f'{base_path}_output_value') + +class CatalogTree_Computed_Transactions_Fees_Fee: + """Catalog tree node.""" + + def __init__(self, client: BrkClientBase, base_path: str = ''): + self.base: MetricPattern41[Sats] = MetricPattern41(client, f'{base_path}_base') + self.bitcoin: BlockSizePattern[Bitcoin] = BlockSizePattern(client, 'fee_btc') + self.bitcoin_txindex: MetricPattern41[Bitcoin] = MetricPattern41(client, f'{base_path}_bitcoin_txindex') + self.dollars: BlockSizePattern[Dollars] = BlockSizePattern(client, 'fee_usd') + self.dollars_txindex: MetricPattern41[Dollars] = MetricPattern41(client, f'{base_path}_dollars_txindex') + self.sats: BlockSizePattern[Sats] = BlockSizePattern(client, 'fee') + +class CatalogTree_Computed_Transactions_Fees_FeeRate: + """Catalog tree node.""" + + def __init__(self, client: BrkClientBase, base_path: str = ''): + self.average: MetricPattern1[FeeRate] = MetricPattern1(client, f'{base_path}_average') + self.base: MetricPattern41[FeeRate] = MetricPattern41(client, f'{base_path}_base') + self.max: MetricPattern1[FeeRate] = MetricPattern1(client, f'{base_path}_max') + self.median: MetricPattern25[FeeRate] = MetricPattern25(client, f'{base_path}_median') + self.min: MetricPattern1[FeeRate] = MetricPattern1(client, f'{base_path}_min') + self.pct10: 
MetricPattern25[FeeRate] = MetricPattern25(client, f'{base_path}_pct10') + self.pct25: MetricPattern25[FeeRate] = MetricPattern25(client, f'{base_path}_pct25') + self.pct75: MetricPattern25[FeeRate] = MetricPattern25(client, f'{base_path}_pct75') + self.pct90: MetricPattern25[FeeRate] = MetricPattern25(client, f'{base_path}_pct90') + +class CatalogTree_Computed_Transactions_Size: + """Catalog tree node.""" + + def __init__(self, client: BrkClientBase, base_path: str = ''): + self.tx_vsize: BlockIntervalPattern[VSize] = BlockIntervalPattern(client, 'tx_vsize') + self.tx_weight: BlockIntervalPattern[Weight] = BlockIntervalPattern(client, 'tx_weight') + self.vsize: MetricPattern41[VSize] = MetricPattern41(client, f'{base_path}_vsize') + self.weight: MetricPattern41[Weight] = MetricPattern41(client, f'{base_path}_weight') + +class CatalogTree_Computed_Transactions_Versions: + """Catalog tree node.""" + + def __init__(self, client: BrkClientBase, base_path: str = ''): + self.tx_v1: BlockCountPattern[StoredU64] = BlockCountPattern(client, 'tx_v1') + self.tx_v2: BlockCountPattern[StoredU64] = BlockCountPattern(client, 'tx_v2') + self.tx_v3: BlockCountPattern[StoredU64] = BlockCountPattern(client, 'tx_v3') + +class CatalogTree_Computed_Transactions_Volume: + """Catalog tree node.""" + + def __init__(self, client: BrkClientBase, base_path: str = ''): + self.annualized_volume: MetricPattern4[Sats] = MetricPattern4(client, f'{base_path}_annualized_volume') + self.annualized_volume_btc: MetricPattern4[Bitcoin] = MetricPattern4(client, f'{base_path}_annualized_volume_btc') + self.annualized_volume_usd: MetricPattern4[Dollars] = MetricPattern4(client, f'{base_path}_annualized_volume_usd') + self.inputs_per_sec: MetricPattern4[StoredF32] = MetricPattern4(client, f'{base_path}_inputs_per_sec') + self.outputs_per_sec: MetricPattern4[StoredF32] = MetricPattern4(client, f'{base_path}_outputs_per_sec') + self.sent_sum: CatalogTree_Computed_Transactions_Volume_SentSum = 
CatalogTree_Computed_Transactions_Volume_SentSum(client, f'{base_path}_sent_sum') + self.tx_per_sec: MetricPattern4[StoredF32] = MetricPattern4(client, f'{base_path}_tx_per_sec') + +class CatalogTree_Computed_Transactions_Volume_SentSum: + """Catalog tree node.""" + + def __init__(self, client: BrkClientBase, base_path: str = ''): + self.bitcoin: TotalRealizedPnlPattern[Bitcoin] = TotalRealizedPnlPattern(client, 'sent_sum_btc') + self.dollars: MetricPattern1[Dollars] = MetricPattern1(client, f'{base_path}_dollars') + self.sats: MetricPattern1[Sats] = MetricPattern1(client, f'{base_path}_sats') + +class CatalogTree_Indexed: + """Catalog tree node.""" + + def __init__(self, client: BrkClientBase, base_path: str = ''): + self.address: CatalogTree_Indexed_Address = CatalogTree_Indexed_Address(client, f'{base_path}_address') + self.block: CatalogTree_Indexed_Block = CatalogTree_Indexed_Block(client, f'{base_path}_block') + self.output: CatalogTree_Indexed_Output = CatalogTree_Indexed_Output(client, f'{base_path}_output') + self.tx: CatalogTree_Indexed_Tx = CatalogTree_Indexed_Tx(client, f'{base_path}_tx') + self.txin: CatalogTree_Indexed_Txin = CatalogTree_Indexed_Txin(client, f'{base_path}_txin') + self.txout: CatalogTree_Indexed_Txout = CatalogTree_Indexed_Txout(client, f'{base_path}_txout') + +class CatalogTree_Indexed_Address: + """Catalog tree node.""" + + def __init__(self, client: BrkClientBase, base_path: str = ''): + self.first_p2aaddressindex: MetricPattern25[P2AAddressIndex] = MetricPattern25(client, f'{base_path}_first_p2aaddressindex') + self.first_p2pk33addressindex: MetricPattern25[P2PK33AddressIndex] = MetricPattern25(client, f'{base_path}_first_p2pk33addressindex') + self.first_p2pk65addressindex: MetricPattern25[P2PK65AddressIndex] = MetricPattern25(client, f'{base_path}_first_p2pk65addressindex') + self.first_p2pkhaddressindex: MetricPattern25[P2PKHAddressIndex] = MetricPattern25(client, f'{base_path}_first_p2pkhaddressindex') + 
self.first_p2shaddressindex: MetricPattern25[P2SHAddressIndex] = MetricPattern25(client, f'{base_path}_first_p2shaddressindex') + self.first_p2traddressindex: MetricPattern25[P2TRAddressIndex] = MetricPattern25(client, f'{base_path}_first_p2traddressindex') + self.first_p2wpkhaddressindex: MetricPattern25[P2WPKHAddressIndex] = MetricPattern25(client, f'{base_path}_first_p2wpkhaddressindex') + self.first_p2wshaddressindex: MetricPattern25[P2WSHAddressIndex] = MetricPattern25(client, f'{base_path}_first_p2wshaddressindex') + self.p2abytes: MetricPattern30[P2ABytes] = MetricPattern30(client, f'{base_path}_p2abytes') + self.p2pk33bytes: MetricPattern32[P2PK33Bytes] = MetricPattern32(client, f'{base_path}_p2pk33bytes') + self.p2pk65bytes: MetricPattern33[P2PK65Bytes] = MetricPattern33(client, f'{base_path}_p2pk65bytes') + self.p2pkhbytes: MetricPattern34[P2PKHBytes] = MetricPattern34(client, f'{base_path}_p2pkhbytes') + self.p2shbytes: MetricPattern35[P2SHBytes] = MetricPattern35(client, f'{base_path}_p2shbytes') + self.p2trbytes: MetricPattern36[P2TRBytes] = MetricPattern36(client, f'{base_path}_p2trbytes') + self.p2wpkhbytes: MetricPattern37[P2WPKHBytes] = MetricPattern37(client, f'{base_path}_p2wpkhbytes') + self.p2wshbytes: MetricPattern38[P2WSHBytes] = MetricPattern38(client, f'{base_path}_p2wshbytes') + +class CatalogTree_Indexed_Block: + """Catalog tree node.""" + + def __init__(self, client: BrkClientBase, base_path: str = ''): + self.blockhash: MetricPattern25[BlockHash] = MetricPattern25(client, f'{base_path}_blockhash') + self.difficulty: MetricPattern25[StoredF64] = MetricPattern25(client, f'{base_path}_difficulty') + self.timestamp: MetricPattern25[Timestamp] = MetricPattern25(client, f'{base_path}_timestamp') + self.total_size: MetricPattern25[StoredU64] = MetricPattern25(client, f'{base_path}_total_size') + self.weight: MetricPattern25[Weight] = MetricPattern25(client, f'{base_path}_weight') + +class CatalogTree_Indexed_Output: + """Catalog tree node.""" 
+ + def __init__(self, client: BrkClientBase, base_path: str = ''): + self.first_emptyoutputindex: MetricPattern25[EmptyOutputIndex] = MetricPattern25(client, f'{base_path}_first_emptyoutputindex') + self.first_opreturnindex: MetricPattern25[OpReturnIndex] = MetricPattern25(client, f'{base_path}_first_opreturnindex') + self.first_p2msoutputindex: MetricPattern25[P2MSOutputIndex] = MetricPattern25(client, f'{base_path}_first_p2msoutputindex') + self.first_unknownoutputindex: MetricPattern25[UnknownOutputIndex] = MetricPattern25(client, f'{base_path}_first_unknownoutputindex') + self.txindex: MetricPattern7[TxIndex] = MetricPattern7(client, f'{base_path}_txindex') + +class CatalogTree_Indexed_Tx: + """Catalog tree node.""" + + def __init__(self, client: BrkClientBase, base_path: str = ''): + self.base_size: MetricPattern41[StoredU32] = MetricPattern41(client, f'{base_path}_base_size') + self.first_txindex: MetricPattern25[TxIndex] = MetricPattern25(client, f'{base_path}_first_txindex') + self.first_txinindex: MetricPattern41[TxInIndex] = MetricPattern41(client, f'{base_path}_first_txinindex') + self.first_txoutindex: MetricPattern41[TxOutIndex] = MetricPattern41(client, f'{base_path}_first_txoutindex') + self.height: MetricPattern41[Height] = MetricPattern41(client, f'{base_path}_height') + self.is_explicitly_rbf: MetricPattern41[StoredBool] = MetricPattern41(client, f'{base_path}_is_explicitly_rbf') + self.rawlocktime: MetricPattern41[RawLockTime] = MetricPattern41(client, f'{base_path}_rawlocktime') + self.total_size: MetricPattern41[StoredU32] = MetricPattern41(client, f'{base_path}_total_size') + self.txid: MetricPattern41[Txid] = MetricPattern41(client, f'{base_path}_txid') + self.txversion: MetricPattern41[TxVersion] = MetricPattern41(client, f'{base_path}_txversion') + +class CatalogTree_Indexed_Txin: + """Catalog tree node.""" + + def __init__(self, client: BrkClientBase, base_path: str = ''): + self.first_txinindex: MetricPattern25[TxInIndex] = 
MetricPattern25(client, f'{base_path}_first_txinindex') + self.outpoint: MetricPattern26[OutPoint] = MetricPattern26(client, f'{base_path}_outpoint') + self.outputtype: MetricPattern26[OutputType] = MetricPattern26(client, f'{base_path}_outputtype') + self.txindex: MetricPattern26[TxIndex] = MetricPattern26(client, f'{base_path}_txindex') + self.typeindex: MetricPattern26[TypeIndex] = MetricPattern26(client, f'{base_path}_typeindex') + +class CatalogTree_Indexed_Txout: + """Catalog tree node.""" + + def __init__(self, client: BrkClientBase, base_path: str = ''): + self.first_txoutindex: MetricPattern25[TxOutIndex] = MetricPattern25(client, f'{base_path}_first_txoutindex') + self.outputtype: MetricPattern29[OutputType] = MetricPattern29(client, f'{base_path}_outputtype') + self.txindex: MetricPattern29[TxIndex] = MetricPattern29(client, f'{base_path}_txindex') + self.typeindex: MetricPattern29[TypeIndex] = MetricPattern29(client, f'{base_path}_typeindex') + self.value: MetricPattern29[Sats] = MetricPattern29(client, f'{base_path}_value') + +class BrkClient(BrkClientBase): + """Main BRK client with catalog tree and API methods.""" + + VERSION = "v0.1.0-alpha.1" + + INDEXES = [ + "dateindex", + "decadeindex", + "difficultyepoch", + "emptyoutputindex", + "halvingepoch", + "height", + "txinindex", + "monthindex", + "opreturnindex", + "txoutindex", + "p2aaddressindex", + "p2msoutputindex", + "p2pk33addressindex", + "p2pk65addressindex", + "p2pkhaddressindex", + "p2shaddressindex", + "p2traddressindex", + "p2wpkhaddressindex", + "p2wshaddressindex", + "quarterindex", + "semesterindex", + "txindex", + "unknownoutputindex", + "weekindex", + "yearindex", + "loadedaddressindex", + "emptyaddressindex" + ] + + POOL_ID_TO_POOL_NAME = { + "aaopool": "AAO Pool", + "antpool": "AntPool", + "arkpool": "ArkPool", + "asicminer": "ASICMiner", + "axbt": "A-XBT", + "batpool": "BATPOOL", + "bcmonster": "BCMonster", + "bcpoolio": "bcpool.io", + "binancepool": "Binance Pool", + "bitalo": 
"Bitalo", + "bitclub": "BitClub", + "bitcoinaffiliatenetwork": "Bitcoin Affiliate Network", + "bitcoincom": "Bitcoin.com", + "bitcoinindia": "Bitcoin India", + "bitcoinrussia": "BitcoinRussia", + "bitcoinukraine": "Bitcoin-Ukraine", + "bitfarms": "Bitfarms", + "bitfufupool": "BitFuFuPool", + "bitfury": "BitFury", + "bitminter": "BitMinter", + "bitparking": "Bitparking", + "bitsolo": "Bitsolo", + "bixin": "Bixin", + "blockfills": "BlockFills", + "braiinspool": "Braiins Pool", + "bravomining": "Bravo Mining", + "btcc": "BTCC", + "btccom": "BTC.com", + "btcdig": "BTCDig", + "btcguild": "BTC Guild", + "btclab": "BTCLab", + "btcmp": "BTCMP", + "btcnuggets": "BTC Nuggets", + "btcpoolparty": "BTC Pool Party", + "btcserv": "BTCServ", + "btctop": "BTC.TOP", + "btpool": "BTPOOL", + "bwpool": "BWPool", + "bytepool": "BytePool", + "canoe": "CANOE", + "canoepool": "CanoePool", + "carbonnegative": "Carbon Negative", + "ckpool": "CKPool", + "cloudhashing": "CloudHashing", + "coinlab": "CoinLab", + "cointerra": "Cointerra", + "connectbtc": "ConnectBTC", + "dcex": "DCEX", + "dcexploration": "DCExploration", + "digitalbtc": "digitalBTC", + "digitalxmintsy": "digitalX Mintsy", + "dpool": "DPOOL", + "eclipsemc": "EclipseMC", + "eightbaochi": "8baochi", + "ekanembtc": "EkanemBTC", + "eligius": "Eligius", + "emcdpool": "EMCDPool", + "entrustcharitypool": "Entrust Charity Pool", + "eobot": "Eobot", + "exxbw": "EXX&BW", + "f2pool": "F2Pool", + "fiftyeightcoin": "58COIN", + "foundryusa": "Foundry USA", + "futurebitapollosolo": "FutureBit Apollo Solo", + "gbminers": "GBMiners", + "ghashio": "GHash.IO", + "givemecoins": "Give Me Coins", + "gogreenlight": "GoGreenLight", + "haominer": "haominer", + "haozhuzhu": "HAOZHUZHU", + "hashbx": "HashBX", + "hashpool": "HASHPOOL", + "helix": "Helix", + "hhtt": "HHTT", + "hotpool": "HotPool", + "hummerpool": "Hummerpool", + "huobipool": "Huobi.pool", + "innopolistech": "Innopolis Tech", + "kanopool": "KanoPool", + "kncminer": "KnCMiner", + "kucoinpool": 
"KuCoinPool", + "lubiancom": "Lubian.com", + "luckypool": "luckyPool", + "luxor": "Luxor", + "marapool": "MARA Pool", + "maxbtc": "MaxBTC", + "maxipool": "MaxiPool", + "megabigpower": "MegaBigPower", + "minerium": "Minerium", + "miningcity": "MiningCity", + "miningdutch": "Mining-Dutch", + "miningkings": "MiningKings", + "miningsquared": "Mining Squared", + "mmpool": "mmpool", + "mtred": "Mt Red", + "multicoinco": "MultiCoin.co", + "multipool": "Multipool", + "mybtccoinpool": "myBTCcoin Pool", + "neopool": "Neopool", + "nexious": "Nexious", + "nicehash": "NiceHash", + "nmcbit": "NMCbit", + "novablock": "NovaBlock", + "ocean": "OCEAN", + "okexpool": "OKExPool", + "okkong": "OKKONG", + "okminer": "OKMINER", + "okpooltop": "okpool.top", + "onehash": "1Hash", + "onem1x": "1M1X", + "onethash": "1THash", + "ozcoin": "OzCoin", + "parasite": "Parasite", + "patels": "Patels", + "pegapool": "PEGA Pool", + "phashio": "PHash.IO", + "phoenix": "Phoenix", + "polmine": "Polmine", + "pool175btc": "175btc", + "pool50btc": "50BTC", + "poolin": "Poolin", + "portlandhodl": "Portland.HODL", + "publicpool": "Public Pool", + "purebtccom": "PureBTC.COM", + "rawpool": "Rawpool", + "rigpool": "RigPool", + "sbicrypto": "SBI Crypto", + "secpool": "SECPOOL", + "secretsuperstar": "SecretSuperstar", + "sevenpool": "7pool", + "shawnp0wers": "shawnp0wers", + "sigmapoolcom": "Sigmapool.com", + "simplecoinus": "simplecoin.us", + "solock": "Solo CK", + "spiderpool": "SpiderPool", + "stminingcorp": "ST Mining Corp", + "tangpool": "Tangpool", + "tatmaspool": "TATMAS Pool", + "tbdice": "TBDice", + "telco214": "Telco 214", + "terrapool": "Terra Pool", + "tiger": "tiger", + "tigerpoolnet": "tigerpool.net", + "titan": "Titan", + "transactioncoinmining": "transactioncoinmining", + "trickysbtcpool": "Tricky's BTC Pool", + "triplemining": "TripleMining", + "twentyoneinc": "21 Inc.", + "ultimuspool": "ULTIMUSPOOL", + "unknown": "Unknown", + "unomp": "UNOMP", + "viabtc": "ViaBTC", + "waterhole": "Waterhole", + 
"wayicn": "WAYI.CN", + "whitepool": "WhitePool", + "wk057": "wk057", + "yourbtcnet": "Yourbtc.net", + "zulupool": "Zulupool" + } + + TERM_NAMES = { + "short": { + "id": "sth", + "short": "STH", + "long": "Short Term Holders" + }, + "long": { + "id": "lth", + "short": "LTH", + "long": "Long Term Holders" + } + } + + EPOCH_NAMES = { + "_0": { + "id": "epoch_0", + "short": "Epoch 0", + "long": "Epoch 0" + }, + "_1": { + "id": "epoch_1", + "short": "Epoch 1", + "long": "Epoch 1" + }, + "_2": { + "id": "epoch_2", + "short": "Epoch 2", + "long": "Epoch 2" + }, + "_3": { + "id": "epoch_3", + "short": "Epoch 3", + "long": "Epoch 3" + }, + "_4": { + "id": "epoch_4", + "short": "Epoch 4", + "long": "Epoch 4" + } + } + + YEAR_NAMES = { + "_2009": { + "id": "year_2009", + "short": "2009", + "long": "Year 2009" + }, + "_2010": { + "id": "year_2010", + "short": "2010", + "long": "Year 2010" + }, + "_2011": { + "id": "year_2011", + "short": "2011", + "long": "Year 2011" + }, + "_2012": { + "id": "year_2012", + "short": "2012", + "long": "Year 2012" + }, + "_2013": { + "id": "year_2013", + "short": "2013", + "long": "Year 2013" + }, + "_2014": { + "id": "year_2014", + "short": "2014", + "long": "Year 2014" + }, + "_2015": { + "id": "year_2015", + "short": "2015", + "long": "Year 2015" + }, + "_2016": { + "id": "year_2016", + "short": "2016", + "long": "Year 2016" + }, + "_2017": { + "id": "year_2017", + "short": "2017", + "long": "Year 2017" + }, + "_2018": { + "id": "year_2018", + "short": "2018", + "long": "Year 2018" + }, + "_2019": { + "id": "year_2019", + "short": "2019", + "long": "Year 2019" + }, + "_2020": { + "id": "year_2020", + "short": "2020", + "long": "Year 2020" + }, + "_2021": { + "id": "year_2021", + "short": "2021", + "long": "Year 2021" + }, + "_2022": { + "id": "year_2022", + "short": "2022", + "long": "Year 2022" + }, + "_2023": { + "id": "year_2023", + "short": "2023", + "long": "Year 2023" + }, + "_2024": { + "id": "year_2024", + "short": "2024", + "long": 
"Year 2024" + }, + "_2025": { + "id": "year_2025", + "short": "2025", + "long": "Year 2025" + }, + "_2026": { + "id": "year_2026", + "short": "2026", + "long": "Year 2026" + } + } + + SPENDABLE_TYPE_NAMES = { + "p2pk65": { + "id": "p2pk65", + "short": "P2PK65", + "long": "Pay to Public Key (65 bytes)" + }, + "p2pk33": { + "id": "p2pk33", + "short": "P2PK33", + "long": "Pay to Public Key (33 bytes)" + }, + "p2pkh": { + "id": "p2pkh", + "short": "P2PKH", + "long": "Pay to Public Key Hash" + }, + "p2ms": { + "id": "p2ms", + "short": "P2MS", + "long": "Pay to Multisig" + }, + "p2sh": { + "id": "p2sh", + "short": "P2SH", + "long": "Pay to Script Hash" + }, + "p2wpkh": { + "id": "p2wpkh", + "short": "P2WPKH", + "long": "Pay to Witness Public Key Hash" + }, + "p2wsh": { + "id": "p2wsh", + "short": "P2WSH", + "long": "Pay to Witness Script Hash" + }, + "p2tr": { + "id": "p2tr", + "short": "P2TR", + "long": "Pay to Taproot" + }, + "p2a": { + "id": "p2a", + "short": "P2A", + "long": "Pay to Anchor" + }, + "unknown": { + "id": "unknown_outputs", + "short": "Unknown", + "long": "Unknown Output Type" + }, + "empty": { + "id": "empty_outputs", + "short": "Empty", + "long": "Empty Output" + } + } + + AGE_RANGE_NAMES = { + "up_to_1d": { + "id": "up_to_1d_old", + "short": "<1d", + "long": "Up to 1 Day Old" + }, + "_1d_to_1w": { + "id": "at_least_1d_up_to_1w_old", + "short": "1d-1w", + "long": "1 Day to 1 Week Old" + }, + "_1w_to_1m": { + "id": "at_least_1w_up_to_1m_old", + "short": "1w-1m", + "long": "1 Week to 1 Month Old" + }, + "_1m_to_2m": { + "id": "at_least_1m_up_to_2m_old", + "short": "1m-2m", + "long": "1 to 2 Months Old" + }, + "_2m_to_3m": { + "id": "at_least_2m_up_to_3m_old", + "short": "2m-3m", + "long": "2 to 3 Months Old" + }, + "_3m_to_4m": { + "id": "at_least_3m_up_to_4m_old", + "short": "3m-4m", + "long": "3 to 4 Months Old" + }, + "_4m_to_5m": { + "id": "at_least_4m_up_to_5m_old", + "short": "4m-5m", + "long": "4 to 5 Months Old" + }, + "_5m_to_6m": { + "id": 
"at_least_5m_up_to_6m_old", + "short": "5m-6m", + "long": "5 to 6 Months Old" + }, + "_6m_to_1y": { + "id": "at_least_6m_up_to_1y_old", + "short": "6m-1y", + "long": "6 Months to 1 Year Old" + }, + "_1y_to_2y": { + "id": "at_least_1y_up_to_2y_old", + "short": "1y-2y", + "long": "1 to 2 Years Old" + }, + "_2y_to_3y": { + "id": "at_least_2y_up_to_3y_old", + "short": "2y-3y", + "long": "2 to 3 Years Old" + }, + "_3y_to_4y": { + "id": "at_least_3y_up_to_4y_old", + "short": "3y-4y", + "long": "3 to 4 Years Old" + }, + "_4y_to_5y": { + "id": "at_least_4y_up_to_5y_old", + "short": "4y-5y", + "long": "4 to 5 Years Old" + }, + "_5y_to_6y": { + "id": "at_least_5y_up_to_6y_old", + "short": "5y-6y", + "long": "5 to 6 Years Old" + }, + "_6y_to_7y": { + "id": "at_least_6y_up_to_7y_old", + "short": "6y-7y", + "long": "6 to 7 Years Old" + }, + "_7y_to_8y": { + "id": "at_least_7y_up_to_8y_old", + "short": "7y-8y", + "long": "7 to 8 Years Old" + }, + "_8y_to_10y": { + "id": "at_least_8y_up_to_10y_old", + "short": "8y-10y", + "long": "8 to 10 Years Old" + }, + "_10y_to_12y": { + "id": "at_least_10y_up_to_12y_old", + "short": "10y-12y", + "long": "10 to 12 Years Old" + }, + "_12y_to_15y": { + "id": "at_least_12y_up_to_15y_old", + "short": "12y-15y", + "long": "12 to 15 Years Old" + }, + "from_15y": { + "id": "at_least_15y_old", + "short": "15y+", + "long": "15+ Years Old" + } + } + + MAX_AGE_NAMES = { + "_1w": { + "id": "up_to_1w_old", + "short": "<1w", + "long": "Up to 1 Week Old" + }, + "_1m": { + "id": "up_to_1m_old", + "short": "<1m", + "long": "Up to 1 Month Old" + }, + "_2m": { + "id": "up_to_2m_old", + "short": "<2m", + "long": "Up to 2 Months Old" + }, + "_3m": { + "id": "up_to_3m_old", + "short": "<3m", + "long": "Up to 3 Months Old" + }, + "_4m": { + "id": "up_to_4m_old", + "short": "<4m", + "long": "Up to 4 Months Old" + }, + "_5m": { + "id": "up_to_5m_old", + "short": "<5m", + "long": "Up to 5 Months Old" + }, + "_6m": { + "id": "up_to_6m_old", + "short": "<6m", + "long": 
"Up to 6 Months Old" + }, + "_1y": { + "id": "up_to_1y_old", + "short": "<1y", + "long": "Up to 1 Year Old" + }, + "_2y": { + "id": "up_to_2y_old", + "short": "<2y", + "long": "Up to 2 Years Old" + }, + "_3y": { + "id": "up_to_3y_old", + "short": "<3y", + "long": "Up to 3 Years Old" + }, + "_4y": { + "id": "up_to_4y_old", + "short": "<4y", + "long": "Up to 4 Years Old" + }, + "_5y": { + "id": "up_to_5y_old", + "short": "<5y", + "long": "Up to 5 Years Old" + }, + "_6y": { + "id": "up_to_6y_old", + "short": "<6y", + "long": "Up to 6 Years Old" + }, + "_7y": { + "id": "up_to_7y_old", + "short": "<7y", + "long": "Up to 7 Years Old" + }, + "_8y": { + "id": "up_to_8y_old", + "short": "<8y", + "long": "Up to 8 Years Old" + }, + "_10y": { + "id": "up_to_10y_old", + "short": "<10y", + "long": "Up to 10 Years Old" + }, + "_12y": { + "id": "up_to_12y_old", + "short": "<12y", + "long": "Up to 12 Years Old" + }, + "_15y": { + "id": "up_to_15y_old", + "short": "<15y", + "long": "Up to 15 Years Old" + } + } + + MIN_AGE_NAMES = { + "_1d": { + "id": "at_least_1d_old", + "short": "1d+", + "long": "At Least 1 Day Old" + }, + "_1w": { + "id": "at_least_1w_old", + "short": "1w+", + "long": "At Least 1 Week Old" + }, + "_1m": { + "id": "at_least_1m_old", + "short": "1m+", + "long": "At Least 1 Month Old" + }, + "_2m": { + "id": "at_least_2m_old", + "short": "2m+", + "long": "At Least 2 Months Old" + }, + "_3m": { + "id": "at_least_3m_old", + "short": "3m+", + "long": "At Least 3 Months Old" + }, + "_4m": { + "id": "at_least_4m_old", + "short": "4m+", + "long": "At Least 4 Months Old" + }, + "_5m": { + "id": "at_least_5m_old", + "short": "5m+", + "long": "At Least 5 Months Old" + }, + "_6m": { + "id": "at_least_6m_old", + "short": "6m+", + "long": "At Least 6 Months Old" + }, + "_1y": { + "id": "at_least_1y_old", + "short": "1y+", + "long": "At Least 1 Year Old" + }, + "_2y": { + "id": "at_least_2y_old", + "short": "2y+", + "long": "At Least 2 Years Old" + }, + "_3y": { + "id": 
"at_least_3y_old", + "short": "3y+", + "long": "At Least 3 Years Old" + }, + "_4y": { + "id": "at_least_4y_old", + "short": "4y+", + "long": "At Least 4 Years Old" + }, + "_5y": { + "id": "at_least_5y_old", + "short": "5y+", + "long": "At Least 5 Years Old" + }, + "_6y": { + "id": "at_least_6y_old", + "short": "6y+", + "long": "At Least 6 Years Old" + }, + "_7y": { + "id": "at_least_7y_old", + "short": "7y+", + "long": "At Least 7 Years Old" + }, + "_8y": { + "id": "at_least_8y_old", + "short": "8y+", + "long": "At Least 8 Years Old" + }, + "_10y": { + "id": "at_least_10y_old", + "short": "10y+", + "long": "At Least 10 Years Old" + }, + "_12y": { + "id": "at_least_12y_old", + "short": "12y+", + "long": "At Least 12 Years Old" + } + } + + AMOUNT_RANGE_NAMES = { + "_0sats": { + "id": "with_0sats", + "short": "0 sats", + "long": "0 Sats" + }, + "_1sat_to_10sats": { + "id": "above_1sat_under_10sats", + "short": "1-10 sats", + "long": "1 to 10 Sats" + }, + "_10sats_to_100sats": { + "id": "above_10sats_under_100sats", + "short": "10-100 sats", + "long": "10 to 100 Sats" + }, + "_100sats_to_1k_sats": { + "id": "above_100sats_under_1k_sats", + "short": "100-1k sats", + "long": "100 to 1K Sats" + }, + "_1k_sats_to_10k_sats": { + "id": "above_1k_sats_under_10k_sats", + "short": "1k-10k sats", + "long": "1K to 10K Sats" + }, + "_10k_sats_to_100k_sats": { + "id": "above_10k_sats_under_100k_sats", + "short": "10k-100k sats", + "long": "10K to 100K Sats" + }, + "_100k_sats_to_1m_sats": { + "id": "above_100k_sats_under_1m_sats", + "short": "100k-1M sats", + "long": "100K to 1M Sats" + }, + "_1m_sats_to_10m_sats": { + "id": "above_1m_sats_under_10m_sats", + "short": "1M-10M sats", + "long": "1M to 10M Sats" + }, + "_10m_sats_to_1btc": { + "id": "above_10m_sats_under_1btc", + "short": "0.1-1 BTC", + "long": "0.1 to 1 BTC" + }, + "_1btc_to_10btc": { + "id": "above_1btc_under_10btc", + "short": "1-10 BTC", + "long": "1 to 10 BTC" + }, + "_10btc_to_100btc": { + "id": 
"above_10btc_under_100btc", + "short": "10-100 BTC", + "long": "10 to 100 BTC" + }, + "_100btc_to_1k_btc": { + "id": "above_100btc_under_1k_btc", + "short": "100-1k BTC", + "long": "100 to 1K BTC" + }, + "_1k_btc_to_10k_btc": { + "id": "above_1k_btc_under_10k_btc", + "short": "1k-10k BTC", + "long": "1K to 10K BTC" + }, + "_10k_btc_to_100k_btc": { + "id": "above_10k_btc_under_100k_btc", + "short": "10k-100k BTC", + "long": "10K to 100K BTC" + }, + "_100k_btc_or_more": { + "id": "above_100k_btc", + "short": "100k+ BTC", + "long": "100K+ BTC" + } + } + + GE_AMOUNT_NAMES = { + "_1sat": { + "id": "above_1sat", + "short": "1+ sats", + "long": "Above 1 Sat" + }, + "_10sats": { + "id": "above_10sats", + "short": "10+ sats", + "long": "Above 10 Sats" + }, + "_100sats": { + "id": "above_100sats", + "short": "100+ sats", + "long": "Above 100 Sats" + }, + "_1k_sats": { + "id": "above_1k_sats", + "short": "1k+ sats", + "long": "Above 1K Sats" + }, + "_10k_sats": { + "id": "above_10k_sats", + "short": "10k+ sats", + "long": "Above 10K Sats" + }, + "_100k_sats": { + "id": "above_100k_sats", + "short": "100k+ sats", + "long": "Above 100K Sats" + }, + "_1m_sats": { + "id": "above_1m_sats", + "short": "1M+ sats", + "long": "Above 1M Sats" + }, + "_10m_sats": { + "id": "above_10m_sats", + "short": "0.1+ BTC", + "long": "Above 0.1 BTC" + }, + "_1btc": { + "id": "above_1btc", + "short": "1+ BTC", + "long": "Above 1 BTC" + }, + "_10btc": { + "id": "above_10btc", + "short": "10+ BTC", + "long": "Above 10 BTC" + }, + "_100btc": { + "id": "above_100btc", + "short": "100+ BTC", + "long": "Above 100 BTC" + }, + "_1k_btc": { + "id": "above_1k_btc", + "short": "1k+ BTC", + "long": "Above 1K BTC" + }, + "_10k_btc": { + "id": "above_10k_btc", + "short": "10k+ BTC", + "long": "Above 10K BTC" + } + } + + LT_AMOUNT_NAMES = { + "_10sats": { + "id": "under_10sats", + "short": "<10 sats", + "long": "Under 10 Sats" + }, + "_100sats": { + "id": "under_100sats", + "short": "<100 sats", + "long": "Under 
100 Sats" + }, + "_1k_sats": { + "id": "under_1k_sats", + "short": "<1k sats", + "long": "Under 1K Sats" + }, + "_10k_sats": { + "id": "under_10k_sats", + "short": "<10k sats", + "long": "Under 10K Sats" + }, + "_100k_sats": { + "id": "under_100k_sats", + "short": "<100k sats", + "long": "Under 100K Sats" + }, + "_1m_sats": { + "id": "under_1m_sats", + "short": "<1M sats", + "long": "Under 1M Sats" + }, + "_10m_sats": { + "id": "under_10m_sats", + "short": "<0.1 BTC", + "long": "Under 0.1 BTC" + }, + "_1btc": { + "id": "under_1btc", + "short": "<1 BTC", + "long": "Under 1 BTC" + }, + "_10btc": { + "id": "under_10btc", + "short": "<10 BTC", + "long": "Under 10 BTC" + }, + "_100btc": { + "id": "under_100btc", + "short": "<100 BTC", + "long": "Under 100 BTC" + }, + "_1k_btc": { + "id": "under_1k_btc", + "short": "<1k BTC", + "long": "Under 1K BTC" + }, + "_10k_btc": { + "id": "under_10k_btc", + "short": "<10k BTC", + "long": "Under 10K BTC" + }, + "_100k_btc": { + "id": "under_100k_btc", + "short": "<100k BTC", + "long": "Under 100K BTC" + } + } + + def __init__(self, base_url: str = 'http://localhost:3000', timeout: float = 30.0): + super().__init__(base_url, timeout) + self.tree = CatalogTree(self) + + def get_address(self, address: Address) -> AddressStats: + """Address information. + + Retrieve comprehensive information about a Bitcoin address including balance, transaction history, UTXOs, and estimated investment metrics. Supports all standard Bitcoin address types (P2PKH, P2SH, P2WPKH, P2WSH, P2TR, etc.).""" + return self.get(f'/api/address/{address}') + + def get_address_txs(self, address: Address, after_txid: Optional[str] = None, limit: Optional[int] = None) -> List[Txid]: + """Address transaction IDs. + + Get transaction IDs for an address, newest first. 
Use after_txid for pagination.""" + params = [] + if after_txid is not None: params.append(f'after_txid={after_txid}') + if limit is not None: params.append(f'limit={limit}') + query = '&'.join(params) + return self.get(f'/api/address/{address}/txs{"?" + query if query else ""}') + + def get_address_txs_chain(self, address: Address, after_txid: Optional[str] = None, limit: Optional[int] = None) -> List[Txid]: + """Address confirmed transactions. + + Get confirmed transaction IDs for an address, 25 per page. Use ?after_txid= for pagination.""" + params = [] + if after_txid is not None: params.append(f'after_txid={after_txid}') + if limit is not None: params.append(f'limit={limit}') + query = '&'.join(params) + return self.get(f'/api/address/{address}/txs/chain{"?" + query if query else ""}') + + def get_address_txs_mempool(self, address: Address) -> List[Txid]: + """Address mempool transactions. + + Get unconfirmed transaction IDs for an address from the mempool (up to 50).""" + return self.get(f'/api/address/{address}/txs/mempool') + + def get_address_utxo(self, address: Address) -> List[Utxo]: + """Address UTXOs. + + Get unspent transaction outputs for an address.""" + return self.get(f'/api/address/{address}/utxo') + + def get_block_height(self, height: Height) -> BlockInfo: + """Block by height. + + Retrieve block information by block height. Returns block metadata including hash, timestamp, difficulty, size, weight, and transaction count.""" + return self.get(f'/api/block-height/{height}') + + def get_block_by_hash(self, hash: BlockHash) -> BlockInfo: + """Block information. + + Retrieve block information by block hash. Returns block metadata including height, timestamp, difficulty, size, weight, and transaction count.""" + return self.get(f'/api/block/{hash}') + + def get_block_by_hash_raw(self, hash: BlockHash) -> List[int]: + """Raw block. 
+ + Returns the raw block data in binary format.""" + return self.get(f'/api/block/{hash}/raw') + + def get_block_by_hash_status(self, hash: BlockHash) -> BlockStatus: + """Block status. + + Retrieve the status of a block. Returns whether the block is in the best chain and, if so, its height and the hash of the next block.""" + return self.get(f'/api/block/{hash}/status') + + def get_block_by_hash_txid_by_index(self, hash: BlockHash, index: TxIndex) -> Txid: + """Transaction ID at index. + + Retrieve a single transaction ID at a specific index within a block. Returns plain text txid.""" + return self.get(f'/api/block/{hash}/txid/{index}') + + def get_block_by_hash_txids(self, hash: BlockHash) -> List[Txid]: + """Block transaction IDs. + + Retrieve all transaction IDs in a block by block hash.""" + return self.get(f'/api/block/{hash}/txids') + + def get_block_by_hash_txs_by_start_index(self, hash: BlockHash, start_index: TxIndex) -> List[Transaction]: + """Block transactions (paginated). + + Retrieve transactions in a block by block hash, starting from the specified index. Returns up to 25 transactions at a time.""" + return self.get(f'/api/block/{hash}/txs/{start_index}') + + def get_blocks(self) -> List[BlockInfo]: + """Recent blocks. + + Retrieve the last 10 blocks. Returns block metadata for each block.""" + return self.get('/api/blocks') + + def get_blocks_by_height(self, height: Height) -> List[BlockInfo]: + """Blocks from height. + + Retrieve up to 10 blocks going backwards from the given height. For example, height=100 returns blocks 100, 99, 98, ..., 91. Height=0 returns only block 0.""" + return self.get(f'/api/blocks/{height}') + + def get_mempool_info(self) -> MempoolInfo: + """Mempool statistics. + + Get current mempool statistics including transaction count, total vsize, and total fees.""" + return self.get('/api/mempool/info') + + def get_mempool_txids(self) -> List[Txid]: + """Mempool transaction IDs. 
+ + Get all transaction IDs currently in the mempool.""" + return self.get('/api/mempool/txids') + + def get_metric(self, metric: Metric) -> List[Index]: + """Get supported indexes for a metric. + + Returns the list of indexes are supported by the specified metric. For example, `realized_price` might be available on dateindex, weekindex, and monthindex.""" + return self.get(f'/api/metric/{metric}') + + def get_metric_by_index(self, metric: Metric, index: Index, from_: Optional[Any] = None, to: Optional[Any] = None, count: Optional[Any] = None, format: Optional[Format] = None) -> MetricData: + """Get metric data. + + Fetch data for a specific metric at the given index. Use query parameters to filter by date range and format (json/csv).""" + params = [] + if from_ is not None: params.append(f'from={from_}') + if to is not None: params.append(f'to={to}') + if count is not None: params.append(f'count={count}') + if format is not None: params.append(f'format={format}') + query = '&'.join(params) + return self.get(f'/api/metric/{metric}/{index}{"?" + query if query else ""}') + + def get_metrics_bulk(self, metrics: Metrics, index: Index, from_: Optional[Any] = None, to: Optional[Any] = None, count: Optional[Any] = None, format: Optional[Format] = None) -> List[MetricData]: + """Bulk metric data. + + Fetch multiple metrics in a single request. Supports filtering by index and date range. Returns an array of MetricData objects.""" + params = [] + params.append(f'metrics={metrics}') + params.append(f'index={index}') + if from_ is not None: params.append(f'from={from_}') + if to is not None: params.append(f'to={to}') + if count is not None: params.append(f'count={count}') + if format is not None: params.append(f'format={format}') + query = '&'.join(params) + return self.get(f'/api/metrics/bulk{"?" + query if query else ""}') + + def get_metrics_catalog(self) -> TreeNode: + """Metrics catalog. 
+ + Returns the complete hierarchical catalog of available metrics organized as a tree structure. Metrics are grouped by categories and subcategories. Best viewed in an interactive JSON viewer (e.g., Firefox's built-in JSON viewer) for easy navigation of the nested structure.""" + return self.get('/api/metrics/catalog') + + def get_metrics_count(self) -> List[MetricCount]: + """Metric count. + + Current metric count""" + return self.get('/api/metrics/count') + + def get_metrics_indexes(self) -> List[IndexInfo]: + """List available indexes. + + Returns all available indexes with their accepted query aliases. Use any alias when querying metrics.""" + return self.get('/api/metrics/indexes') + + def get_metrics_list(self, page: Optional[Any] = None) -> PaginatedMetrics: + """Metrics list. + + Paginated list of available metrics""" + params = [] + if page is not None: params.append(f'page={page}') + query = '&'.join(params) + return self.get(f'/api/metrics/list{"?" + query if query else ""}') + + def get_metrics_search_by_metric(self, metric: Metric, limit: Optional[Limit] = None) -> List[Metric]: + """Search metrics. + + Fuzzy search for metrics by name. Supports partial matches and typos.""" + params = [] + if limit is not None: params.append(f'limit={limit}') + query = '&'.join(params) + return self.get(f'/api/metrics/search/{metric}{"?" + query if query else ""}') + + def get_tx_by_txid(self, txid: Txid) -> Transaction: + """Transaction information. + + Retrieve complete transaction data by transaction ID (txid). Returns the full transaction details including inputs, outputs, and metadata. The transaction data is read directly from the blockchain data files.""" + return self.get(f'/api/tx/{txid}') + + def get_tx_by_txid_hex(self, txid: Txid) -> Hex: + """Transaction hex. + + Retrieve the raw transaction as a hex-encoded string. 
Returns the serialized transaction in hexadecimal format.""" + return self.get(f'/api/tx/{txid}/hex') + + def get_tx_by_txid_outspend_by_vout(self, txid: Txid, vout: Vout) -> TxOutspend: + """Output spend status. + + Get the spending status of a transaction output. Returns whether the output has been spent and, if so, the spending transaction details.""" + return self.get(f'/api/tx/{txid}/outspend/{vout}') + + def get_tx_by_txid_outspends(self, txid: Txid) -> List[TxOutspend]: + """All output spend statuses. + + Get the spending status of all outputs in a transaction. Returns an array with the spend status for each output.""" + return self.get(f'/api/tx/{txid}/outspends') + + def get_tx_by_txid_status(self, txid: Txid) -> TxStatus: + """Transaction status. + + Retrieve the confirmation status of a transaction. Returns whether the transaction is confirmed and, if so, the block height, hash, and timestamp.""" + return self.get(f'/api/tx/{txid}/status') + + def get_v1_difficulty_adjustment(self) -> DifficultyAdjustment: + """Difficulty adjustment. + + Get current difficulty adjustment information including progress through the current epoch, estimated retarget date, and difficulty change prediction.""" + return self.get('/api/v1/difficulty-adjustment') + + def get_v1_fees_mempool_blocks(self) -> List[MempoolBlock]: + """Projected mempool blocks. + + Get projected blocks from the mempool for fee estimation. Each block contains statistics about transactions that would be included if a block were mined now.""" + return self.get('/api/v1/fees/mempool-blocks') + + def get_v1_fees_recommended(self) -> RecommendedFees: + """Recommended fees. + + Get recommended fee rates for different confirmation targets based on current mempool state.""" + return self.get('/api/v1/fees/recommended') + + def get_v1_mining_blocks_fees_by_time_period(self, time_period: TimePeriod) -> List[BlockFeesEntry]: + """Block fees. + + Get average block fees for a time period. 
Valid periods: 24h, 3d, 1w, 1m, 3m, 6m, 1y, 2y, 3y""" + return self.get(f'/api/v1/mining/blocks/fees/{time_period}') + + def get_v1_mining_blocks_rewards_by_time_period(self, time_period: TimePeriod) -> List[BlockRewardsEntry]: + """Block rewards. + + Get average block rewards (coinbase = subsidy + fees) for a time period. Valid periods: 24h, 3d, 1w, 1m, 3m, 6m, 1y, 2y, 3y""" + return self.get(f'/api/v1/mining/blocks/rewards/{time_period}') + + def get_v1_mining_blocks_sizes_weights_by_time_period(self, time_period: TimePeriod) -> BlockSizesWeights: + """Block sizes and weights. + + Get average block sizes and weights for a time period. Valid periods: 24h, 3d, 1w, 1m, 3m, 6m, 1y, 2y, 3y""" + return self.get(f'/api/v1/mining/blocks/sizes-weights/{time_period}') + + def get_v1_mining_blocks_timestamp(self, timestamp: Timestamp) -> BlockTimestamp: + """Block by timestamp. + + Find the block closest to a given UNIX timestamp.""" + return self.get(f'/api/v1/mining/blocks/timestamp/{timestamp}') + + def get_v1_mining_difficulty_adjustments(self) -> List[DifficultyAdjustmentEntry]: + """Difficulty adjustments (all time). + + Get historical difficulty adjustments. Returns array of [timestamp, height, difficulty, change_percent].""" + return self.get('/api/v1/mining/difficulty-adjustments') + + def get_v1_mining_difficulty_adjustments_by_time_period(self, time_period: TimePeriod) -> List[DifficultyAdjustmentEntry]: + """Difficulty adjustments. + + Get historical difficulty adjustments for a time period. Valid periods: 24h, 3d, 1w, 1m, 3m, 6m, 1y, 2y, 3y. Returns array of [timestamp, height, difficulty, change_percent].""" + return self.get(f'/api/v1/mining/difficulty-adjustments/{time_period}') + + def get_v1_mining_hashrate(self) -> HashrateSummary: + """Network hashrate (all time). 
+ + Get network hashrate and difficulty data for all time.""" + return self.get('/api/v1/mining/hashrate') + + def get_v1_mining_hashrate_by_time_period(self, time_period: TimePeriod) -> HashrateSummary: + """Network hashrate. + + Get network hashrate and difficulty data for a time period. Valid periods: 24h, 3d, 1w, 1m, 3m, 6m, 1y, 2y, 3y""" + return self.get(f'/api/v1/mining/hashrate/{time_period}') + + def get_v1_mining_pool_by_slug(self, slug: PoolSlug) -> PoolDetail: + """Mining pool details. + + Get detailed information about a specific mining pool including block counts and shares for different time periods.""" + return self.get(f'/api/v1/mining/pool/{slug}') + + def get_v1_mining_pools(self) -> List[PoolInfo]: + """List all mining pools. + + Get list of all known mining pools with their identifiers.""" + return self.get('/api/v1/mining/pools') + + def get_v1_mining_pools_by_time_period(self, time_period: TimePeriod) -> PoolsSummary: + """Mining pool statistics. + + Get mining pool statistics for a time period. Valid periods: 24h, 3d, 1w, 1m, 3m, 6m, 1y, 2y, 3y""" + return self.get(f'/api/v1/mining/pools/{time_period}') + + def get_v1_mining_reward_stats_by_block_count(self, block_count: int) -> RewardStats: + """Mining reward statistics. + + Get mining reward statistics for the last N blocks including total rewards, fees, and transaction count.""" + return self.get(f'/api/v1/mining/reward-stats/{block_count}') + + def get_v1_validate_address(self, address: str) -> AddressValidation: + """Validate address. + + Validate a Bitcoin address and get information about its type and scriptPubKey.""" + return self.get(f'/api/v1/validate-address/{address}') + + def get_health(self) -> Health: + """Health check. + + Returns the health status of the API server""" + return self.get('/health') + + def get_version(self) -> str: + """API version. 
+ + Returns the current version of the API server""" + return self.get('/version') + diff --git a/packages/brk_client/main.py b/packages/brk_client/main.py new file mode 100644 index 000000000..00b95010d --- /dev/null +++ b/packages/brk_client/main.py @@ -0,0 +1,6 @@ +def main(): + print("Hello from brk-client!") + + +if __name__ == "__main__": + main() diff --git a/packages/brk_client/pyproject.toml b/packages/brk_client/pyproject.toml new file mode 100644 index 000000000..fb5b5a3a2 --- /dev/null +++ b/packages/brk_client/pyproject.toml @@ -0,0 +1,33 @@ +[project] +name = "brk-client" +version = "0.1.0-alpha.1" +description = "Python client for the Bitcoin Research Kit" +readme = "README.md" +requires-python = ">=3.9" +license = "MIT" +keywords = ["bitcoin", "blockchain", "analytics", "on-chain"] +classifiers = [ + "Development Status :: 3 - Alpha", + "Intended Audience :: Developers", + "License :: OSI Approved :: MIT License", + "Programming Language :: Python :: 3", + "Programming Language :: Python :: 3.9", + "Programming Language :: Python :: 3.10", + "Programming Language :: Python :: 3.11", + "Programming Language :: Python :: 3.12", + "Programming Language :: Python :: 3.13", + "Typing :: Typed", +] +dependencies = ["httpx>=0.25.0"] + +[project.urls] +Homepage = "https://bitcoinresearchkit.org" +Repository = "https://github.com/bitcoinresearchkit/brk" + +[dependency-groups] +dev = ["pytest"] + +[build-system] +requires = ["hatchling"] +build-backend = "hatchling.build" + diff --git a/packages/brk_client/tests/test_client.py b/packages/brk_client/tests/test_client.py new file mode 100644 index 000000000..d6659b846 --- /dev/null +++ b/packages/brk_client/tests/test_client.py @@ -0,0 +1,21 @@ +from __future__ import print_function + +from brk_client import VERSION, BrkClient + + +def test_version(): + assert VERSION.startswith("v") + + +def test_client_creation(): + client = BrkClient("http://localhost:3110") + assert client.base_url == 
"http://localhost:3110" + + +def test_tree_exists(): + client = BrkClient("http://localhost:3110") + print(client.get_api_block_height_by_height(800000)) + print(client.get_api_metric_by_metric_by_index("price_close", "dateindex")) + assert hasattr(client, "tree") + assert hasattr(client.tree, "computed") + assert hasattr(client.tree, "indexed") diff --git a/packages/brk_client/uv.lock b/packages/brk_client/uv.lock new file mode 100644 index 000000000..5b1712d40 --- /dev/null +++ b/packages/brk_client/uv.lock @@ -0,0 +1,268 @@ +version = 1 +revision = 3 +requires-python = ">=3.9" +resolution-markers = [ + "python_full_version >= '3.10'", + "python_full_version < '3.10'", +] + +[[package]] +name = "anyio" +version = "4.12.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "exceptiongroup", marker = "python_full_version < '3.11'" }, + { name = "idna" }, + { name = "typing-extensions", marker = "python_full_version < '3.13'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/16/ce/8a777047513153587e5434fd752e89334ac33e379aa3497db860eeb60377/anyio-4.12.0.tar.gz", hash = "sha256:73c693b567b0c55130c104d0b43a9baf3aa6a31fc6110116509f27bf75e21ec0", size = 228266, upload-time = "2025-11-28T23:37:38.911Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/7f/9c/36c5c37947ebfb8c7f22e0eb6e4d188ee2d53aa3880f3f2744fb894f0cb1/anyio-4.12.0-py3-none-any.whl", hash = "sha256:dad2376a628f98eeca4881fc56cd06affd18f659b17a747d3ff0307ced94b1bb", size = 113362, upload-time = "2025-11-28T23:36:57.897Z" }, +] + +[[package]] +name = "brk-client" +version = "0.1.0a1" +source = { editable = "." 
} +dependencies = [ + { name = "httpx" }, +] + +[package.dev-dependencies] +dev = [ + { name = "pytest", version = "8.4.2", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version < '3.10'" }, + { name = "pytest", version = "9.0.2", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version >= '3.10'" }, +] + +[package.metadata] +requires-dist = [{ name = "httpx", specifier = ">=0.25.0" }] + +[package.metadata.requires-dev] +dev = [{ name = "pytest" }] + +[[package]] +name = "certifi" +version = "2025.11.12" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/a2/8c/58f469717fa48465e4a50c014a0400602d3c437d7c0c468e17ada824da3a/certifi-2025.11.12.tar.gz", hash = "sha256:d8ab5478f2ecd78af242878415affce761ca6bc54a22a27e026d7c25357c3316", size = 160538, upload-time = "2025-11-12T02:54:51.517Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/70/7d/9bc192684cea499815ff478dfcdc13835ddf401365057044fb721ec6bddb/certifi-2025.11.12-py3-none-any.whl", hash = "sha256:97de8790030bbd5c2d96b7ec782fc2f7820ef8dba6db909ccf95449f2d062d4b", size = 159438, upload-time = "2025-11-12T02:54:49.735Z" }, +] + +[[package]] +name = "colorama" +version = "0.4.6" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/d8/53/6f443c9a4a8358a93a6792e2acffb9d9d5cb0a5cfd8802644b7b1c9a02e4/colorama-0.4.6.tar.gz", hash = "sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44", size = 27697, upload-time = "2022-10-25T02:36:22.414Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/d1/d6/3965ed04c63042e047cb6a3e6ed1a63a35087b6a609aa3a15ed8ac56c221/colorama-0.4.6-py2.py3-none-any.whl", hash = "sha256:4f1d9991f5acc0ca119f9d443620b77f9d6b33703e51011c16baf57afb285fc6", size = 25335, upload-time = "2022-10-25T02:36:20.889Z" }, +] + +[[package]] +name = "exceptiongroup" +version = "1.3.1" +source = { registry 
= "https://pypi.org/simple" } +dependencies = [ + { name = "typing-extensions", marker = "python_full_version < '3.13'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/50/79/66800aadf48771f6b62f7eb014e352e5d06856655206165d775e675a02c9/exceptiongroup-1.3.1.tar.gz", hash = "sha256:8b412432c6055b0b7d14c310000ae93352ed6754f70fa8f7c34141f91c4e3219", size = 30371, upload-time = "2025-11-21T23:01:54.787Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/8a/0e/97c33bf5009bdbac74fd2beace167cab3f978feb69cc36f1ef79360d6c4e/exceptiongroup-1.3.1-py3-none-any.whl", hash = "sha256:a7a39a3bd276781e98394987d3a5701d0c4edffb633bb7a5144577f82c773598", size = 16740, upload-time = "2025-11-21T23:01:53.443Z" }, +] + +[[package]] +name = "h11" +version = "0.16.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/01/ee/02a2c011bdab74c6fb3c75474d40b3052059d95df7e73351460c8588d963/h11-0.16.0.tar.gz", hash = "sha256:4e35b956cf45792e4caa5885e69fba00bdbc6ffafbfa020300e549b208ee5ff1", size = 101250, upload-time = "2025-04-24T03:35:25.427Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/04/4b/29cac41a4d98d144bf5f6d33995617b185d14b22401f75ca86f384e87ff1/h11-0.16.0-py3-none-any.whl", hash = "sha256:63cf8bbe7522de3bf65932fda1d9c2772064ffb3dae62d55932da54b31cb6c86", size = 37515, upload-time = "2025-04-24T03:35:24.344Z" }, +] + +[[package]] +name = "httpcore" +version = "1.0.9" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "certifi" }, + { name = "h11" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/06/94/82699a10bca87a5556c9c59b5963f2d039dbd239f25bc2a63907a05a14cb/httpcore-1.0.9.tar.gz", hash = "sha256:6e34463af53fd2ab5d807f399a9b45ea31c3dfa2276f15a2c3f00afff6e176e8", size = 85484, upload-time = "2025-04-24T22:06:22.219Z" } +wheels = [ + { url = 
"https://files.pythonhosted.org/packages/7e/f5/f66802a942d491edb555dd61e3a9961140fd64c90bce1eafd741609d334d/httpcore-1.0.9-py3-none-any.whl", hash = "sha256:2d400746a40668fc9dec9810239072b40b4484b640a8c38fd654a024c7a1bf55", size = 78784, upload-time = "2025-04-24T22:06:20.566Z" }, +] + +[[package]] +name = "httpx" +version = "0.28.1" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "anyio" }, + { name = "certifi" }, + { name = "httpcore" }, + { name = "idna" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/b1/df/48c586a5fe32a0f01324ee087459e112ebb7224f646c0b5023f5e79e9956/httpx-0.28.1.tar.gz", hash = "sha256:75e98c5f16b0f35b567856f597f06ff2270a374470a5c2392242528e3e3e42fc", size = 141406, upload-time = "2024-12-06T15:37:23.222Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/2a/39/e50c7c3a983047577ee07d2a9e53faf5a69493943ec3f6a384bdc792deb2/httpx-0.28.1-py3-none-any.whl", hash = "sha256:d909fcccc110f8c7faf814ca82a9a4d816bc5a6dbfea25d6591d6985b8ba59ad", size = 73517, upload-time = "2024-12-06T15:37:21.509Z" }, +] + +[[package]] +name = "idna" +version = "3.11" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/6f/6d/0703ccc57f3a7233505399edb88de3cbd678da106337b9fcde432b65ed60/idna-3.11.tar.gz", hash = "sha256:795dafcc9c04ed0c1fb032c2aa73654d8e8c5023a7df64a53f39190ada629902", size = 194582, upload-time = "2025-10-12T14:55:20.501Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/0e/61/66938bbb5fc52dbdf84594873d5b51fb1f7c7794e9c0f5bd885f30bc507b/idna-3.11-py3-none-any.whl", hash = "sha256:771a87f49d9defaf64091e6e6fe9c18d4833f140bd19464795bc32d966ca37ea", size = 71008, upload-time = "2025-10-12T14:55:18.883Z" }, +] + +[[package]] +name = "iniconfig" +version = "2.1.0" +source = { registry = "https://pypi.org/simple" } +resolution-markers = [ + "python_full_version < '3.10'", +] +sdist = { url = 
"https://files.pythonhosted.org/packages/f2/97/ebf4da567aa6827c909642694d71c9fcf53e5b504f2d96afea02718862f3/iniconfig-2.1.0.tar.gz", hash = "sha256:3abbd2e30b36733fee78f9c7f7308f2d0050e88f0087fd25c2645f63c773e1c7", size = 4793, upload-time = "2025-03-19T20:09:59.721Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/2c/e1/e6716421ea10d38022b952c159d5161ca1193197fb744506875fbb87ea7b/iniconfig-2.1.0-py3-none-any.whl", hash = "sha256:9deba5723312380e77435581c6bf4935c94cbfab9b1ed33ef8d238ea168eb760", size = 6050, upload-time = "2025-03-19T20:10:01.071Z" }, +] + +[[package]] +name = "iniconfig" +version = "2.3.0" +source = { registry = "https://pypi.org/simple" } +resolution-markers = [ + "python_full_version >= '3.10'", +] +sdist = { url = "https://files.pythonhosted.org/packages/72/34/14ca021ce8e5dfedc35312d08ba8bf51fdd999c576889fc2c24cb97f4f10/iniconfig-2.3.0.tar.gz", hash = "sha256:c76315c77db068650d49c5b56314774a7804df16fee4402c1f19d6d15d8c4730", size = 20503, upload-time = "2025-10-18T21:55:43.219Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/cb/b1/3846dd7f199d53cb17f49cba7e651e9ce294d8497c8c150530ed11865bb8/iniconfig-2.3.0-py3-none-any.whl", hash = "sha256:f631c04d2c48c52b84d0d0549c99ff3859c98df65b3101406327ecc7d53fbf12", size = 7484, upload-time = "2025-10-18T21:55:41.639Z" }, +] + +[[package]] +name = "packaging" +version = "25.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/a1/d4/1fc4078c65507b51b96ca8f8c3ba19e6a61c8253c72794544580a7b6c24d/packaging-25.0.tar.gz", hash = "sha256:d443872c98d677bf60f6a1f2f8c1cb748e8fe762d2bf9d3148b5599295b0fc4f", size = 165727, upload-time = "2025-04-19T11:48:59.673Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/20/12/38679034af332785aac8774540895e234f4d07f7545804097de4b666afd8/packaging-25.0-py3-none-any.whl", hash = "sha256:29572ef2b1f17581046b3a2227d5c611fb25ec70ca1ba8554b24b0e69331a484", size = 66469, 
upload-time = "2025-04-19T11:48:57.875Z" }, +] + +[[package]] +name = "pluggy" +version = "1.6.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/f9/e2/3e91f31a7d2b083fe6ef3fa267035b518369d9511ffab804f839851d2779/pluggy-1.6.0.tar.gz", hash = "sha256:7dcc130b76258d33b90f61b658791dede3486c3e6bfb003ee5c9bfb396dd22f3", size = 69412, upload-time = "2025-05-15T12:30:07.975Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/54/20/4d324d65cc6d9205fabedc306948156824eb9f0ee1633355a8f7ec5c66bf/pluggy-1.6.0-py3-none-any.whl", hash = "sha256:e920276dd6813095e9377c0bc5566d94c932c33b27a3e3945d8389c374dd4746", size = 20538, upload-time = "2025-05-15T12:30:06.134Z" }, +] + +[[package]] +name = "pygments" +version = "2.19.2" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/b0/77/a5b8c569bf593b0140bde72ea885a803b82086995367bf2037de0159d924/pygments-2.19.2.tar.gz", hash = "sha256:636cb2477cec7f8952536970bc533bc43743542f70392ae026374600add5b887", size = 4968631, upload-time = "2025-06-21T13:39:12.283Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/c7/21/705964c7812476f378728bdf590ca4b771ec72385c533964653c68e86bdc/pygments-2.19.2-py3-none-any.whl", hash = "sha256:86540386c03d588bb81d44bc3928634ff26449851e99741617ecb9037ee5ec0b", size = 1225217, upload-time = "2025-06-21T13:39:07.939Z" }, +] + +[[package]] +name = "pytest" +version = "8.4.2" +source = { registry = "https://pypi.org/simple" } +resolution-markers = [ + "python_full_version < '3.10'", +] +dependencies = [ + { name = "colorama", marker = "python_full_version < '3.10' and sys_platform == 'win32'" }, + { name = "exceptiongroup", marker = "python_full_version < '3.10'" }, + { name = "iniconfig", version = "2.1.0", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version < '3.10'" }, + { name = "packaging", marker = "python_full_version < '3.10'" }, + 
{ name = "pluggy", marker = "python_full_version < '3.10'" }, + { name = "pygments", marker = "python_full_version < '3.10'" }, + { name = "tomli", marker = "python_full_version < '3.10'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/a3/5c/00a0e072241553e1a7496d638deababa67c5058571567b92a7eaa258397c/pytest-8.4.2.tar.gz", hash = "sha256:86c0d0b93306b961d58d62a4db4879f27fe25513d4b969df351abdddb3c30e01", size = 1519618, upload-time = "2025-09-04T14:34:22.711Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/a8/a4/20da314d277121d6534b3a980b29035dcd51e6744bd79075a6ce8fa4eb8d/pytest-8.4.2-py3-none-any.whl", hash = "sha256:872f880de3fc3a5bdc88a11b39c9710c3497a547cfa9320bc3c5e62fbf272e79", size = 365750, upload-time = "2025-09-04T14:34:20.226Z" }, +] + +[[package]] +name = "pytest" +version = "9.0.2" +source = { registry = "https://pypi.org/simple" } +resolution-markers = [ + "python_full_version >= '3.10'", +] +dependencies = [ + { name = "colorama", marker = "python_full_version >= '3.10' and sys_platform == 'win32'" }, + { name = "exceptiongroup", marker = "python_full_version == '3.10.*'" }, + { name = "iniconfig", version = "2.3.0", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version >= '3.10'" }, + { name = "packaging", marker = "python_full_version >= '3.10'" }, + { name = "pluggy", marker = "python_full_version >= '3.10'" }, + { name = "pygments", marker = "python_full_version >= '3.10'" }, + { name = "tomli", marker = "python_full_version == '3.10.*'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/d1/db/7ef3487e0fb0049ddb5ce41d3a49c235bf9ad299b6a25d5780a89f19230f/pytest-9.0.2.tar.gz", hash = "sha256:75186651a92bd89611d1d9fc20f0b4345fd827c41ccd5c299a868a05d70edf11", size = 1568901, upload-time = "2025-12-06T21:30:51.014Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/3b/ab/b3226f0bd7cdcf710fbede2b3548584366da3b19b5021e74f5bde2a8fa3f/pytest-9.0.2-py3-none-any.whl", 
hash = "sha256:711ffd45bf766d5264d487b917733b453d917afd2b0ad65223959f59089f875b", size = 374801, upload-time = "2025-12-06T21:30:49.154Z" }, +] + +[[package]] +name = "tomli" +version = "2.3.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/52/ed/3f73f72945444548f33eba9a87fc7a6e969915e7b1acc8260b30e1f76a2f/tomli-2.3.0.tar.gz", hash = "sha256:64be704a875d2a59753d80ee8a533c3fe183e3f06807ff7dc2232938ccb01549", size = 17392, upload-time = "2025-10-08T22:01:47.119Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/b3/2e/299f62b401438d5fe1624119c723f5d877acc86a4c2492da405626665f12/tomli-2.3.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:88bd15eb972f3664f5ed4b57c1634a97153b4bac4479dcb6a495f41921eb7f45", size = 153236, upload-time = "2025-10-08T22:01:00.137Z" }, + { url = "https://files.pythonhosted.org/packages/86/7f/d8fffe6a7aefdb61bced88fcb5e280cfd71e08939da5894161bd71bea022/tomli-2.3.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:883b1c0d6398a6a9d29b508c331fa56adbcdff647f6ace4dfca0f50e90dfd0ba", size = 148084, upload-time = "2025-10-08T22:01:01.63Z" }, + { url = "https://files.pythonhosted.org/packages/47/5c/24935fb6a2ee63e86d80e4d3b58b222dafaf438c416752c8b58537c8b89a/tomli-2.3.0-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:d1381caf13ab9f300e30dd8feadb3de072aeb86f1d34a8569453ff32a7dea4bf", size = 234832, upload-time = "2025-10-08T22:01:02.543Z" }, + { url = "https://files.pythonhosted.org/packages/89/da/75dfd804fc11e6612846758a23f13271b76d577e299592b4371a4ca4cd09/tomli-2.3.0-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:a0e285d2649b78c0d9027570d4da3425bdb49830a6156121360b3f8511ea3441", size = 242052, upload-time = "2025-10-08T22:01:03.836Z" }, + { url = 
"https://files.pythonhosted.org/packages/70/8c/f48ac899f7b3ca7eb13af73bacbc93aec37f9c954df3c08ad96991c8c373/tomli-2.3.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:0a154a9ae14bfcf5d8917a59b51ffd5a3ac1fd149b71b47a3a104ca4edcfa845", size = 239555, upload-time = "2025-10-08T22:01:04.834Z" }, + { url = "https://files.pythonhosted.org/packages/ba/28/72f8afd73f1d0e7829bfc093f4cb98ce0a40ffc0cc997009ee1ed94ba705/tomli-2.3.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:74bf8464ff93e413514fefd2be591c3b0b23231a77f901db1eb30d6f712fc42c", size = 245128, upload-time = "2025-10-08T22:01:05.84Z" }, + { url = "https://files.pythonhosted.org/packages/b6/eb/a7679c8ac85208706d27436e8d421dfa39d4c914dcf5fa8083a9305f58d9/tomli-2.3.0-cp311-cp311-win32.whl", hash = "sha256:00b5f5d95bbfc7d12f91ad8c593a1659b6387b43f054104cda404be6bda62456", size = 96445, upload-time = "2025-10-08T22:01:06.896Z" }, + { url = "https://files.pythonhosted.org/packages/0a/fe/3d3420c4cb1ad9cb462fb52967080575f15898da97e21cb6f1361d505383/tomli-2.3.0-cp311-cp311-win_amd64.whl", hash = "sha256:4dc4ce8483a5d429ab602f111a93a6ab1ed425eae3122032db7e9acf449451be", size = 107165, upload-time = "2025-10-08T22:01:08.107Z" }, + { url = "https://files.pythonhosted.org/packages/ff/b7/40f36368fcabc518bb11c8f06379a0fd631985046c038aca08c6d6a43c6e/tomli-2.3.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:d7d86942e56ded512a594786a5ba0a5e521d02529b3826e7761a05138341a2ac", size = 154891, upload-time = "2025-10-08T22:01:09.082Z" }, + { url = "https://files.pythonhosted.org/packages/f9/3f/d9dd692199e3b3aab2e4e4dd948abd0f790d9ded8cd10cbaae276a898434/tomli-2.3.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:73ee0b47d4dad1c5e996e3cd33b8a76a50167ae5f96a2607cbe8cc773506ab22", size = 148796, upload-time = "2025-10-08T22:01:10.266Z" }, + { url = 
"https://files.pythonhosted.org/packages/60/83/59bff4996c2cf9f9387a0f5a3394629c7efa5ef16142076a23a90f1955fa/tomli-2.3.0-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:792262b94d5d0a466afb5bc63c7daa9d75520110971ee269152083270998316f", size = 242121, upload-time = "2025-10-08T22:01:11.332Z" }, + { url = "https://files.pythonhosted.org/packages/45/e5/7c5119ff39de8693d6baab6c0b6dcb556d192c165596e9fc231ea1052041/tomli-2.3.0-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:4f195fe57ecceac95a66a75ac24d9d5fbc98ef0962e09b2eddec5d39375aae52", size = 250070, upload-time = "2025-10-08T22:01:12.498Z" }, + { url = "https://files.pythonhosted.org/packages/45/12/ad5126d3a278f27e6701abde51d342aa78d06e27ce2bb596a01f7709a5a2/tomli-2.3.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:e31d432427dcbf4d86958c184b9bfd1e96b5b71f8eb17e6d02531f434fd335b8", size = 245859, upload-time = "2025-10-08T22:01:13.551Z" }, + { url = "https://files.pythonhosted.org/packages/fb/a1/4d6865da6a71c603cfe6ad0e6556c73c76548557a8d658f9e3b142df245f/tomli-2.3.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:7b0882799624980785240ab732537fcfc372601015c00f7fc367c55308c186f6", size = 250296, upload-time = "2025-10-08T22:01:14.614Z" }, + { url = "https://files.pythonhosted.org/packages/a0/b7/a7a7042715d55c9ba6e8b196d65d2cb662578b4d8cd17d882d45322b0d78/tomli-2.3.0-cp312-cp312-win32.whl", hash = "sha256:ff72b71b5d10d22ecb084d345fc26f42b5143c5533db5e2eaba7d2d335358876", size = 97124, upload-time = "2025-10-08T22:01:15.629Z" }, + { url = "https://files.pythonhosted.org/packages/06/1e/f22f100db15a68b520664eb3328fb0ae4e90530887928558112c8d1f4515/tomli-2.3.0-cp312-cp312-win_amd64.whl", hash = "sha256:1cb4ed918939151a03f33d4242ccd0aa5f11b3547d0cf30f7c74a408a5b99878", size = 107698, upload-time = "2025-10-08T22:01:16.51Z" }, + { url = 
"https://files.pythonhosted.org/packages/89/48/06ee6eabe4fdd9ecd48bf488f4ac783844fd777f547b8d1b61c11939974e/tomli-2.3.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:5192f562738228945d7b13d4930baffda67b69425a7f0da96d360b0a3888136b", size = 154819, upload-time = "2025-10-08T22:01:17.964Z" }, + { url = "https://files.pythonhosted.org/packages/f1/01/88793757d54d8937015c75dcdfb673c65471945f6be98e6a0410fba167ed/tomli-2.3.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:be71c93a63d738597996be9528f4abe628d1adf5e6eb11607bc8fe1a510b5dae", size = 148766, upload-time = "2025-10-08T22:01:18.959Z" }, + { url = "https://files.pythonhosted.org/packages/42/17/5e2c956f0144b812e7e107f94f1cc54af734eb17b5191c0bbfb72de5e93e/tomli-2.3.0-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:c4665508bcbac83a31ff8ab08f424b665200c0e1e645d2bd9ab3d3e557b6185b", size = 240771, upload-time = "2025-10-08T22:01:20.106Z" }, + { url = "https://files.pythonhosted.org/packages/d5/f4/0fbd014909748706c01d16824eadb0307115f9562a15cbb012cd9b3512c5/tomli-2.3.0-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:4021923f97266babc6ccab9f5068642a0095faa0a51a246a6a02fccbb3514eaf", size = 248586, upload-time = "2025-10-08T22:01:21.164Z" }, + { url = "https://files.pythonhosted.org/packages/30/77/fed85e114bde5e81ecf9bc5da0cc69f2914b38f4708c80ae67d0c10180c5/tomli-2.3.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:a4ea38c40145a357d513bffad0ed869f13c1773716cf71ccaa83b0fa0cc4e42f", size = 244792, upload-time = "2025-10-08T22:01:22.417Z" }, + { url = "https://files.pythonhosted.org/packages/55/92/afed3d497f7c186dc71e6ee6d4fcb0acfa5f7d0a1a2878f8beae379ae0cc/tomli-2.3.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:ad805ea85eda330dbad64c7ea7a4556259665bdf9d2672f5dccc740eb9d3ca05", size = 248909, upload-time = "2025-10-08T22:01:23.859Z" }, + { url = 
"https://files.pythonhosted.org/packages/f8/84/ef50c51b5a9472e7265ce1ffc7f24cd4023d289e109f669bdb1553f6a7c2/tomli-2.3.0-cp313-cp313-win32.whl", hash = "sha256:97d5eec30149fd3294270e889b4234023f2c69747e555a27bd708828353ab606", size = 96946, upload-time = "2025-10-08T22:01:24.893Z" }, + { url = "https://files.pythonhosted.org/packages/b2/b7/718cd1da0884f281f95ccfa3a6cc572d30053cba64603f79d431d3c9b61b/tomli-2.3.0-cp313-cp313-win_amd64.whl", hash = "sha256:0c95ca56fbe89e065c6ead5b593ee64b84a26fca063b5d71a1122bf26e533999", size = 107705, upload-time = "2025-10-08T22:01:26.153Z" }, + { url = "https://files.pythonhosted.org/packages/19/94/aeafa14a52e16163008060506fcb6aa1949d13548d13752171a755c65611/tomli-2.3.0-cp314-cp314-macosx_10_13_x86_64.whl", hash = "sha256:cebc6fe843e0733ee827a282aca4999b596241195f43b4cc371d64fc6639da9e", size = 154244, upload-time = "2025-10-08T22:01:27.06Z" }, + { url = "https://files.pythonhosted.org/packages/db/e4/1e58409aa78eefa47ccd19779fc6f36787edbe7d4cd330eeeedb33a4515b/tomli-2.3.0-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:4c2ef0244c75aba9355561272009d934953817c49f47d768070c3c94355c2aa3", size = 148637, upload-time = "2025-10-08T22:01:28.059Z" }, + { url = "https://files.pythonhosted.org/packages/26/b6/d1eccb62f665e44359226811064596dd6a366ea1f985839c566cd61525ae/tomli-2.3.0-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:c22a8bf253bacc0cf11f35ad9808b6cb75ada2631c2d97c971122583b129afbc", size = 241925, upload-time = "2025-10-08T22:01:29.066Z" }, + { url = "https://files.pythonhosted.org/packages/70/91/7cdab9a03e6d3d2bb11beae108da5bdc1c34bdeb06e21163482544ddcc90/tomli-2.3.0-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:0eea8cc5c5e9f89c9b90c4896a8deefc74f518db5927d0e0e8d4a80953d774d0", size = 249045, upload-time = "2025-10-08T22:01:31.98Z" }, + { url = 
"https://files.pythonhosted.org/packages/15/1b/8c26874ed1f6e4f1fcfeb868db8a794cbe9f227299402db58cfcc858766c/tomli-2.3.0-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:b74a0e59ec5d15127acdabd75ea17726ac4c5178ae51b85bfe39c4f8a278e879", size = 245835, upload-time = "2025-10-08T22:01:32.989Z" }, + { url = "https://files.pythonhosted.org/packages/fd/42/8e3c6a9a4b1a1360c1a2a39f0b972cef2cc9ebd56025168c4137192a9321/tomli-2.3.0-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:b5870b50c9db823c595983571d1296a6ff3e1b88f734a4c8f6fc6188397de005", size = 253109, upload-time = "2025-10-08T22:01:34.052Z" }, + { url = "https://files.pythonhosted.org/packages/22/0c/b4da635000a71b5f80130937eeac12e686eefb376b8dee113b4a582bba42/tomli-2.3.0-cp314-cp314-win32.whl", hash = "sha256:feb0dacc61170ed7ab602d3d972a58f14ee3ee60494292d384649a3dc38ef463", size = 97930, upload-time = "2025-10-08T22:01:35.082Z" }, + { url = "https://files.pythonhosted.org/packages/b9/74/cb1abc870a418ae99cd5c9547d6bce30701a954e0e721821df483ef7223c/tomli-2.3.0-cp314-cp314-win_amd64.whl", hash = "sha256:b273fcbd7fc64dc3600c098e39136522650c49bca95df2d11cf3b626422392c8", size = 107964, upload-time = "2025-10-08T22:01:36.057Z" }, + { url = "https://files.pythonhosted.org/packages/54/78/5c46fff6432a712af9f792944f4fcd7067d8823157949f4e40c56b8b3c83/tomli-2.3.0-cp314-cp314t-macosx_10_13_x86_64.whl", hash = "sha256:940d56ee0410fa17ee1f12b817b37a4d4e4dc4d27340863cc67236c74f582e77", size = 163065, upload-time = "2025-10-08T22:01:37.27Z" }, + { url = "https://files.pythonhosted.org/packages/39/67/f85d9bd23182f45eca8939cd2bc7050e1f90c41f4a2ecbbd5963a1d1c486/tomli-2.3.0-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:f85209946d1fe94416debbb88d00eb92ce9cd5266775424ff81bc959e001acaf", size = 159088, upload-time = "2025-10-08T22:01:38.235Z" }, + { url = 
"https://files.pythonhosted.org/packages/26/5a/4b546a0405b9cc0659b399f12b6adb750757baf04250b148d3c5059fc4eb/tomli-2.3.0-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:a56212bdcce682e56b0aaf79e869ba5d15a6163f88d5451cbde388d48b13f530", size = 268193, upload-time = "2025-10-08T22:01:39.712Z" }, + { url = "https://files.pythonhosted.org/packages/42/4f/2c12a72ae22cf7b59a7fe75b3465b7aba40ea9145d026ba41cb382075b0e/tomli-2.3.0-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:c5f3ffd1e098dfc032d4d3af5c0ac64f6d286d98bc148698356847b80fa4de1b", size = 275488, upload-time = "2025-10-08T22:01:40.773Z" }, + { url = "https://files.pythonhosted.org/packages/92/04/a038d65dbe160c3aa5a624e93ad98111090f6804027d474ba9c37c8ae186/tomli-2.3.0-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:5e01decd096b1530d97d5d85cb4dff4af2d8347bd35686654a004f8dea20fc67", size = 272669, upload-time = "2025-10-08T22:01:41.824Z" }, + { url = "https://files.pythonhosted.org/packages/be/2f/8b7c60a9d1612a7cbc39ffcca4f21a73bf368a80fc25bccf8253e2563267/tomli-2.3.0-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:8a35dd0e643bb2610f156cca8db95d213a90015c11fee76c946aa62b7ae7e02f", size = 279709, upload-time = "2025-10-08T22:01:43.177Z" }, + { url = "https://files.pythonhosted.org/packages/7e/46/cc36c679f09f27ded940281c38607716c86cf8ba4a518d524e349c8b4874/tomli-2.3.0-cp314-cp314t-win32.whl", hash = "sha256:a1f7f282fe248311650081faafa5f4732bdbfef5d45fe3f2e702fbc6f2d496e0", size = 107563, upload-time = "2025-10-08T22:01:44.233Z" }, + { url = "https://files.pythonhosted.org/packages/84/ff/426ca8683cf7b753614480484f6437f568fd2fda2edbdf57a2d3d8b27a0b/tomli-2.3.0-cp314-cp314t-win_amd64.whl", hash = "sha256:70a251f8d4ba2d9ac2542eecf008b3c8a9fc5c3f9f02c56a9d7952612be2fdba", size = 119756, upload-time = "2025-10-08T22:01:45.234Z" }, + { url = 
"https://files.pythonhosted.org/packages/77/b8/0135fadc89e73be292b473cb820b4f5a08197779206b33191e801feeae40/tomli-2.3.0-py3-none-any.whl", hash = "sha256:e95b1af3c5b07d9e643909b5abbec77cd9f1217e6d0bca72b0234736b9fb1f1b", size = 14408, upload-time = "2025-10-08T22:01:46.04Z" }, +] + +[[package]] +name = "typing-extensions" +version = "4.15.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/72/94/1a15dd82efb362ac84269196e94cf00f187f7ed21c242792a923cdb1c61f/typing_extensions-4.15.0.tar.gz", hash = "sha256:0cea48d173cc12fa28ecabc3b837ea3cf6f38c6d1136f85cbaaf598984861466", size = 109391, upload-time = "2025-08-25T13:49:26.313Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/18/67/36e9267722cc04a6b9f15c7f3441c2363321a3ea07da7ae0c0707beb2a9c/typing_extensions-4.15.0-py3-none-any.whl", hash = "sha256:f0fa19c6845758ab08074a0cfa8b7aecb71c999ca73d62883bc25cc018c4e548", size = 44614, upload-time = "2025-08-25T13:49:24.86Z" }, +] diff --git a/websites/bitview/CLAUDE.md b/websites/bitview/CLAUDE.md new file mode 100644 index 000000000..f94af66ab --- /dev/null +++ b/websites/bitview/CLAUDE.md @@ -0,0 +1,7 @@ +# Types + +To check types run: + +```sh +npx --package typescript tsc --noEmit --pretty false | grep -v "modules/" +``` diff --git a/websites/bitview/scripts/core/chart/index.js b/websites/bitview/scripts/chart/index.js similarity index 93% rename from websites/bitview/scripts/core/chart/index.js rename to websites/bitview/scripts/chart/index.js index aa67633fd..e4244d1c5 100644 --- a/websites/bitview/scripts/core/chart/index.js +++ b/websites/bitview/scripts/chart/index.js @@ -1,4 +1,4 @@ -/** @import { IChartApi, ISeriesApi as _ISeriesApi, SeriesDefinition, SingleValueData as _SingleValueData, CandlestickData as _CandlestickData, BaselineData as _BaselineData, HistogramData as _HistogramData, SeriesType, IPaneApi, LineSeriesPartialOptions as _LineSeriesPartialOptions, 
HistogramSeriesPartialOptions as _HistogramSeriesPartialOptions, BaselineSeriesPartialOptions as _BaselineSeriesPartialOptions, CandlestickSeriesPartialOptions as _CandlestickSeriesPartialOptions, WhitespaceData, DeepPartial, ChartOptions, Time, LineData as _LineData, createChart as CreateChart } from '../../modules/lightweight-charts/5.0.9/dist/typings' */ +/** @import { IChartApi, ISeriesApi as _ISeriesApi, SeriesDefinition, SingleValueData as _SingleValueData, CandlestickData as _CandlestickData, BaselineData as _BaselineData, HistogramData as _HistogramData, SeriesType, IPaneApi, LineSeriesPartialOptions as _LineSeriesPartialOptions, HistogramSeriesPartialOptions as _HistogramSeriesPartialOptions, BaselineSeriesPartialOptions as _BaselineSeriesPartialOptions, CandlestickSeriesPartialOptions as _CandlestickSeriesPartialOptions, WhitespaceData, DeepPartial, ChartOptions, Time, LineData as _LineData, createChart as CreateChart } from '../modules/lightweight-charts/5.0.9/dist/typings' */ import { createChart as _createChart, @@ -7,7 +7,7 @@ import { LineSeries, BaselineSeries, // } from "../modules/lightweight-charts/5.0.9/dist/lightweight-charts.standalone.development.mjs"; -} from "../../modules/lightweight-charts/5.0.9/dist/lightweight-charts.standalone.production.mjs"; +} from "../modules/lightweight-charts/5.0.9/dist/lightweight-charts.standalone.production.mjs"; const createChart = /** @type {CreateChart} */ (_createChart); @@ -15,12 +15,12 @@ import { createHorizontalChoiceField, createLabeledInput, createSpanName, -} from "../dom"; +} from "../utils/dom"; import { createOklchToRGBA } from "./oklch"; -import { throttle } from "../timing"; -import { serdeBool } from "../serde"; -import { stringToId } from "../format"; -import { style } from "../elements"; +import { throttle } from "../utils/timing"; +import { serdeBool } from "../utils/serde"; +import { stringToId } from "../utils/format"; +import { style } from "../utils/elements"; /** * @typedef {Object} 
Valued @@ -68,6 +68,7 @@ const lineWidth = /** @type {any} */ (1.5); * @param {Signals} args.signals * @param {Colors} args.colors * @param {Resources} args.resources + * @param {BrkClient} args.brk * @param {Accessor} args.index * @param {((unknownTimeScaleCallback: VoidFunction) => void)} [args.timeScaleSetCallback] * @param {true} [args.fitContent] @@ -80,6 +81,7 @@ function createChartElement({ id: chartId, index, resources, + brk, timeScaleSetCallback, fitContent, config, @@ -204,7 +206,9 @@ function createChartElement({ }); }); - const activeResources = /** @type {Set} */ (new Set()); + const activeResources = /** @type {Set>} */ ( + new Set() + ); ichart.subscribeCrosshairMove( throttle(() => { activeResources.forEach((v) => { @@ -312,7 +316,7 @@ function createChartElement({ * @param {number} args.order * @param {Color[]} args.colors * @param {SeriesType} args.seriesType - * @param {Metric} [args.metric] + * @param {MetricAccessor} [args.metric] * @param {SetDataCallback} [args.setDataCallback] * @param {Accessor[]>} [args.data] * @param {number} args.paneIndex @@ -353,7 +357,7 @@ function createChartElement({ iseries.setSeriesOrder(order); - /** @type {MetricResource | undefined} */ + /** @type {MetricResource | undefined} */ let _valuesResource; /** @type {Series} */ @@ -375,27 +379,35 @@ function createChartElement({ if (metric) { signals.createEffect(index, (index) => { - const timeResource = resources.metrics.getOrCreate( - index === "height" ? "timestamp_fixed" : "timestamp", - index, - ); - timeResource.fetch(); + // Get timestamp metric from tree based on index type + // timestampFixed has height only, timestamp has date-based indexes + const timeMetric = + index === "height" + ? 
brk.tree.computed.blocks.time.timestampFixed + : brk.tree.computed.blocks.time.timestamp; + const timeNode = timeMetric.by[index]; + const valuesNode = metric.by[index]; + if (!timeNode || !valuesNode) + throw new Error(`Missing node for index: ${index}`); - const valuesResource = resources.metrics.getOrCreate(metric, index); + const timeResource = resources.useMetricNode(timeNode); + const valuesResource = resources.useMetricNode(valuesNode); _valuesResource = valuesResource; - series.url.set(() => valuesResource.url); + series.url.set(() => `${brk.baseUrl}${valuesResource.path}`); signals.createEffect(active, (active) => { if (active) { + timeResource.fetch(); valuesResource.fetch(); activeResources.add(valuesResource); - const fetchedKey = resources.metrics.genKey(); + const timeRange = timeResource.range(); + const valuesRange = valuesResource.range(); signals.createEffect( () => ({ - _indexes: timeResource.fetched().get(fetchedKey)?.data(), - values: valuesResource.fetched().get(fetchedKey)?.data(), + _indexes: timeRange.response()?.data, + values: valuesRange.response()?.data, }), ({ _indexes, values }) => { if (!_indexes?.length || !values?.length) return; @@ -434,6 +446,8 @@ function createChartElement({ // if (sameTime) { // console.log(data[offsetedI]); // } + if (!Array.isArray(v) || v.length !== 4) + throw new Error(`Expected OHLC tuple, got: ${v}`); let [open, high, low, close] = v; data[offsetedI] = { time, @@ -545,7 +559,7 @@ function createChartElement({ * @param {string} args.name * @param {Unit} args.unit * @param {number} args.order - * @param {Metric} [args.metric] + * @param {MetricAccessor} [args.metric] * @param {Accessor} [args.data] * @param {number} [args.paneIndex] * @param {boolean} [args.defaultActive] @@ -605,7 +619,7 @@ function createChartElement({ * @param {Unit} args.unit * @param {number} args.order * @param {Color} args.color - * @param {Metric} [args.metric] + * @param {MetricAccessor} [args.metric] * @param {Accessor} 
[args.data] * @param {number} [args.paneIndex] * @param {boolean} [args.defaultActive] @@ -657,7 +671,7 @@ function createChartElement({ * @param {Unit} args.unit * @param {number} args.order * @param {Accessor} [args.data] - * @param {Metric} [args.metric] + * @param {MetricAccessor} [args.metric] * @param {Color} [args.color] * @param {SetDataCallback} [args.setDataCallback] * @param {number} [args.paneIndex] @@ -716,7 +730,7 @@ function createChartElement({ * @param {Unit} args.unit * @param {number} args.order * @param {Accessor} [args.data] - * @param {Metric} [args.metric] + * @param {MetricAccessor} [args.metric] * @param {SetDataCallback} [args.setDataCallback] * @param {number} [args.paneIndex] * @param {boolean} [args.defaultActive] diff --git a/websites/bitview/scripts/core/chart/oklch.js b/websites/bitview/scripts/chart/oklch.js similarity index 100% rename from websites/bitview/scripts/core/chart/oklch.js rename to websites/bitview/scripts/chart/oklch.js diff --git a/websites/bitview/scripts/entry.js b/websites/bitview/scripts/entry.js index 11ea60b7b..bde283312 100644 --- a/websites/bitview/scripts/entry.js +++ b/websites/bitview/scripts/entry.js @@ -1,24 +1,21 @@ /** * @import * as _ from "./modules/leeoniya-ufuzzy/1.0.19/dist/uFuzzy.d.ts" * - * @import { Signal, Signals, Accessor } from "./modules/brk-signals/index"; + * @import { Signal, Signals, Accessor } from "./signals"; * - * @import { BRK } from "./modules/brk-client/index.js" - * @import { Metric, MetricToIndexes } from "./modules/brk-client/metrics" - * @import { IndexName } from "./modules/brk-client/generated/metrics" - * @import { PoolId, PoolIdToPoolName } from "./modules/brk-client/generated/pools" + * @import { BrkClient, CatalogTree_Computed_Distribution_UtxoCohorts as UtxoCohortTree, CatalogTree_Computed_Distribution_AddressCohorts as AddressCohortTree, CatalogTree_Computed_Distribution_UtxoCohorts_All as AllUtxoPattern, UpTo1dPattern as MaxAgePattern, _10yTo12yPattern as 
MinAgePattern, _0satsPattern2 as UtxoAmountPattern, _0satsPattern as AddressAmountPattern, Ratio1ySdPattern, Dollars, Price111dSmaPattern as EmaRatioPattern, Index, BlockCountPattern, BitcoinPattern, BlockSizePattern, BlockIntervalPattern, CoinbasePattern, Constant0Pattern, ActivePriceRatioPattern, _0satsPattern, PricePaidPattern2, UnclaimedRewardsPattern as ValuePattern, SentPattern as RewardPattern, Metric } from "./modules/brk-client/index.js" * - * @import { Resources, MetricResource } from './modules/brk-resources/index.js' + * @import { Resources, MetricResource } from './resources' * - * @import { Valued, SingleValueData, CandlestickData, Series, ISeries, HistogramData, LineData, BaselineData, LineSeriesPartialOptions, BaselineSeriesPartialOptions, HistogramSeriesPartialOptions, CandlestickSeriesPartialOptions, CreateChartElement, Chart } from "./core/chart/index" + * @import { Valued, SingleValueData, CandlestickData, Series, ISeries, HistogramData, LineData, BaselineData, LineSeriesPartialOptions, BaselineSeriesPartialOptions, HistogramSeriesPartialOptions, CandlestickSeriesPartialOptions, CreateChartElement, Chart } from "./chart/index" * - * @import { Color, ColorName, Colors } from "./core/colors" + * @import { Color, ColorName, Colors } from "./utils/colors" * - * @import { WebSockets } from "./core/ws" + * @import { WebSockets } from "./utils/ws" * - * @import { Option, PartialChartOption, ChartOption, AnyPartialOption, ProcessedOptionAddons, OptionsTree, SimulationOption, AnySeriesBlueprint, SeriesType, AnyFetchedSeriesBlueprint, TableOption, ExplorerOption, UrlOption, PartialOptionsGroup, OptionsGroup, PartialOptionsTree } from "./core/options/partial" + * @import { Option, PartialChartOption, ChartOption, AnyPartialOption, ProcessedOptionAddons, OptionsTree, SimulationOption, AnySeriesBlueprint, SeriesType, AnyFetchedSeriesBlueprint, TableOption, ExplorerOption, UrlOption, PartialOptionsGroup, OptionsGroup, PartialOptionsTree, UtxoCohortObject, 
AddressCohortObject, CohortObject, UtxoCohortGroupObject, AddressCohortGroupObject, CohortGroupObject, MetricAccessor, FetchedLineSeriesBlueprint, PartialContext, AgeCohortObject, AmountCohortObject, AgeCohortGroupObject, AmountCohortGroupObject } from "./options/partial/index.js" * - * @import { Unit } from "./core/serde" + * @import { Unit } from "./utils/serde" * * @import { ChartableIndexName } from "./panes/chart/index.js"; */ @@ -28,6 +25,40 @@ /** * @typedef {typeof import("./lazy")["default"]} Modules * @typedef {[number, number, number, number]} OHLCTuple + * + * @typedef {InstanceType["INDEXES"]} Indexes + * @typedef {Indexes[number]} IndexName + * @typedef {InstanceType["POOL_ID_TO_POOL_NAME"]} PoolIdToPoolName + * @typedef {keyof PoolIdToPoolName} PoolId + * + * Pattern unions by cohort type + * @typedef {AllUtxoPattern | MaxAgePattern | MinAgePattern | UtxoAmountPattern} UtxoCohortPattern + * @typedef {AddressAmountPattern} AddressCohortPattern + * @typedef {UtxoCohortPattern | AddressCohortPattern} CohortPattern + * + * Capability-based pattern groupings (patterns that have specific properties) + * @typedef {AllUtxoPattern | MinAgePattern | UtxoAmountPattern} PatternWithRealizedPrice + * @typedef {AllUtxoPattern} PatternWithFullRealized + * @typedef {AllUtxoPattern | MinAgePattern | UtxoAmountPattern} PatternWithNupl + * @typedef {AllUtxoPattern | MinAgePattern | UtxoAmountPattern} PatternWithPricePaidStats + * @typedef {AllUtxoPattern | MinAgePattern | UtxoAmountPattern} PatternWithActivity + * @typedef {AllUtxoPattern | MaxAgePattern | MinAgePattern} PatternWithPricePercentiles + * + * Cohort objects with specific pattern capabilities + * @typedef {{ name: string, title: string, color: Color, tree: PatternWithRealizedPrice }} CohortWithRealizedPrice + * @typedef {{ name: string, title: string, color: Color, tree: PatternWithFullRealized }} CohortWithFullRealized + * @typedef {{ name: string, title: string, color: Color, tree: PatternWithNupl }} 
CohortWithNupl + * @typedef {{ name: string, title: string, color: Color, tree: PatternWithPricePaidStats }} CohortWithPricePaidStats + * @typedef {{ name: string, title: string, color: Color, tree: PatternWithActivity }} CohortWithActivity + * @typedef {{ name: string, title: string, color: Color, tree: PatternWithPricePercentiles }} CohortWithPricePercentiles + * + * Tree branch types + * @typedef {InstanceType["tree"]["computed"]["market"]} Market + * @typedef {Market["movingAverage"]} MarketMovingAverage + * @typedef {Market["dca"]} MarketDca + * + * Generic tree node type for walking + * @typedef {MetricAccessor | Record} TreeNode */ // DO NOT CHANGE, Exact format is expected in `brk_bundler` diff --git a/websites/bitview/scripts/lazy.js b/websites/bitview/scripts/lazy.js index 8dfb620e4..1c36e0468 100644 --- a/websites/bitview/scripts/lazy.js +++ b/websites/bitview/scripts/lazy.js @@ -1,6 +1,6 @@ const imports = { async signals() { - return import("./modules/brk-signals/index.js").then((d) => d.default); + return import("./signals.js").then((d) => d.default); }, async leanQr() { return import("./modules/lean-qr/2.6.0/index.mjs").then((d) => d); @@ -13,17 +13,17 @@ const imports = { async brkClient() { return import("./modules/brk-client/index.js").then((d) => d); }, - async brkResources() { - return import("./modules/brk-resources/index.js").then((d) => d); + async resources() { + return import("./resources.js").then((d) => d); }, async chart() { return window.document.fonts.ready.then(() => - import("./core/chart/index.js").then((d) => d.default), + import("./chart/index.js").then((d) => d.default), ); }, async options() { - return import("./core/options/full.js").then((d) => d); + return import("./options/full.js").then((d) => d); }, }; diff --git a/websites/bitview/scripts/main.js b/websites/bitview/scripts/main.js index 8fce549c7..9e59f3ad8 100644 --- a/websites/bitview/scripts/main.js +++ b/websites/bitview/scripts/main.js @@ -1,11 +1,11 @@ -import { 
createColors } from "./core/colors"; -import { createWebSockets } from "./core/ws"; -import * as formatters from "./core/format"; +import { createColors } from "./utils/colors"; +import { createWebSockets } from "./utils/ws"; +import * as formatters from "./utils/format"; import modules from "./lazy"; -import { onFirstIntersection, getElementById, isHidden } from "./core/dom"; -import { next } from "./core/timing"; -import { replaceHistory } from "./core/url"; -import { removeStored, writeToStorage } from "./core/storage"; +import { onFirstIntersection, getElementById, isHidden } from "./utils/dom"; +import { next } from "./utils/timing"; +import { replaceHistory } from "./utils/url"; +import { removeStored, writeToStorage } from "./utils/storage"; import { asideElement, asideLabelElement, @@ -22,7 +22,7 @@ import { simulationElement, style, tableElement, -} from "./core/elements"; +} from "./utils/elements"; function initFrameSelectors() { const children = Array.from(frameSelectorsElement.children); @@ -109,15 +109,15 @@ initFrameSelectors(); Promise.all([ modules.signals(), modules.brkClient(), - modules.brkResources(), + modules.resources(), modules.options(), -]).then(([signals, { BrkClient, VERSION }, { createResources }, { initOptions }]) => +]).then(([signals, { BrkClient }, { createResources }, { initOptions }]) => signals.createRoot(() => { const brk = new BrkClient("/"); const resources = createResources(signals); const owner = signals.getOwner(); - console.log(`VERSION = ${VERSION}`); + console.log(`VERSION = ${brk.VERSION}`); function initDark() { const preferredColorSchemeMatchMedia = window.matchMedia( diff --git a/websites/bitview/scripts/core/options/full.js b/websites/bitview/scripts/options/full.js similarity index 94% rename from websites/bitview/scripts/core/options/full.js rename to websites/bitview/scripts/options/full.js index e36f01b56..a731ac4dc 100644 --- a/websites/bitview/scripts/core/options/full.js +++ 
b/websites/bitview/scripts/options/full.js @@ -1,22 +1,25 @@ -import { createPartialOptions } from "./partial"; +import { createPartialOptions } from "./partial/index.js"; import { createButtonElement, createAnchorElement, insertElementAtIndex, -} from "../dom"; -import { serdeUnit } from "../serde"; -import { pushHistory, resetParams } from "../url"; -import { readStored, writeToStorage } from "../storage"; -import { stringToId } from "../format"; +} from "../utils/dom"; +import { serdeUnit } from "../utils/serde"; +import { pushHistory, resetParams } from "../utils/url"; +import { readStored, writeToStorage } from "../utils/storage"; +import { stringToId } from "../utils/format"; +import { collect, markUsed, logUnused } from "./unused.js"; /** * @param {Object} args * @param {Colors} args.colors * @param {Signals} args.signals - * @param {BRK} args.brk + * @param {BrkClient} args.brk * @param {Signal} args.qrcode */ export function initOptions({ colors, signals, brk, qrcode }) { + collect(brk.tree); + const LS_SELECTED_KEY = `selected_path`; const urlPath_ = window.document.location.pathname @@ -46,11 +49,9 @@ export function initOptions({ colors, signals, brk, qrcode }) { */ function arrayToRecord(arr = []) { return (arr || []).reduce((record, blueprint) => { - if (!brk.hasMetric(blueprint.metric)) { - // if (localhost && !brk.hasMetric(blueprint.metric)) { - throw Error(`${blueprint.metric} not recognized`); - } - const unit = blueprint.unit ?? serdeUnit.deserialize(blueprint.metric); + markUsed(blueprint.metric); + // Use any index's path - unit is the same regardless of index (e.g., supply is "sats" for both height and dateindex) + const unit = blueprint.unit ?? 
serdeUnit.deserialize(blueprint.metric.name); record[unit] ??= []; record[unit].push(blueprint); return record; @@ -347,6 +348,7 @@ export function initOptions({ colors, signals, brk, qrcode }) { ); } recursiveProcessPartialTree(partialOptions, parent); + logUnused(); if (!selected()) { const option = diff --git a/websites/bitview/scripts/core/options/partial.js b/websites/bitview/scripts/options/partial.js similarity index 64% rename from websites/bitview/scripts/core/options/partial.js rename to websites/bitview/scripts/options/partial.js index 2723f723a..8052f135c 100644 --- a/websites/bitview/scripts/core/options/partial.js +++ b/websites/bitview/scripts/options/partial.js @@ -1,3 +1,12 @@ +// @ts-nocheck + +/** + * A tree accessor - an object with a `.by` property containing MetricNodes keyed by index name. + * Example: brk.tree.computed.price.priceClose has { by: { dateindex: MetricNode, height: MetricNode, ... } } + * @template T + * @typedef {{ by: Partial>>, indexes: () => Index[] }} MetricAccessor + */ + /** * @typedef {Object} BaseSeriesBlueprint * @property {string} title @@ -36,7 +45,7 @@ * * @typedef {AnySeriesBlueprint["type"]} SeriesType * - * @typedef {{ metric: Metric, unit?: Unit }} FetchedAnySeriesOptions + * @typedef {{ metric: MetricAccessor, unit?: Unit }} FetchedAnySeriesOptions * * @typedef {BaselineSeriesBlueprint & FetchedAnySeriesOptions} FetchedBaselineSeriesBlueprint * @typedef {CandlestickSeriesBlueprint & FetchedAnySeriesOptions} FetchedCandlestickSeriesBlueprint @@ -117,15 +126,18 @@ * */ -import { localhost } from "../env"; +import { localhost } from "../utils/env"; /** * @param {Object} args * @param {Colors} args.colors - * @param {BRK} args.brk + * @param {BrkClient} args.brk * @returns {PartialOptionsTree} */ export function createPartialOptions({ colors, brk }) { + /** @type {(obj: T) => [keyof T, T[keyof T]][]} */ + const entries = Object.entries; + /** * @param {string} id * @param {boolean} compoundAdjective @@ -139,28 
+151,31 @@ export function createPartialOptions({ colors, brk }) { .replace("y", ` year${suffix}`); } + const market = brk.tree.computed.market; const averages = /** @type {const} */ ([ - ["1w", 7, "red"], - ["8d", 8, "orange"], - ["13d", 13, "amber"], - ["21d", 21, "yellow"], - ["1m", 30, "lime"], - ["34d", 34, "green"], - ["55d", 55, "emerald"], - ["89d", 89, "teal"], - ["144d", 144, "cyan"], - ["200d", 200, "sky"], - ["1y", 365, "blue"], - ["2y", 730, "indigo"], - ["200w", 1400, "violet"], - ["4y", 1460, "purple"], + ["1w", 7, "red", market.price1wSma, market.price1wEma], + ["8d", 8, "orange", market.price8dSma, market.price8dEma], + ["13d", 13, "amber", market.price13dSma, market.price13dEma], + ["21d", 21, "yellow", market.price21dSma, market.price21dEma], + ["1m", 30, "lime", market.price1mSma, market.price1mEma], + ["34d", 34, "green", market.price34dSma, market.price34dEma], + ["55d", 55, "emerald", market.price55dSma, market.price55dEma], + ["89d", 89, "teal", market.price89dSma, market.price89dEma], + ["144d", 144, "cyan", market.price144dSma, market.price144dEma], + ["200d", 200, "sky", market.price200dSma, market.price200dEma], + ["1y", 365, "blue", market.price1ySma, market.price1yEma], + ["2y", 730, "indigo", market.price2ySma, market.price2yEma], + ["200w", 1400, "violet", market.price200wSma, market.price200wEma], + ["4y", 1460, "purple", market.price4ySma, market.price4yEma], ]).map( - ([id, days, colorKey]) => + ([id, days, colorKey, sma, ema]) => /** @type {const} */ ({ id, name: periodIdToName(id, true), days, color: colors[colorKey], + sma, + ema, }), ); @@ -185,360 +200,359 @@ export function createPartialOptions({ colors, brk }) { }), ); + const utxoCohorts = brk.tree.computed.stateful.utxoCohorts; + const addressCohorts = brk.tree.computed.stateful.addressCohorts; + const { + TERM_NAMES, + EPOCH_NAMES, + MAX_AGE_NAMES, + MIN_AGE_NAMES, + AGE_RANGE_NAMES, + GE_AMOUNT_NAMES, + LT_AMOUNT_NAMES, + AMOUNT_RANGE_NAMES, + SPENDABLE_TYPE_NAMES, + } 
= brk; + const cohortAll = /** @type {const} */ ({ - id: "", name: "", title: "", color: colors.orange, + tree: utxoCohorts.all, }); const cohortAllForComparaison = /** @type {const} */ ({ - id: "", name: "all", title: "", color: colors.default, + tree: utxoCohorts.all, }); - const terms = /** @type {const} */ ([ - ["sth", "short", "yellow"], - ["lth", "long", "fuchsia"], - ]).map( - ([id, name, colorKey]) => - /** @type {const} */ ({ - id, - name, - title: `${name} term holders`, - color: colors[colorKey], - }), - ); + const constant100 = flattenConstant(brk.tree.computed.constants.constant100); - const upToDate = /** @type {const} */ ([ - ["1d", "pink"], - ["1w", "red"], - ["1m", "orange"], - ["2m", "amber"], - ["3m", "yellow"], - ["4m", "lime"], - ["5m", "green"], - ["6m", "teal"], - ["1y", "sky"], - ["2y", "indigo"], - ["3y", "violet"], - ["4y", "purple"], - ["5y", "fuchsia"], - ["6y", "pink"], - ["7y", "red"], - ["8y", "orange"], - ["10y", "amber"], - ["12y", "yellow"], - ["15y", "lime"], - ]).map( - ([name, colorKey]) => - /** @type {const} */ ({ - id: `utxos_up_to_${name}_old`, - name, - title: `utxos up to ${periodIdToName(name, false)} old`, - color: colors[colorKey], - }), - ); - - const fromDate = /** @type {const} */ ([ - ["1d", "red"], - ["1w", "orange"], - ["1m", "yellow"], - ["2m", "lime"], - ["3m", "green"], - ["4m", "teal"], - ["5m", "cyan"], - ["6m", "blue"], - ["1y", "indigo"], - ["2y", "violet"], - ["3y", "purple"], - ["4y", "fuchsia"], - ["5y", "pink"], - ["6y", "rose"], - ["7y", "red"], - ["8y", "orange"], - ["10y", "yellow"], - ["12y", "lime"], - ["15y", "green"], - ]).map( - ([name, colorKey]) => - /** @type {const} */ ({ - id: `utxos_at_least_${name}_old`, - name, - title: `UTXOs at least ${periodIdToName(name, false)} old`, - color: colors[colorKey], - }), - ); - - const dateRange = /** @type {const} */ ([ - { - id: "utxos_up_to_1d_old", - name: "1d", - title: "UTXOs New Up to 1 Day old", - color: colors.pink, - }, - .../** @type {const} 
*/ ([ - [["1d", "1w"], "red"], - [["1w", "1m"], "orange"], - [["1m", "2m"], "yellow"], - [["2m", "3m"], "yellow"], - [["3m", "4m"], "lime"], - [["4m", "5m"], "lime"], - [["5m", "6m"], "lime"], - [["6m", "1y"], "green"], - [["1y", "2y"], "cyan"], - [["2y", "3y"], "blue"], - [["3y", "4y"], "indigo"], - [["4y", "5y"], "violet"], - [["5y", "6y"], "purple"], - [["6y", "7y"], "purple"], - [["7y", "8y"], "fuchsia"], - [["8y", "10y"], "fuchsia"], - [["10y", "12y"], "pink"], - [["12y", "15y"], "red"], - ]).map( - ([[start, end], colorKey]) => - /** @type {const} */ ({ - id: `utxos_at_least_${start}_up_to_${end}_old`, - name: `${start}..${end}`, - title: `utxos at least ${periodIdToName(start, false)} ago up to ${periodIdToName(end, false)} old`, - color: colors[colorKey], - }), - ), - { - id: "utxos_at_least_15y_old", - name: "15y+", - title: "UTXOs At least 15 Years old up to genesis", - color: colors.orange, - }, - ]); - - const epoch = /** @type {const} */ ([ - ["0", "red"], - ["1", "yellow"], - ["2", "orange"], - ["3", "lime"], - ["4", "green"], - ]).map( - ([name, colorKey]) => - /** @type {const} */ ({ - id: `epoch_${name}`, - name, - title: `Epoch ${name}`, - color: colors[colorKey], - }), - ); - - /** - * @param {string} amount - */ - function formatAmount(amount) { - return amount - .replace("sats", " sats") - .replace("btc", " btc") - .replace("_", ""); - } - - const aboveAmount = /** @type {const} */ ([ - ["1sat", "orange"], - ["10sats", "orange"], - ["100sats", "yellow"], - ["1k_sats", "lime"], - ["10k_sats", "green"], - ["100k_sats", "cyan"], - ["1m_sats", "blue"], - ["10m_sats", "indigo"], - ["1btc", "purple"], - ["10btc", "violet"], - ["100btc", "fuchsia"], - ["1k_btc", "pink"], - ["10k_btc", "red"], - ]).map(([amount, colorKey]) => { - const text = formatAmount(amount); + const termColors = /** @type {const} */ ({ + short: "yellow", + long: "fuchsia", + }); + const terms = entries(utxoCohorts.term).map(([key, tree]) => { + const names = TERM_NAMES[key]; 
return /** @type {const} */ ({ - id: `above_${amount}`, - name: `>=${text}`, - title: `Above ${text}`, - color: colors[colorKey], + name: names.short, + title: names.long, + color: colors[termColors[key]], + tree, }); }); - const utxosAboveAmount = aboveAmount.map( - ({ id, name, title, color }) => - /** @type {const} */ ({ - id: `utxos_${id}`, - name, - title: `UTXOs ${title}`, - color, - }), - ); - - const addressesAboveAmount = aboveAmount.map( - ({ id, name, title, color }) => - /** @type {const} */ ({ - id: `addrs_${id}`, - name, - title: `Addresses ${title}`, - color, - }), - ); - - const underAmount = /** @type {const} */ ([ - ["10sats", "orange"], - ["100sats", "yellow"], - ["1k_sats", "lime"], - ["10k_sats", "green"], - ["100k_sats", "cyan"], - ["1m_sats", "blue"], - ["10m_sats", "indigo"], - ["1btc", "purple"], - ["10btc", "violet"], - ["100btc", "fuchsia"], - ["1k_btc", "pink"], - ["10k_btc", "red"], - ["100k_btc", "orange"], - ]).map(([amount, colorKey]) => { - const text = formatAmount(amount); + const maxAgeColors = /** @type {const} */ ({ + _1w: "red", + _1m: "orange", + _2m: "amber", + _3m: "yellow", + _4m: "lime", + _5m: "green", + _6m: "teal", + _1y: "sky", + _2y: "indigo", + _3y: "violet", + _4y: "purple", + _5y: "fuchsia", + _6y: "pink", + _7y: "red", + _8y: "orange", + _10y: "amber", + _12y: "yellow", + _15y: "lime", + }); + const upToDate = entries(utxoCohorts.maxAge).map(([key, tree]) => { + const names = MAX_AGE_NAMES[key]; return /** @type {const} */ ({ - id: `under_${amount}`, - name: `<${text}`, - title: `under ${text}`, - color: colors[colorKey], + name: names.short, + title: names.long, + color: colors[maxAgeColors[key]], + tree, }); }); - const utxosUnderAmount = underAmount.map( - ({ id, name, title, color }) => - /** @type {const} */ ({ - id: `utxos_${id}`, - name, - title: `UTXOs ${title}`, - color, - }), - ); + const minAgeColors = /** @type {const} */ ({ + _1d: "red", + _1w: "orange", + _1m: "yellow", + _2m: "lime", + _3m: 
"green", + _4m: "teal", + _5m: "cyan", + _6m: "blue", + _1y: "indigo", + _2y: "violet", + _3y: "purple", + _4y: "fuchsia", + _5y: "pink", + _6y: "rose", + _7y: "red", + _8y: "orange", + _10y: "yellow", + _12y: "lime", + }); + const fromDate = entries(utxoCohorts.minAge).map(([key, tree]) => { + const names = MIN_AGE_NAMES[key]; + return /** @type {const} */ ({ + name: names.short, + title: names.long, + color: colors[minAgeColors[key]], + tree, + }); + }); - const addressesUnderAmount = underAmount.map( - ({ id, name, title, color }) => - /** @type {const} */ ({ - id: `addrs_${id}`, - name, - title: `Addresses ${title}`, - color, - }), - ); + const ageRangeColors = /** @type {const} */ ({ + upTo1d: "pink", + _1dTo1w: "red", + _1wTo1m: "orange", + _1mTo2m: "yellow", + _2mTo3m: "yellow", + _3mTo4m: "lime", + _4mTo5m: "lime", + _5mTo6m: "lime", + _6mTo1y: "green", + _1yTo2y: "cyan", + _2yTo3y: "blue", + _3yTo4y: "indigo", + _4yTo5y: "violet", + _5yTo6y: "purple", + _6yTo7y: "purple", + _7yTo8y: "fuchsia", + _8yTo10y: "fuchsia", + _10yTo12y: "pink", + _12yTo15y: "red", + from15y: "orange", + }); + const dateRange = entries(utxoCohorts.ageRange).map(([key, tree]) => { + const names = AGE_RANGE_NAMES[key]; + return /** @type {const} */ ({ + name: names.short, + title: names.long, + color: colors[ageRangeColors[key]], + tree, + }); + }); - const amountRanges = /** @type {const} */ ([ - { - id: "with_0sats", - name: "0 sats", - title: "valued 0 sats", - color: colors.red, - }, - .../** @type {const} */ ([ - [["1sat", "10sats"], "orange"], - [["10sats", "100sats"], "yellow"], - [["100sats", "1k_sats"], "lime"], - [["1k_sats", "10k_sats"], "green"], - [["10k_sats", "100k_sats"], "cyan"], - [["100k_sats", "1m_sats"], "blue"], - [["1m_sats", "10m_sats"], "indigo"], - [["10m_sats", "1btc"], "purple"], - [["1btc", "10btc"], "violet"], - [["10btc", "100btc"], "fuchsia"], - [["100btc", "1k_btc"], "pink"], - [["1k_btc", "10k_btc"], "red"], - [["10k_btc", "100k_btc"], "orange"], - 
]).map(([[start, end], colorKey]) => { - const startText = formatAmount(start); - const endText = formatAmount(end); + const epochColors = /** @type {const} */ ({ + _0: "red", + _1: "yellow", + _2: "orange", + _3: "lime", + _4: "green", + }); + const epoch = entries(utxoCohorts.epoch).map(([key, tree]) => { + const names = EPOCH_NAMES[key]; + return /** @type {const} */ ({ + name: names.short, + title: names.long, + color: colors[epochColors[key]], + tree, + }); + }); + + const geAmountColors = /** @type {const} */ ({ + _1sat: "orange", + _10sats: "orange", + _100sats: "yellow", + _1kSats: "lime", + _10kSats: "green", + _100kSats: "cyan", + _1mSats: "blue", + _10mSats: "indigo", + _1btc: "purple", + _10btc: "violet", + _100btc: "fuchsia", + _1kBtc: "pink", + _10kBtc: "red", + }); + const utxosAboveAmount = entries(utxoCohorts.geAmount).map(([key, tree]) => { + const names = GE_AMOUNT_NAMES[key]; + return /** @type {const} */ ({ + name: names.short, + title: names.long, + color: colors[geAmountColors[key]], + tree, + }); + }); + /** @type {readonly AddressCohortObject[]} */ + const addressesAboveAmount = entries(addressCohorts.geAmount).map( + ([key, tree]) => { + const names = GE_AMOUNT_NAMES[key]; return /** @type {const} */ ({ - id: `above_${start}_under_${end}`, - name: `${startText}..${endText}`, - title: `Above ${startText} Under ${endText}`, - color: colors[colorKey], + name: names.short, + title: names.long, + color: colors[geAmountColors[key]], + tree, }); - }), + }, + ); + + const ltAmountColors = /** @type {const} */ ({ + _10sats: "orange", + _100sats: "yellow", + _1kSats: "lime", + _10kSats: "green", + _100kSats: "cyan", + _1mSats: "blue", + _10mSats: "indigo", + _1btc: "purple", + _10btc: "violet", + _100btc: "fuchsia", + _1kBtc: "pink", + _10kBtc: "red", + _100kBtc: "orange", + }); + const utxosUnderAmount = entries(utxoCohorts.ltAmount).map(([key, tree]) => { + const names = LT_AMOUNT_NAMES[key]; + return /** @type {const} */ ({ + name: names.short, + 
title: names.long, + color: colors[ltAmountColors[key]], + tree, + }); + }); + /** @type {readonly AddressCohortObject[]} */ + const addressesUnderAmount = entries(addressCohorts.ltAmount).map( + ([key, tree]) => { + const names = LT_AMOUNT_NAMES[key]; + return /** @type {const} */ ({ + name: names.short, + title: names.long, + color: colors[ltAmountColors[key]], + tree, + }); + }, + ); + + const amountRangeColors = /** @type {const} */ ({ + _0sats: "red", + _1satTo10sats: "orange", + _10satsTo100sats: "yellow", + _100satsTo1kSats: "lime", + _1kSatsTo10kSats: "green", + _10kSatsTo100kSats: "cyan", + _100kSatsTo1mSats: "blue", + _1mSatsTo10mSats: "indigo", + _10mSatsTo1btc: "purple", + _1btcTo10btc: "violet", + _10btcTo100btc: "fuchsia", + _100btcTo1kBtc: "pink", + _1kBtcTo10kBtc: "red", + _10kBtcTo100kBtc: "orange", + _100kBtcOrMore: "yellow", + }); + const utxosAmountRanges = entries(utxoCohorts.amountRange).map( + ([key, tree]) => { + const names = AMOUNT_RANGE_NAMES[key]; + return /** @type {const} */ ({ + name: names.short, + title: names.long, + color: colors[amountRangeColors[key]], + tree, + }); + }, + ); + /** @type {readonly AddressCohortObject[]} */ + const addressesAmountRanges = entries(addressCohorts.amountRange).map( + ([key, tree]) => { + const names = AMOUNT_RANGE_NAMES[key]; + return /** @type {const} */ ({ + name: names.short, + title: names.long, + color: colors[amountRangeColors[key]], + tree, + }); + }, + ); + + const spendableTypeColors = /** @type {const} */ ({ + p2pk65: "red", + p2pk33: "orange", + p2pkh: "yellow", + p2ms: "lime", + p2sh: "green", + p2wpkh: "teal", + p2wsh: "blue", + p2tr: "indigo", + p2a: "purple", + unknown: "violet", + empty: "fuchsia", + }); + const type = entries(utxoCohorts.type).map(([key, tree]) => { + const names = SPENDABLE_TYPE_NAMES[key]; + return /** @type {const} */ ({ + name: names.short, + title: names.long, + color: colors[spendableTypeColors[key]], + tree, + }); + }); + + const cointime = 
brk.tree.computed.cointime; + const cointimePrices = /** @type {const} */ ([ { - id: "above_100k_btc", - name: "100K+ btc", - title: "Above 100K BTC", + price: cointime.trueMarketMean, + ratio: cointime.trueMarketMeanRatio, + name: "True market mean", + title: "true market mean", + color: colors.blue, + }, + { + price: cointime.vaultedPrice, + ratio: cointime.vaultedPriceRatio, + name: "Vaulted", + title: "vaulted price", + color: colors.lime, + }, + { + price: cointime.activePrice, + ratio: cointime.activePriceRatio, + name: "Active", + title: "active price", + color: colors.rose, + }, + { + price: cointime.cointimePrice, + ratio: cointime.cointimePriceRatio, + name: "cointime", + title: "cointime price", color: colors.yellow, }, ]); - - const utxosAmountRanges = amountRanges.map( - ({ id, name, title, color }) => - /** @type {const} */ ({ - id: `utxos_${id}`, - name, - title: `UTXOs ${title}`, - color, - }), - ); - - const addressesAmountRanges = amountRanges.map( - ({ id, name, title, color }) => - /** @type {const} */ ({ - id: `addrs_${id}`, - name, - title: `Addresses ${title}`, - color, - }), - ); - - const type = /** @type {const} */ ([ - ["p2pk65", "Pay To Long Public id", "red"], - ["p2pk33", "Pay To Short Public id", "orange"], - ["p2pkh", "Pay To Public id Hash", "yellow"], - ["p2ms_outputs", "Pay To Bare Multisig", "lime"], - ["p2sh", "Pay To Script Hash", "green"], - ["p2wpkh", "Pay To Witness Public id Hash", "teal"], - ["p2wsh", "Pay To Witness Script Hash", "blue"], - ["p2tr", "Pay To Taproot", "indigo"], - ["p2a", "Pay To Anchor", "purple"], - ["unknown_outputs", "Pay To Unknown", "violet"], - ["empty_outputs", "Pay To Empty", "fuchsia"], - ]).map( - ([id, title, colorKey]) => - /** @type {const} */ ({ - id, - name: id.split("_")[0], - title, - color: colors[colorKey], - }), - ); - - const cointimePrices = /** @type {const} */ ([ - ["true_market_mean", "True market mean", "blue"], - ["vaulted_price", "Vaulted", "lime"], - ["active_price", "Active", 
"rose"], - ["cointime_price", "cointime", "yellow"], - ]).map( - ([metric, name, colorKey]) => - /** @type {const} */ ({ - metric, - name, - title: metric.replace(/_/g, " "), - color: colors[colorKey], - }), - ); - const cointimeCapitalizations = /** @type {const} */ ([ - ["vaulted", "lime"], - ["active", "rose"], - ["cointime", "yellow"], - ["investor", "fuchsia"], - ["thermo", "emerald"], - ]).map(([id, colorKey]) => { + [cointime.vaultedCap, "vaulted", "lime"], + [cointime.activeCap, "active", "rose"], + [cointime.cointimeCap, "cointime", "yellow"], + [cointime.investorCap, "investor", "fuchsia"], + [cointime.thermoCap, "thermo", "emerald"], + ]).map(([metric, name, colorKey]) => { return /** @type {const} */ ({ - metric: `${id}_cap`, - name: id, - title: `${id} Capitalization`, + metric, + name, + title: `${name} Capitalization`, color: colors[colorKey], }); }); + /** + * Get constant pattern by number dynamically from tree + * Examples: 0 → constant0, 38.2 → constant382, -1 → constantMinus1 + * @param {number} num + * @returns {Constant0Pattern} + */ + function getConstant(num) { + const constants = brk.tree.computed.constants; + const key = + num >= 0 + ? `constant${String(num).replace(".", "")}` + : `constantMinus${Math.abs(num)}`; + const constant = constants[key]; + if (!constant) throw new Error(`Unknown constant: ${num} (key: ${key})`); + return constant; + } + /** * @param {Object} args * @param {number} [args.number] @@ -557,9 +571,7 @@ export function createPartialOptions({ colors, brk }) { lineStyle, }) { return /** @satisfies {FetchedLineSeriesBlueprint} */ ({ - metric: `constant_${ - number >= 0 ? number : `minus_${Math.abs(number)}` - }`.replace(".", "_"), + metric: flattenConstant(getConstant(number)), title: name ?? `${number}`, unit, defaultActive, @@ -582,7 +594,7 @@ export function createPartialOptions({ colors, brk }) { return numbers.map( (number) => /** @satisfies {FetchedLineSeriesBlueprint} */ ({ - metric: `constant_${number >= 0 ? 
number : `minus_${Math.abs(number)}`}`, + metric: flattenConstant(getConstant(number)), title: `${number}`, unit, defaultActive: !number, @@ -596,23 +608,22 @@ export function createPartialOptions({ colors, brk }) { ); } + // ============================================================================ + // Tree-first helper functions + // These accept typed pattern objects from brk.tree and return series blueprints + // ============================================================================ + /** + * Create a single series from a tree accessor * @param {Object} args - * @param {Metric} args.metric - * @param {string} args.name + * @param {MetricAccessor} args.metric - Tree accessor with .by property + * @param {string} args.name - Display name for the series * @param {Color} [args.color] * @param {Unit} [args.unit] * @param {boolean} [args.defaultActive] * @param {LineSeriesPartialOptions} [args.options] */ - function createBaseSeries({ - metric, - name, - color, - defaultActive, - unit, - options, - }) { + function s({ metric, name, color, defaultActive, unit, options }) { return /** @satisfies {AnyFetchedSeriesBlueprint} */ ({ metric, title: name, @@ -624,115 +635,91 @@ export function createPartialOptions({ colors, brk }) { } /** - * @param {Metric} metric + * Create series from a BlockCountPattern ({ base, sum, cumulative }) + * @template T + * @param {BlockCountPattern} pattern + * @param {string} title + * @param {Color} [color] */ - function createAverageSeries(metric) { - return /** @satisfies {AnyFetchedSeriesBlueprint} */ ({ - metric: `${metric}_avg`, - title: "Average", - }); - } - - /** - * @param {Object} args - * @param {Metric} args.metric - * @param {Color} [args.sumColor] - * @param {Color} [args.cumulativeColor] - * @param {string} [args.common] - */ - function createSumCumulativeSeries({ - metric, - common, - sumColor, - cumulativeColor, - }) { + function fromBlockCount(pattern, title, color) { return /** @satisfies 
{AnyFetchedSeriesBlueprint[]} */ ([ - createSumSeries({ - metric, - title: common, - color: sumColor, - }), - createCumulativeSeries({ - metric, - title: common, - color: cumulativeColor, - }), + { metric: pattern.base, title, color: color ?? colors.default }, + { + metric: pattern.sum, + title: `${title} (sum)`, + color: colors.red, + defaultActive: false, + }, + { + metric: pattern.cumulative, + title: `${title} (cum.)`, + color: colors.cyan, + defaultActive: false, + }, ]); } /** - * @param {Object} args - * @param {Metric} args.metric - * @param {string} [args.title] - * @param {Color} [args.color] + * Create series from a BitcoinPattern ({ base, sum, cumulative, average, min, max, median, pct* }) + * @template T + * @param {BitcoinPattern} pattern + * @param {string} title + * @param {Color} [color] */ - function createSumSeries({ metric, title = "", color }) { - const metric_sum = `${metric}_sum`; - return /** @satisfies {AnyFetchedSeriesBlueprint} */ ({ - metric: brk.hasMetric(metric_sum) ? metric_sum : metric, - title: `Sum ${title}`, - color: color ?? colors.red, - }); - } - - /** - * @param {Object} args - * @param {Metric} args.metric - * @param {string} [args.title] - * @param {Color} [args.color] - */ - function createCumulativeSeries({ metric, title = "", color }) { - return /** @satisfies {AnyFetchedSeriesBlueprint} */ ({ - metric: `${metric}_cumulative`, - title: `Cumulative ${title}`, - color: color ?? colors.cyan, - defaultActive: false, - }); - } - - /** - * @param {Metric} metric - */ - function createMinMaxPercentilesSeries(metric) { + function fromBitcoin(pattern, title, color) { return /** @satisfies {AnyFetchedSeriesBlueprint[]} */ ([ + { metric: pattern.base, title, color: color ?? 
colors.default }, + { metric: pattern.average, title: "Average", defaultActive: false }, { - metric: `${metric}_max`, + metric: pattern.sum, + title: `${title} (sum)`, + color: colors.red, + defaultActive: false, + }, + { + metric: pattern.cumulative, + title: `${title} (cum.)`, + color: colors.cyan, + defaultActive: false, + }, + { + metric: pattern.max, title: "Max", color: colors.pink, defaultActive: false, }, { - metric: `${metric}_min`, + metric: pattern.min, title: "Min", color: colors.green, defaultActive: false, }, { - metric: `${metric}_median`, + metric: pattern.median, title: "Median", color: colors.amber, defaultActive: false, }, { - metric: `${metric}_pct75`, + metric: pattern.pct75, title: "pct75", color: colors.red, defaultActive: false, }, { - metric: `${metric}_pct25`, + metric: pattern.pct25, title: "pct25", color: colors.yellow, defaultActive: false, }, { - metric: `${metric}_pct90`, + metric: pattern.pct90, title: "pct90", color: colors.rose, defaultActive: false, }, { - metric: `${metric}_pct10`, + metric: pattern.pct10, title: "pct10", color: colors.lime, defaultActive: false, @@ -741,43 +728,118 @@ export function createPartialOptions({ colors, brk }) { } /** - * @param {Metric} metric + * Create series from a BlockSizePattern ({ sum, cumulative, average, min, max, median, pct* }) + * @template T + * @param {BlockSizePattern} pattern + * @param {string} title + * @param {Color} [color] */ - function createSumCumulativeMinMaxPercentilesSeries(metric) { - return [ - ...createSumCumulativeSeries({ metric }), - ...createMinMaxPercentilesSeries(metric), - ]; + function fromBlockSize(pattern, title, color) { + return /** @satisfies {AnyFetchedSeriesBlueprint[]} */ ([ + { metric: pattern.sum, title, color: color ?? 
colors.default }, + { metric: pattern.average, title: "Average", defaultActive: false }, + { + metric: pattern.cumulative, + title: `${title} (cum.)`, + color: colors.cyan, + defaultActive: false, + }, + { + metric: pattern.max, + title: "Max", + color: colors.pink, + defaultActive: false, + }, + { + metric: pattern.min, + title: "Min", + color: colors.green, + defaultActive: false, + }, + { + metric: pattern.median, + title: "Median", + color: colors.amber, + defaultActive: false, + }, + { + metric: pattern.pct75, + title: "pct75", + color: colors.red, + defaultActive: false, + }, + { + metric: pattern.pct25, + title: "pct25", + color: colors.yellow, + defaultActive: false, + }, + { + metric: pattern.pct90, + title: "pct90", + color: colors.rose, + defaultActive: false, + }, + { + metric: pattern.pct10, + title: "pct10", + color: colors.lime, + defaultActive: false, + }, + ]); } /** - * @param {Metric} metric + * Flatten a Constant0Pattern into a simple MetricAccessor + * Constant0Pattern has { dateindex: { by: {...} }, height: { by: {...} }, ... } + * This flattens it to { by: { dateindex: MetricNode, height: MetricNode, ... 
} } + * @param {Constant0Pattern} pattern + * @returns {MetricAccessor} */ - function createAverageSumCumulativeMinMaxPercentilesSeries(metric) { - return [ - createAverageSeries(metric), - ...createSumCumulativeMinMaxPercentilesSeries(metric), - ]; + function flattenConstant(pattern) { + return { + by: { + dateindex: pattern.dateindex.by.dateindex, + decadeindex: pattern.decadeindex.by.decadeindex, + height: pattern.height.by.height, + monthindex: pattern.monthindex.by.monthindex, + quarterindex: pattern.quarterindex.by.quarterindex, + semesterindex: pattern.semesterindex.by.semesterindex, + weekindex: pattern.weekindex.by.weekindex, + yearindex: pattern.yearindex.by.yearindex, + }, + }; } /** + * Create a constant line series * @param {Object} args - * @param {Metric} args.metric + * @param {Constant0Pattern} args.constant - The constant pattern from tree.constants * @param {string} args.name + * @param {Unit} args.unit + * @param {Color} [args.color] + * @param {number} [args.lineStyle] + * @param {boolean} [args.defaultActive] */ - function createBaseAverageSumCumulativeMinMaxPercentilesSeries({ - metric, - name, - }) { - return [ - createBaseSeries({ - metric, - name, - }), - ...createAverageSumCumulativeMinMaxPercentilesSeries(metric), - ]; + function line({ constant, name, unit, color, lineStyle, defaultActive }) { + return /** @satisfies {AnyFetchedSeriesBlueprint} */ ({ + metric: flattenConstant(constant), + title: name, + unit, + defaultActive, + color: color ?? colors.gray, + options: { + lineStyle: lineStyle ?? 
4, + lastValueVisible: false, + crosshairMarkerVisible: false, + }, + }); } + // Shorthand for tree access + const tree = brk.tree.computed; + const constants = tree.constants; + const percentiles = [ { name: "pct1", @@ -843,17 +905,73 @@ export function createPartialOptions({ colors, brk }) { * @param {string} args.name * @param {string} args.legend * @param {string} args.title - * @param {Metric} args.metric + * @param {Indexes3} [args.price] - Separate price pattern (for ActivePriceRatioPattern style) + * @param {ActivePriceRatioPattern | EmaRatioPattern} args.ratio - Ratio pattern (tree-first) * @param {Color} [args.color] */ - function createPriceWithRatioOptions({ name, title, legend, metric, color }) { + function createPriceWithRatioOptions({ + name, + title, + legend, + price, + ratio, + color, + }) { + // Support both patterns: EmaRatioPattern has .price, ActivePriceRatioPattern needs separate price arg + const priceMetric = price ?? ratio.price; + + // Map percentile names to ratio pattern properties + const percentileUsdMap = /** @type {const} */ ([ + { name: "pct99", prop: ratio.ratioPct99Usd, color: colors.rose }, + { name: "pct98", prop: ratio.ratioPct98Usd, color: colors.pink }, + { name: "pct95", prop: ratio.ratioPct95Usd, color: colors.fuchsia }, + { name: "pct5", prop: ratio.ratioPct5Usd, color: colors.cyan }, + { name: "pct2", prop: ratio.ratioPct2Usd, color: colors.sky }, + { name: "pct1", prop: ratio.ratioPct1Usd, color: colors.blue }, + ]); + + const percentileMap = /** @type {const} */ ([ + { name: "pct99", prop: ratio.ratioPct99, color: colors.rose }, + { name: "pct98", prop: ratio.ratioPct98, color: colors.pink }, + { name: "pct95", prop: ratio.ratioPct95, color: colors.fuchsia }, + { name: "pct5", prop: ratio.ratioPct5, color: colors.cyan }, + { name: "pct2", prop: ratio.ratioPct2, color: colors.sky }, + { name: "pct1", prop: ratio.ratioPct1, color: colors.blue }, + ]); + + // SD patterns by window + const sdPatterns = /** @type {const} */ 
([ + { nameAddon: "all", titleAddon: "", sd: ratio.ratioSd }, + { nameAddon: "4y", titleAddon: "4y", sd: ratio.ratio4ySd }, + { nameAddon: "2y", titleAddon: "2y", sd: ratio.ratio2ySd }, + { nameAddon: "1y", titleAddon: "1y", sd: ratio.ratio1ySd }, + ]); + + // SD band definitions with their pattern property accessors + /** @param {Ratio1ySdPattern} sd */ + const getSdBands = (sd) => [ + { name: "0σ", prop: sd._0sdUsd, color: colors.lime }, + { name: "+0.5σ", prop: sd.p05sdUsd, color: colors.yellow }, + { name: "+1σ", prop: sd.p1sdUsd, color: colors.amber }, + { name: "+1.5σ", prop: sd.p15sdUsd, color: colors.orange }, + { name: "+2σ", prop: sd.p2sdUsd, color: colors.red }, + { name: "+2.5σ", prop: sd.p25sdUsd, color: colors.rose }, + { name: "+3σ", prop: sd.p3sd, color: colors.pink }, // No USD variant for ±3σ + { name: "−0.5σ", prop: sd.m05sdUsd, color: colors.teal }, + { name: "−1σ", prop: sd.m1sdUsd, color: colors.cyan }, + { name: "−1.5σ", prop: sd.m15sdUsd, color: colors.sky }, + { name: "−2σ", prop: sd.m2sdUsd, color: colors.blue }, + { name: "−2.5σ", prop: sd.m25sdUsd, color: colors.indigo }, + { name: "−3σ", prop: sd.m3sd, color: colors.violet }, // No USD variant for ±3σ + ]; + return [ { name: "price", title, top: [ - createBaseSeries({ - metric: metric, + s({ + metric: priceMetric, name: legend, color, }), @@ -863,282 +981,208 @@ export function createPartialOptions({ colors, brk }) { name: "Ratio", title: `${title} Ratio`, top: [ - createBaseSeries({ - metric: metric, + s({ + metric: priceMetric, name: legend, color, }), - ...(brk.hasMetric(`${metric}_ratio_p1sd_usd`) - ? 
percentiles.map(({ name, color }) => - createBaseSeries({ - metric: `${metric}_ratio_${name}_usd`, - name, - color, - defaultActive: false, - options: { - lineStyle: 1, - }, - }), - ) - : []), + ...percentileUsdMap.map(({ name, prop, color }) => + s({ + metric: prop, + name, + color, + defaultActive: false, + options: { + lineStyle: 1, + }, + }), + ), ], bottom: [ /** @satisfies {FetchedBaselineSeriesBlueprint} */ ({ - metric: `${metric}_ratio`, + metric: ratio.ratio, title: "Ratio", type: "Baseline", options: { baseValue: { price: 1 }, }, }), - ...(brk.hasMetric(`${metric}_ratio_p1sd`) - ? percentiles.map(({ name, color }) => - createBaseSeries({ - metric: `${metric}_ratio_${name}`, - name, - color, - defaultActive: false, - options: { - lineStyle: 1, - }, - }), - ) - : []), - ...(brk.hasMetric(`${metric}_ratio_sma`) - ? ratioAverages.map(({ name, metric: metricAddon, color }) => - createBaseSeries({ - metric: `${metric}_ratio_${metricAddon}`, - name, - color, - defaultActive: false, - options: { - lineStyle: 1, - }, - }), - ) - : []), + ...percentileMap.map(({ name, prop, color }) => + s({ + metric: prop, + name, + color, + defaultActive: false, + options: { + lineStyle: 1, + }, + }), + ), + s({ + metric: ratio.ratio1wSma, + name: "1w SMA", + color: colors.orange, + defaultActive: false, + options: { + lineStyle: 1, + }, + }), + s({ + metric: ratio.ratio1mSma, + name: "1m SMA", + color: colors.red, + defaultActive: false, + options: { + lineStyle: 1, + }, + }), createPriceLine({ number: 1, unit: "ratio", }), ], }, - ...(brk.hasMetric(`${metric}_ratio_zscore`) - ? 
[ - { - name: "ZScores", - tree: [ - { - name: "compare", - title: `Compare ${title} ZScores`, - top: [ - createBaseSeries({ - metric: metric, - name: legend, - color, - }), - createBaseSeries({ - metric: `${metric}_ratio_1y_0sd_usd`, - name: "1y 0sd", - color: colors.fuchsia, - defaultActive: false, - }), - createBaseSeries({ - metric: `${metric}_ratio_2y_0sd_usd`, - name: "2y 0sd", - color: colors.purple, - defaultActive: false, - }), - createBaseSeries({ - metric: `${metric}_ratio_4y_0sd_usd`, - name: "4y 0sd", - color: colors.violet, - defaultActive: false, - }), - createBaseSeries({ - metric: `${metric}_ratio_0sd_usd`, - name: "0sd", - color: colors.indigo, - defaultActive: false, - }), - ], - bottom: [ - /** @satisfies {FetchedBaselineSeriesBlueprint} */ ({ - metric: `${metric}_ratio_zscore`, - title: "All", - type: "Baseline", - }), - /** @satisfies {FetchedBaselineSeriesBlueprint} */ ({ - metric: `${metric}_ratio_4y_zscore`, - colors: [colors.lime, colors.rose], - title: "4y", - type: "Baseline", - }), - /** @satisfies {FetchedBaselineSeriesBlueprint} */ ({ - metric: `${metric}_ratio_2y_zscore`, - colors: [colors.avocado, colors.pink], - title: "2y", - type: "Baseline", - }), - /** @satisfies {FetchedBaselineSeriesBlueprint} */ ({ - metric: `${metric}_ratio_1y_zscore`, - colors: [colors.yellow, colors.fuchsia], - title: "1Y", - type: "Baseline", - }), - ...createPriceLines({ - numbers: [0, 1, 2, 3, 4, -1, -2, -3, -4], - unit: "ratio", - }), - ], - }, - ...[ - { - nameAddon: "all", - titleAddon: "", - metricAddon: "", + { + name: "ZScores", + tree: [ + { + name: "compare", + title: `Compare ${title} ZScores`, + top: [ + s({ + metric: priceMetric, + name: legend, + color, + }), + s({ + metric: ratio.ratio1ySd._0sdUsd, + name: "1y 0sd", + color: colors.fuchsia, + defaultActive: false, + }), + s({ + metric: ratio.ratio2ySd._0sdUsd, + name: "2y 0sd", + color: colors.purple, + defaultActive: false, + }), + s({ + metric: ratio.ratio4ySd._0sdUsd, + name: "4y 0sd", 
+ color: colors.violet, + defaultActive: false, + }), + s({ + metric: ratio.ratioSd._0sdUsd, + name: "0sd", + color: colors.indigo, + defaultActive: false, + }), + ], + bottom: [ + /** @satisfies {FetchedBaselineSeriesBlueprint} */ ({ + metric: ratio.ratioSd.zscore, + title: "All", + type: "Baseline", + }), + /** @satisfies {FetchedBaselineSeriesBlueprint} */ ({ + metric: ratio.ratio4ySd.zscore, + colors: [colors.lime, colors.rose], + title: "4y", + type: "Baseline", + }), + /** @satisfies {FetchedBaselineSeriesBlueprint} */ ({ + metric: ratio.ratio2ySd.zscore, + colors: [colors.avocado, colors.pink], + title: "2y", + type: "Baseline", + }), + /** @satisfies {FetchedBaselineSeriesBlueprint} */ ({ + metric: ratio.ratio1ySd.zscore, + colors: [colors.yellow, colors.fuchsia], + title: "1Y", + type: "Baseline", + }), + ...createPriceLines({ + numbers: [0, 1, 2, 3, 4, -1, -2, -3, -4], + unit: "ratio", + }), + ], + }, + ...sdPatterns.flatMap(({ nameAddon, titleAddon, sd }) => ({ + name: nameAddon, + title: `${title} ${titleAddon} ZScore`, + top: [ + s({ + metric: priceMetric, + name: legend, + color, + }), + ...getSdBands(sd).map(({ name, prop, color }) => + s({ + metric: prop, + name, + color, + defaultActive: false, + options: { + lineStyle: 1, }, - { - nameAddon: "4y", - titleAddon: "4y", - metricAddon: "4y_", - }, - { - nameAddon: "2y", - titleAddon: "2y", - metricAddon: "2y_", - }, - { - nameAddon: "1y", - titleAddon: "1y", - metricAddon: "1y_", - }, - ].flatMap(({ nameAddon, titleAddon, metricAddon }) => ({ - name: nameAddon, - title: `${title} ${titleAddon} ZScore`, - top: [ - createBaseSeries({ - metric, - name: legend, - color, - }), - ...[ - { sd: "0sd", name: "0σ", color: colors.lime }, - { - sd: `p0_5sd`, - name: "+0.5σ", - color: colors.yellow, - }, - { - sd: `p1sd`, - name: "+1σ", - color: colors.amber, - }, - { - sd: `p1_5sd`, - name: "+1.5σ", - color: colors.orange, - }, - { - sd: `p2sd`, - name: "+2σ", - color: colors.red, - }, - { - sd: `p2_5sd`, - name: 
"+2.5σ", - color: colors.rose, - }, - { - sd: `p3sd`, - name: "+3σ", - color: colors.pink, - }, - { - sd: `m0_5sd`, - name: "−0.5σ", - color: colors.teal, - }, - { - sd: `m1sd`, - name: "−1σ", - color: colors.cyan, - }, - { - sd: `m1_5sd`, - name: "−1.5σ", - color: colors.sky, - }, - { - sd: `m2sd`, - name: "−2σ", - color: colors.blue, - }, - { - sd: `m2_5sd`, - name: "−2.5σ", - color: colors.indigo, - }, - { - sd: `m3sd`, - name: "−3σ", - color: colors.violet, - }, - ].map(({ sd, name, color }) => - createBaseSeries({ - metric: `${metric}_ratio_${metricAddon}${sd}_usd`, - name, - color, - defaultActive: false, - options: { - lineStyle: 1, - }, - }), - ), - ], - bottom: [ - /** @satisfies {FetchedBaselineSeriesBlueprint} */ ({ - metric: `${metric}_ratio_${metricAddon}zscore`, - title: "score", - type: "Baseline", - }), - ...createPriceLines({ - numbers: [0, 1, 2, 3, 4, -1, -2, -3, -4], - unit: "ratio", - }), - ], - })), - ], - }, - ] - : []), + }), + ), + ], + bottom: [ + /** @satisfies {FetchedBaselineSeriesBlueprint} */ ({ + metric: sd.zscore, + title: "score", + type: "Baseline", + }), + ...createPriceLines({ + numbers: [0, 1, 2, 3, 4, -1, -2, -3, -4], + unit: "ratio", + }), + ], + })), + ], + }, ]; } /** - * @typedef {Object} UTXOGroupObject - * @property {string} args.name - * @property {string} args.title - * @property {Color} args.color - * @property {string} args.id + * @typedef {Object} UtxoCohortObject + * @property {string} name + * @property {string} title + * @property {Color} color + * @property {UtxoCohortPattern} tree */ /** - * @typedef {Object} UTXOGroupsObject - * @property {string} args.name - * @property {string} args.title - * @property {readonly UTXOGroupObject[]} args.list + * @typedef {Object} AddressCohortObject + * @property {string} name + * @property {string} title + * @property {Color} color + * @property {AddressCohortPattern} tree + */ + + /** @typedef {UtxoCohortObject | AddressCohortObject} CohortObject */ + + /** + * @typedef 
{Object} UtxoCohortGroupObject + * @property {string} name + * @property {string} title + * @property {readonly UtxoCohortObject[]} list */ /** - * @param {string} id + * @typedef {Object} AddressCohortGroupObject + * @property {string} name + * @property {string} title + * @property {readonly AddressCohortObject[]} list */ - function fixId(id) { - return id !== "" ? `${id}_` : ""; - } + + /** @typedef {UtxoCohortGroupObject | AddressCohortGroupObject} CohortGroupObject */ /** - * @param {UTXOGroupObject | UTXOGroupsObject} args + * @param {CohortObject | CohortGroupObject} args */ function createCohortGroupFolder(args) { const list = "list" in args ? args.list : [args]; @@ -1155,108 +1199,109 @@ export function createPartialOptions({ colors, brk }) { ? { name: "supply", title: `Supply ${title}`, - bottom: list.flatMap(({ color, name, id: _id }) => { - const id = fixId(_id); + bottom: list.flatMap(({ color, name, tree }) => { return /** @type {const} */ ([ - createBaseSeries({ - metric: `${id}supply`, + s({ + metric: tree.supply.supply.sats, name: "Supply", color: colors.default, }), - createBaseSeries({ - metric: `${id}supply_btc`, + s({ + metric: tree.supply.supply.bitcoin, name: "Supply", color: colors.default, }), - createBaseSeries({ - metric: `${id}supply_usd`, + s({ + metric: tree.supply.supply.dollars, name: "Supply", color: colors.default, }), - ...(id + ...("supplyRelToCirculatingSupply" in tree.relative ? 
[ - createBaseSeries({ - metric: `${id}supply_rel_to_circulating_supply`, + s({ + metric: tree.relative.supplyRelToCirculatingSupply, name: "Supply", color: colors.default, }), ] : []), - createBaseSeries({ - metric: `${id}supply_in_profit`, + s({ + metric: tree.unrealized.supplyInProfit.sats, name: "In Profit", color: colors.green, }), - createBaseSeries({ - metric: `${id}supply_in_profit_btc`, + s({ + metric: tree.unrealized.supplyInProfit.bitcoin, name: "In Profit", color: colors.green, }), - createBaseSeries({ - metric: `${id}supply_in_profit_usd`, + s({ + metric: tree.unrealized.supplyInProfit.dollars, name: "In Profit", color: colors.green, }), - createBaseSeries({ - metric: `${id}supply_in_loss`, + s({ + metric: tree.unrealized.supplyInLoss.sats, name: "In Loss", color: colors.red, }), - createBaseSeries({ - metric: `${id}supply_in_loss_btc`, + s({ + metric: tree.unrealized.supplyInLoss.bitcoin, name: "In Loss", color: colors.red, }), - createBaseSeries({ - metric: `${id}supply_in_loss_usd`, + s({ + metric: tree.unrealized.supplyInLoss.dollars, name: "In Loss", color: colors.red, }), - createBaseSeries({ - metric: `${id}supply_half`, + s({ + metric: tree.supply.supplyHalf.sats, name: "half", color: colors.gray, options: { lineStyle: 4, }, }), - createBaseSeries({ - metric: `${id}supply_half_btc`, + s({ + metric: tree.supply.supplyHalf.bitcoin, name: useGroupName ? name : "half", color: "list" in args ? color : colors.gray, options: { lineStyle: 4, }, }), - createBaseSeries({ - metric: `${id}supply_half_usd`, + s({ + metric: tree.supply.supplyHalf.dollars, name: useGroupName ? name : "half", color: "list" in args ? color : colors.gray, options: { lineStyle: 4, }, }), - ...(id + ...("supplyInProfitRelToCirculatingSupply" in tree.relative ? 
[ - createBaseSeries({ - metric: `${id}supply_in_profit_rel_to_circulating_supply`, + s({ + metric: + tree.relative.supplyInProfitRelToCirculatingSupply, name: "In Profit", color: colors.green, }), - createBaseSeries({ - metric: `${id}supply_in_loss_rel_to_circulating_supply`, + s({ + metric: + tree.relative.supplyInLossRelToCirculatingSupply, name: "In Loss", color: colors.red, }), ] : []), - createBaseSeries({ - metric: `${id}supply_in_profit_rel_to_own_supply`, + s({ + metric: tree.relative.supplyInProfitRelToOwnSupply, name: "In Profit", color: colors.green, }), - createBaseSeries({ - metric: `${id}supply_in_loss_rel_to_own_supply`, + s({ + metric: tree.relative.supplyInLossRelToOwnSupply, name: "In Loss", color: colors.red, }), @@ -1279,33 +1324,32 @@ export function createPartialOptions({ colors, brk }) { { name: "total", title: `Supply ${title}`, - bottom: list.flatMap(({ color, name, id: _id }) => { - const id = fixId(_id); + bottom: list.flatMap(({ color, name, tree }) => { return /** @type {const} */ ([ - createBaseSeries({ - metric: `${id}supply`, + s({ + metric: tree.supply.supply.sats, name, color, }), - createBaseSeries({ - metric: `${id}supply_btc`, + s({ + metric: tree.supply.supply.bitcoin, name, color, }), - createBaseSeries({ - metric: `${id}supply_usd`, + s({ + metric: tree.supply.supply.dollars, name, color, }), - id - ? createBaseSeries({ - metric: `${id}supply_rel_to_circulating_supply`, + "supplyRelToCirculatingSupply" in tree.relative + ? 
s({ + metric: tree.relative.supplyRelToCirculatingSupply, name, color, }) - : createBaseSeries({ + : s({ unit: "%all", - metric: "constant_100", + metric: constant100, name, color, }), @@ -1315,28 +1359,30 @@ export function createPartialOptions({ colors, brk }) { { name: "in profit", title: `Supply In Profit ${title}`, - bottom: list.flatMap(({ color, name, id: _id }) => { - const id = fixId(_id); + bottom: list.flatMap(({ color, name, tree }) => { return /** @type {const} */ ([ - createBaseSeries({ - metric: `${id}supply_in_profit`, + s({ + metric: tree.unrealized.supplyInProfit.sats, name, color, }), - createBaseSeries({ - metric: `${id}supply_in_profit_btc`, + s({ + metric: tree.unrealized.supplyInProfit.bitcoin, name, color, }), - createBaseSeries({ - metric: `${id}supply_in_profit_usd`, + s({ + metric: tree.unrealized.supplyInProfit.dollars, name, color, }), - ...(id + ...("supplyInProfitRelToCirculatingSupply" in + tree.relative ? [ - createBaseSeries({ - metric: `${id}supply_in_profit_rel_to_circulating_supply`, + s({ + metric: + tree.relative + .supplyInProfitRelToCirculatingSupply, name, color, }), @@ -1348,28 +1394,29 @@ export function createPartialOptions({ colors, brk }) { { name: "in loss", title: `Supply In loss ${title}`, - bottom: list.flatMap(({ color, name, id: _id }) => { - const id = fixId(_id); + bottom: list.flatMap(({ color, name, tree }) => { return /** @type {const} */ ([ - createBaseSeries({ - metric: `${id}supply_in_loss`, + s({ + metric: tree.unrealized.supplyInLoss.sats, name, color, }), - createBaseSeries({ - metric: `${id}supply_in_loss_btc`, + s({ + metric: tree.unrealized.supplyInLoss.bitcoin, name, color, }), - createBaseSeries({ - metric: `${id}supply_in_loss_usd`, + s({ + metric: tree.unrealized.supplyInLoss.dollars, name, color, }), - ...(id + ...("supplyInLossRelToCirculatingSupply" in tree.relative ? 
[ - createBaseSeries({ - metric: `${id}supply_in_loss_rel_to_circulating_supply`, + s({ + metric: + tree.relative + .supplyInLossRelToCirculatingSupply, name, color, }), @@ -1383,43 +1430,39 @@ export function createPartialOptions({ colors, brk }) { { name: "utxo count", title: `UTXO Count ${title}`, - bottom: list.flatMap(({ color, name, id: _id }) => { - const id = fixId(_id); + bottom: list.flatMap(({ color, name, tree }) => { return /** @type {const} */ ([ - createBaseSeries({ - metric: `${id}utxo_count`, + s({ + metric: tree.supply.utxoCount, name: useGroupName ? name : "Count", color, }), ]); }), }, - ...(list.filter(({ id }) => brk.hasMetric(`${fixId(id)}addr_count`)) - .length > ("list" in args ? 1 : 0) + ...(list.filter(({ tree }) => tree.addrCount).length > + ("list" in args ? 1 : 0) ? !("list" in args) || - list.filter(({ id }) => - brk.hasMetric(`${fixId(id)}empty_addr_count`), - ).length <= 1 + list.filter(({ tree }) => tree.emptyAddrCount).length <= 1 ? [ { name: "address count", title: `Address Count ${title}`, - bottom: list.flatMap(({ name, color, id: _id }) => { - const id = fixId(_id); + bottom: list.flatMap(({ name, color, tree }) => { return [ - ...(brk.hasMetric(`${id}addr_count`) + ...(tree.addrCount ? /** @type {const} */ ([ - createBaseSeries({ - metric: `${id}addr_count`, + s({ + metric: tree.addrCount, name: useGroupName ? name : "Loaded", color: useGroupName ? color : colors.orange, }), ]) : []), - ...(brk.hasMetric(`${id}empty_addr_count`) + ...(tree.emptyAddrCount ? 
/** @type {const} */ ([ - createBaseSeries({ - metric: `${id}empty_addr_count`, + s({ + metric: tree.emptyAddrCount, name: "Empty", color: colors.gray, defaultActive: false, @@ -1438,36 +1481,28 @@ export function createPartialOptions({ colors, brk }) { name: "loaded", title: `Loaded Address Count ${title}`, bottom: list - .filter(({ id }) => - brk.hasMetric(`${fixId(id)}addr_count`), - ) - .flatMap(({ name, color, id: _id }) => { - const id = fixId(_id); + .filter(({ tree }) => tree.addrCount) + .flatMap(({ name, color, tree }) => { return [ - createBaseSeries({ - metric: `${id}addr_count`, + s({ + metric: tree.addrCount, name, color, }), ]; }), }, - ...(list.filter(({ id }) => - brk.hasMetric(`${fixId(id)}empty_addr_count`), - ).length + ...(list.filter(({ tree }) => tree.emptyAddrCount).length ? [ { name: "empty", title: `Empty Address Count ${title}`, bottom: list - .filter(({ id }) => - brk.hasMetric(`${fixId(id)}empty_addr_count`), - ) - .flatMap(({ name, color, id: _id }) => { - const id = fixId(_id); + .filter(({ tree }) => tree.emptyAddrCount) + .flatMap(({ name, color, tree }) => { return [ - createBaseSeries({ - metric: `${id}empty_addr_count`, + s({ + metric: tree.emptyAddrCount, name, color, }), @@ -1488,9 +1523,9 @@ export function createPartialOptions({ colors, brk }) { { name: "Price", title: `Realized Price ${title}`, - top: list.map(({ color, name, id }) => - createBaseSeries({ - metric: `${fixId(id)}realized_price`, + top: list.map(({ color, name, tree }) => + s({ + metric: tree.realizedPrice, name, color, }), @@ -1500,9 +1535,9 @@ export function createPartialOptions({ colors, brk }) { name: "Ratio", title: `Realized Price Ratio ${title}`, bottom: [ - ...list.map(({ color, name, id }) => - createBaseSeries({ - metric: `${fixId(id)}realized_price_ratio`, + ...list.map(({ color, name, tree }) => + s({ + metric: tree.realizedPriceExtra.ratio, name, color, }), @@ -1516,7 +1551,8 @@ export function createPartialOptions({ colors, brk }) { ] : 
createPriceWithRatioOptions({ title: `Realized Price ${title}`, - metric: `${fixId(args.id)}realized_price`, + price: args.tree.realizedPrice, + ratio: args.tree.realizedPriceExtra, name: "price", legend: "realized", color: args.color, @@ -1524,11 +1560,10 @@ export function createPartialOptions({ colors, brk }) { { name: "capitalization", title: `Realized Capitalization ${title}`, - bottom: list.flatMap(({ color, name, id: _id }) => { - const id = fixId(_id); + bottom: list.flatMap(({ color, name, tree }) => { return /** @type {const} */ ([ - createBaseSeries({ - metric: `${id}realized_cap`, + s({ + metric: tree.realized.realizedCap, name: useGroupName ? name : "Capitalization", color, }), @@ -1536,7 +1571,7 @@ export function createPartialOptions({ colors, brk }) { ? [ /** @satisfies {FetchedBaselineSeriesBlueprint} */ ({ type: "Baseline", - metric: `${id}realized_cap_30d_delta`, + metric: tree.realized.realizedCap30dDelta, title: "30d change", defaultActive: false, }), @@ -1547,11 +1582,11 @@ export function createPartialOptions({ colors, brk }) { ] : []), ...(!("list" in args) && - brk.hasMetric(`${id}realized_cap_rel_to_own_market_cap`) + tree.realized?.realizedCapRelToOwnMarketCap ? 
[ /** @satisfies {FetchedBaselineSeriesBlueprint} */ ({ type: "Baseline", - metric: `${id}realized_cap_rel_to_own_market_cap`, + metric: tree.realized.realizedCapRelToOwnMarketCap, title: "ratio", options: { baseValue: { price: 100 } }, colors: [colors.red, colors.green], @@ -1572,72 +1607,66 @@ export function createPartialOptions({ colors, brk }) { name: "pnl", title: `Realized Profit And Loss ${title}`, bottom: [ - createBaseSeries({ - metric: `${fixId(args.id)}realized_profit`, + s({ + metric: args.tree.realized.realizedProfit.base, name: "Profit", color: colors.green, }), - createBaseSeries({ - metric: `${fixId(args.id)}realized_loss`, + s({ + metric: args.tree.realized.realizedLoss.base, name: "Loss", color: colors.red, defaultActive: false, }), - ...(brk.hasMetric( - `${fixId(args.id)}realized_profit_to_loss_ratio`, - ) + ...(args.tree.realized?.realizedProfitToLossRatio ? [ - createBaseSeries({ - metric: `${fixId( - args.id, - )}realized_profit_to_loss_ratio`, + s({ + metric: + args.tree.realized.realizedProfitToLossRatio, name: "proft / loss", color: colors.yellow, }), ] : []), - createBaseSeries({ - metric: `${fixId(args.id)}total_realized_pnl`, + s({ + metric: args.tree.realized.totalRealizedPnl.base, name: "Total", color: colors.default, defaultActive: false, }), - createBaseSeries({ - metric: `${fixId(args.id)}neg_realized_loss`, + s({ + metric: args.tree.realized.negRealizedLoss.base, name: "Negative Loss", color: colors.red, }), - createBaseSeries({ - metric: `${fixId(args.id)}realized_profit_cumulative`, + s({ + metric: args.tree.realized.realizedProfit.cumulative, name: "Cumulative Profit", color: colors.green, defaultActive: false, }), - createBaseSeries({ - metric: `${fixId(args.id)}realized_loss_cumulative`, + s({ + metric: args.tree.realized.realizedLoss.cumulative, name: "Cumulative Loss", color: colors.red, defaultActive: false, }), - createBaseSeries({ - metric: `${fixId(args.id)}neg_realized_loss_cumulative`, + s({ + metric: 
args.tree.realized.negRealizedLoss.cumulative, name: "Cumulative Negative Loss", color: colors.red, defaultActive: false, }), /** @satisfies {FetchedBaselineSeriesBlueprint} */ ({ type: "Baseline", - metric: `${fixId( - args.id, - )}realized_profit_rel_to_realized_cap`, + metric: + args.tree.realized.realizedProfitRelToRealizedCap, title: "Profit", color: colors.green, }), /** @satisfies {FetchedBaselineSeriesBlueprint} */ ({ type: "Baseline", - metric: `${fixId( - args.id, - )}realized_loss_rel_to_realized_cap`, + metric: args.tree.realized.realizedLossRelToRealizedCap, title: "Loss", color: colors.red, }), @@ -1653,46 +1682,42 @@ export function createPartialOptions({ colors, brk }) { { name: "Net pnl", title: `Net Realized Profit And Loss ${title}`, - bottom: list.flatMap(({ color, name, id }) => [ + bottom: list.flatMap(({ color, name, tree }) => [ /** @satisfies {FetchedBaselineSeriesBlueprint} */ ({ type: "Baseline", - metric: `${fixId(id)}net_realized_pnl`, + metric: tree.realized.netRealizedPnl.base, title: "Raw", }), /** @satisfies {FetchedBaselineSeriesBlueprint} */ ({ type: "Baseline", - metric: `${fixId(id)}net_realized_pnl_cumulative`, + metric: tree.realized.netRealizedPnl.cumulative, title: "Cumulative", defaultActive: false, }), /** @satisfies {FetchedBaselineSeriesBlueprint} */ ({ type: "Baseline", - metric: `${fixId( - id, - )}net_realized_pnl_cumulative_30d_delta`, + metric: tree.realized.netRealizedPnlCumulative30dDelta, title: "cumulative 30d change", defaultActive: false, }), /** @satisfies {FetchedBaselineSeriesBlueprint} */ ({ type: "Baseline", - metric: `${fixId( - id, - )}net_realized_pnl_rel_to_realized_cap`, + metric: tree.realized.netRealizedPnlRelToRealizedCap, title: "Raw", }), /** @satisfies {FetchedBaselineSeriesBlueprint} */ ({ type: "Baseline", - metric: `${fixId( - id, - )}net_realized_pnl_cumulative_30d_delta_rel_to_realized_cap`, + metric: + tree.realized + .netRealizedPnlCumulative30dDeltaRelToRealizedCap, title: "cumulative 
30d change", defaultActive: false, }), /** @satisfies {FetchedBaselineSeriesBlueprint} */ ({ type: "Baseline", - metric: `${fixId( - id, - )}net_realized_pnl_cumulative_30d_delta_rel_to_market_cap`, + metric: + tree.realized + .netRealizedPnlCumulative30dDeltaRelToMarketCap, title: "cumulative 30d change", }), createPriceLine({ @@ -1709,13 +1734,11 @@ export function createPartialOptions({ colors, brk }) { { name: "sopr", title: `Spent Output Profit Ratio ${title}`, - bottom: list.flatMap(({ color, name, id }) => { - const soprKey = `${fixId(id)}sopr`; - const asoprKey = `${fixId(id)}adjusted_sopr`; + bottom: list.flatMap(({ color, name, tree }) => { return [ /** @satisfies {FetchedBaselineSeriesBlueprint} */ ({ type: "Baseline", - metric: soprKey, + metric: tree.realized.sopr, title: "normal", options: { baseValue: { @@ -1723,11 +1746,11 @@ export function createPartialOptions({ colors, brk }) { }, }, }), - ...(brk.hasMetric(asoprKey) + ...(tree.realized?.adjustedSopr ? [ /** @satisfies {FetchedBaselineSeriesBlueprint} */ ({ type: "Baseline", - metric: asoprKey, + metric: tree.realized.adjustedSopr, title: "adjusted", colors: [colors.yellow, colors.fuchsia], defaultActive: false, @@ -1741,7 +1764,7 @@ export function createPartialOptions({ colors, brk }) { : []), /** @satisfies {FetchedBaselineSeriesBlueprint} */ ({ type: "Baseline", - metric: `${soprKey}_7d_ema`, + metric: tree.realized.sopr7dEma, title: "7d ema", colors: [colors.lime, colors.rose], defaultActive: false, @@ -1751,11 +1774,11 @@ export function createPartialOptions({ colors, brk }) { }, }, }), - ...(brk.hasMetric(asoprKey) + ...(tree.realized?.adjustedSopr7dEma ? [ /** @satisfies {FetchedBaselineSeriesBlueprint} */ ({ type: "Baseline", - metric: `${asoprKey}_7d_ema`, + metric: tree.realized.adjustedSopr7dEma, title: "adj. 
7d ema", colors: [colors.amber, colors.purple], defaultActive: false, @@ -1769,7 +1792,7 @@ export function createPartialOptions({ colors, brk }) { : []), /** @satisfies {FetchedBaselineSeriesBlueprint} */ ({ type: "Baseline", - metric: `${soprKey}_30d_ema`, + metric: tree.realized.sopr30dEma, title: "30d ema", colors: [colors.avocado, colors.pink], defaultActive: false, @@ -1779,11 +1802,11 @@ export function createPartialOptions({ colors, brk }) { }, }, }), - ...(brk.hasMetric(asoprKey) + ...(tree.realized?.adjustedSopr30dEma ? [ /** @satisfies {FetchedBaselineSeriesBlueprint} */ ({ type: "Baseline", - metric: `${asoprKey}_30d_ema`, + metric: tree.realized.adjustedSopr30dEma, title: "adj. 30d ema", colors: [colors.orange, colors.violet], defaultActive: false, @@ -1808,17 +1831,17 @@ export function createPartialOptions({ colors, brk }) { name: "profit", title: `Realized Profit ${title}`, bottom: [ - ...list.flatMap(({ color, name, id: _id }) => { - const id = fixId(_id); + ...list.flatMap(({ color, name, tree }) => { return /** @type {const} */ ([ - createBaseSeries({ - metric: `${id}realized_profit`, + s({ + metric: tree.realized.realizedProfit.base, name, color, }), /** @satisfies {FetchedBaselineSeriesBlueprint} */ ({ type: "Baseline", - metric: `${id}realized_profit_rel_to_realized_cap`, + metric: + tree.realized.realizedProfitRelToRealizedCap, title: name, color, }), @@ -1833,17 +1856,16 @@ export function createPartialOptions({ colors, brk }) { name: "loss", title: `Realized Loss ${title}`, bottom: [ - ...list.flatMap(({ color, name, id: _id }) => { - const id = fixId(_id); + ...list.flatMap(({ color, name, tree }) => { return /** @type {const} */ ([ - createBaseSeries({ - metric: `${id}realized_loss`, + s({ + metric: tree.realized.realizedLoss.base, name, color, }), /** @satisfies {FetchedBaselineSeriesBlueprint} */ ({ type: "Baseline", - metric: `${id}realized_loss_rel_to_realized_cap`, + metric: tree.realized.realizedLossRelToRealizedCap, title: name, 
color, }), @@ -1858,20 +1880,18 @@ export function createPartialOptions({ colors, brk }) { name: "Total pnl", title: `Total Realized Profit And Loss Loss ${title}`, bottom: [ - ...list.flatMap(({ color, name, id: _id }) => { - const id = fixId(_id); + ...list.flatMap(({ color, name, tree }) => { return /** @type {const} */ ([ - createBaseSeries({ - metric: `${id}total_realized_pnl`, + s({ + metric: tree.realized.totalRealizedPnl.base, name, color, }), - ...(brk.hasMetric( - `${id}realized_profit_to_loss_ratio`, - ) + ...(tree.realized?.realizedProfitToLossRatio ? [ - createBaseSeries({ - metric: `${id}realized_profit_to_loss_ratio`, + s({ + metric: + tree.realized.realizedProfitToLossRatio, name, color, }), @@ -1885,18 +1905,16 @@ export function createPartialOptions({ colors, brk }) { name: "Net pnl", title: `Net Realized Profit And Loss ${title}`, bottom: [ - ...list.flatMap(({ color, name, id }) => [ + ...list.flatMap(({ color, name, tree }) => [ /** @satisfies {FetchedBaselineSeriesBlueprint} */ ({ type: "Baseline", - metric: `${fixId(id)}net_realized_pnl`, + metric: tree.realized.netRealizedPnl.base, title: name, color, }), /** @satisfies {FetchedBaselineSeriesBlueprint} */ ({ type: "Baseline", - metric: `${fixId( - id, - )}net_realized_pnl_rel_to_realized_cap`, + metric: tree.realized.netRealizedPnlRelToRealizedCap, title: name, color, }), @@ -1915,11 +1933,10 @@ export function createPartialOptions({ colors, brk }) { { name: "profit", title: `Cumulative Realized Profit ${title}`, - bottom: list.flatMap(({ color, name, id: _id }) => { - const id = fixId(_id); + bottom: list.flatMap(({ color, name, tree }) => { return /** @type {const} */ ([ - createBaseSeries({ - metric: `${id}realized_profit_cumulative`, + s({ + metric: tree.realized.realizedProfit.cumulative, name, color, }), @@ -1929,11 +1946,10 @@ export function createPartialOptions({ colors, brk }) { { name: "loss", title: `Cumulative Realized Loss ${title}`, - bottom: list.flatMap(({ color, name, id: 
_id }) => { - const id = fixId(_id); + bottom: list.flatMap(({ color, name, tree }) => { return /** @type {const} */ ([ - createBaseSeries({ - metric: `${id}realized_loss_cumulative`, + s({ + metric: tree.realized.realizedLoss.cumulative, name, color, }), @@ -1944,10 +1960,10 @@ export function createPartialOptions({ colors, brk }) { name: "Net pnl", title: `Cumulative Net Realized Profit And Loss ${title}`, bottom: [ - ...list.flatMap(({ color, name, id }) => [ + ...list.flatMap(({ color, name, tree }) => [ /** @satisfies {FetchedBaselineSeriesBlueprint} */ ({ type: "Baseline", - metric: `${fixId(id)}net_realized_pnl_cumulative`, + metric: tree.realized.netRealizedPnl.cumulative, title: name, color, defaultActive: false, @@ -1962,28 +1978,27 @@ export function createPartialOptions({ colors, brk }) { name: "Net pnl 30d change", title: `Cumulative Net Realized Profit And Loss 30 Day Change ${title}`, bottom: [ - ...list.flatMap(({ color, name, id }) => [ + ...list.flatMap(({ color, name, tree }) => [ /** @satisfies {FetchedBaselineSeriesBlueprint} */ ({ type: "Baseline", - metric: `${fixId( - id, - )}net_realized_pnl_cumulative_30d_delta`, + metric: + tree.realized.netRealizedPnlCumulative30dDelta, title: name, color, }), /** @satisfies {FetchedBaselineSeriesBlueprint} */ ({ type: "Baseline", - metric: `${fixId( - id, - )}net_realized_pnl_cumulative_30d_delta_rel_to_realized_cap`, + metric: + tree.realized + .netRealizedPnlCumulative30dDeltaRelToRealizedCap, title: name, color, }), /** @satisfies {FetchedBaselineSeriesBlueprint} */ ({ type: "Baseline", - metric: `${fixId( - id, - )}net_realized_pnl_cumulative_30d_delta_rel_to_market_cap`, + metric: + tree.realized + .netRealizedPnlCumulative30dDeltaRelToMarketCap, title: name, color, }), @@ -2008,10 +2023,10 @@ export function createPartialOptions({ colors, brk }) { name: "Normal", title: `Spent Output Profit Ratio ${title}`, bottom: [ - ...list.flatMap(({ color, name, id }) => [ + ...list.flatMap(({ color, name, 
tree }) => [ /** @satisfies {FetchedBaselineSeriesBlueprint} */ ({ type: "Baseline", - metric: `${fixId(id)}sopr`, + metric: tree.realized.sopr, title: name, color, }), @@ -2024,12 +2039,12 @@ export function createPartialOptions({ colors, brk }) { }, ...(() => { const reducedList = list - .map(({ color, name, id }) => ({ + .filter(({ tree }) => tree.realized?.adjustedSopr) + .map(({ color, name, tree }) => ({ color, name, - metric: `${fixId(id)}adjusted_sopr`, - })) - .filter(({ metric }) => brk.hasMetric(metric)); + metric: tree.realized.adjustedSopr, + })); return reducedList.length ? [ @@ -2063,28 +2078,28 @@ export function createPartialOptions({ colors, brk }) { name: "Sell Side Risk", title: `Sell Side Risk Ratio ${title}`, bottom: !("list" in args) - ? list.flatMap(({ id }) => [ - createBaseSeries({ - metric: `${fixId(id)}sell_side_risk_ratio`, + ? list.flatMap(({ tree }) => [ + s({ + metric: tree.realized.sellSideRiskRatio, name: "raw", color: colors.orange, }), - createBaseSeries({ - metric: `${fixId(id)}sell_side_risk_ratio_7d_ema`, + s({ + metric: tree.realized.sellSideRiskRatio7dEma, name: "7d ema", color: colors.red, defaultActive: false, }), - createBaseSeries({ - metric: `${fixId(id)}sell_side_risk_ratio_30d_ema`, + s({ + metric: tree.realized.sellSideRiskRatio30dEma, name: "30d ema", color: colors.rose, defaultActive: false, }), ]) - : list.flatMap(({ color, name, id }) => [ - createBaseSeries({ - metric: `${fixId(id)}sell_side_risk_ratio`, + : list.flatMap(({ color, name, tree }) => [ + s({ + metric: tree.realized.sellSideRiskRatio, name, color: color, }), @@ -2098,19 +2113,17 @@ export function createPartialOptions({ colors, brk }) { { name: "created", title: `value created ${title}`, - bottom: list.flatMap(({ color, name, id }) => { - const normalKey = `${fixId(id)}value_created`; - const adjKey = `${fixId(id)}adjusted_value_created`; + bottom: list.flatMap(({ color, name, tree }) => { return [ - createBaseSeries({ - metric: normalKey, + s({ + 
metric: tree.realized.valueCreated, name: "normal", color: colors.emerald, }), - ...(brk.hasMetric(adjKey) + ...(tree.realized?.adjustedValueCreated ? [ - createBaseSeries({ - metric: adjKey, + s({ + metric: tree.realized.adjustedValueCreated, name: "adjusted", color: colors.lime, }), @@ -2122,19 +2135,18 @@ export function createPartialOptions({ colors, brk }) { { name: "destroyed", title: `value destroyed ${title}`, - bottom: list.flatMap(({ color, name, id }) => { - const normalKey = `${fixId(id)}value_destroyed`; - const adjKey = `${fixId(id)}adjusted_value_destroyed`; + bottom: list.flatMap(({ color, name, tree }) => { return [ - createBaseSeries({ - metric: normalKey, + s({ + metric: tree.realized.valueDestroyed, name: "normal", color: colors.red, }), - ...(brk.hasMetric(adjKey) + ...(tree.realized?.adjustedValueDestroyed ? [ - createBaseSeries({ - metric: adjKey, + s({ + metric: + tree.realized.adjustedValueDestroyed, name: "adjusted", color: colors.pink, }), @@ -2151,9 +2163,9 @@ export function createPartialOptions({ colors, brk }) { { name: "Normal", title: `Value Created ${title}`, - bottom: list.flatMap(({ color, name, id }) => [ - createBaseSeries({ - metric: `${fixId(id)}value_created`, + bottom: list.flatMap(({ color, name, tree }) => [ + s({ + metric: tree.realized.valueCreated, name, color, }), @@ -2161,12 +2173,15 @@ export function createPartialOptions({ colors, brk }) { }, ...(() => { const reducedList = list - .map(({ color, name, id }) => ({ + .filter( + ({ tree }) => + tree.realized?.adjustedValueCreated, + ) + .map(({ color, name, tree }) => ({ color, name, - metric: `${fixId(id)}adjusted_value_created`, - })) - .filter(({ metric }) => brk.hasMetric(metric)); + metric: tree.realized.adjustedValueCreated, + })); return reducedList.length ? 
[ { @@ -2174,7 +2189,7 @@ export function createPartialOptions({ colors, brk }) { title: `Adjusted value created ${title}`, bottom: reducedList.map( ({ color, name, metric }) => - createBaseSeries({ + s({ metric, name, color, @@ -2192,9 +2207,9 @@ export function createPartialOptions({ colors, brk }) { { name: "Normal", title: `Value destroyed ${title}`, - bottom: list.flatMap(({ color, name, id }) => [ - createBaseSeries({ - metric: `${fixId(id)}value_destroyed`, + bottom: list.flatMap(({ color, name, tree }) => [ + s({ + metric: tree.realized.valueDestroyed, name, color, }), @@ -2202,12 +2217,15 @@ export function createPartialOptions({ colors, brk }) { }, ...(() => { const reducedList = list - .map(({ color, name, id }) => ({ + .filter( + ({ tree }) => + tree.realized?.adjustedValueDestroyed, + ) + .map(({ color, name, tree }) => ({ color, name, - metric: `${fixId(id)}adjusted_value_destroyed`, - })) - .filter(({ metric }) => brk.hasMetric(metric)); + metric: tree.realized.adjustedValueDestroyed, + })); return reducedList.length ? 
[ { @@ -2215,7 +2233,7 @@ export function createPartialOptions({ colors, brk }) { title: `Adjusted value destroyed ${title}`, bottom: reducedList.map( ({ color, name, metric }) => - createBaseSeries({ + s({ metric, name, color, @@ -2241,74 +2259,67 @@ export function createPartialOptions({ colors, brk }) { name: "pnl", title: `Unrealized Profit And Loss ${title}`, bottom: [ - createBaseSeries({ - metric: `${fixId(args.id)}total_unrealized_pnl`, + s({ + metric: args.tree.unrealized.totalUnrealizedPnl, name: "total", color: colors.default, }), - createBaseSeries({ - metric: `${fixId(args.id)}unrealized_profit`, + s({ + metric: args.tree.unrealized.unrealizedProfit, name: "Profit", color: colors.green, }), - createBaseSeries({ - metric: `${fixId(args.id)}unrealized_loss`, + s({ + metric: args.tree.unrealized.unrealizedLoss, name: "Loss", color: colors.red, defaultActive: false, }), - createBaseSeries({ - metric: `${fixId(args.id)}neg_unrealized_loss`, + s({ + metric: args.tree.unrealized.negUnrealizedLoss, name: "Negative Loss", color: colors.red, }), - createBaseSeries({ - metric: `${fixId( - args.id, - )}unrealized_profit_rel_to_market_cap`, + s({ + metric: + args.tree.relative.unrealizedProfitRelToMarketCap, name: "Profit", color: colors.green, }), - createBaseSeries({ - metric: `${fixId( - args.id, - )}unrealized_loss_rel_to_market_cap`, + s({ + metric: args.tree.relative.unrealizedLossRelToMarketCap, name: "Loss", color: colors.red, defaultActive: false, }), - createBaseSeries({ - metric: `${fixId( - args.id, - )}neg_unrealized_loss_rel_to_market_cap`, + s({ + metric: + args.tree.relative.negUnrealizedLossRelToMarketCap, name: "Negative Loss", color: colors.red, }), - ...(brk.hasMetric( - `${fixId( - args.id, - )}unrealized_profit_rel_to_own_market_cap`, - ) + ...("unrealizedProfitRelToOwnMarketCap" in + args.tree.relative ? 
[ - createBaseSeries({ - metric: `${fixId( - args.id, - )}unrealized_profit_rel_to_own_market_cap`, + s({ + metric: + args.tree.relative + .unrealizedProfitRelToOwnMarketCap, name: "Profit", color: colors.green, }), - createBaseSeries({ - metric: `${fixId( - args.id, - )}unrealized_loss_rel_to_own_market_cap`, + s({ + metric: + args.tree.relative + .unrealizedLossRelToOwnMarketCap, name: "Loss", color: colors.red, defaultActive: false, }), - createBaseSeries({ - metric: `${fixId( - args.id, - )}neg_unrealized_loss_rel_to_own_market_cap`, + s({ + metric: + args.tree.relative + .negUnrealizedLossRelToOwnMarketCap, name: "Negative Loss", color: colors.red, }), @@ -2321,31 +2332,28 @@ export function createPartialOptions({ colors, brk }) { }), ] : []), - ...(brk.hasMetric( - `${fixId( - args.id, - )}unrealized_profit_rel_to_own_total_unrealized_pnl`, - ) + ...("unrealizedProfitRelToOwnTotalUnrealizedPnl" in + args.tree.relative ? [ - createBaseSeries({ - metric: `${fixId( - args.id, - )}unrealized_profit_rel_to_own_total_unrealized_pnl`, + s({ + metric: + args.tree.relative + .unrealizedProfitRelToOwnTotalUnrealizedPnl, name: "Profit", color: colors.green, }), - createBaseSeries({ - metric: `${fixId( - args.id, - )}unrealized_loss_rel_to_own_total_unrealized_pnl`, + s({ + metric: + args.tree.relative + .unrealizedLossRelToOwnTotalUnrealizedPnl, name: "Loss", color: colors.red, defaultActive: false, }), - createBaseSeries({ - metric: `${fixId( - args.id, - )}neg_unrealized_loss_rel_to_own_total_unrealized_pnl`, + s({ + metric: + args.tree.relative + .negUnrealizedLossRelToOwnTotalUnrealizedPnl, name: "Negative Loss", color: colors.red, }), @@ -2373,11 +2381,10 @@ export function createPartialOptions({ colors, brk }) { { name: "profit", title: `Unrealized Profit ${title}`, - bottom: list.flatMap(({ color, name, id: _id }) => { - const id = fixId(_id); + bottom: list.flatMap(({ color, name, tree }) => { return /** @type {const} */ ([ - createBaseSeries({ - metric: 
`${id}unrealized_profit`, + s({ + metric: tree.unrealized.unrealizedProfit, name, color, }), @@ -2387,11 +2394,10 @@ export function createPartialOptions({ colors, brk }) { { name: "loss", title: `Unrealized Loss ${title}`, - bottom: list.flatMap(({ color, name, id: _id }) => { - const id = fixId(_id); + bottom: list.flatMap(({ color, name, tree }) => { return /** @type {const} */ ([ - createBaseSeries({ - metric: `${id}unrealized_loss`, + s({ + metric: tree.unrealized.unrealizedLoss, name, color, }), @@ -2401,11 +2407,10 @@ export function createPartialOptions({ colors, brk }) { { name: "total pnl", title: `Unrealized Total Profit And Loss ${title}`, - bottom: list.flatMap(({ color, name, id: _id }) => { - const id = fixId(_id); + bottom: list.flatMap(({ color, name, tree }) => { return /** @type {const} */ ([ - createBaseSeries({ - metric: `${id}total_unrealized_pnl`, + s({ + metric: tree.unrealized.totalUnrealizedPnl, name, color, }), @@ -2417,28 +2422,25 @@ export function createPartialOptions({ colors, brk }) { name: "Net pnl", title: `Net Unrealized Profit And Loss ${title}`, bottom: [ - ...list.flatMap(({ color, name, id }) => [ + ...list.flatMap(({ color, name, tree }) => [ /** @satisfies {FetchedBaselineSeriesBlueprint} */ ({ type: "Baseline", - metric: `${fixId(id)}net_unrealized_pnl`, + metric: tree.unrealized.netUnrealizedPnl, title: useGroupName ? name : "Net", color: useGroupName ? color : undefined, }), /** @satisfies {FetchedBaselineSeriesBlueprint} */ ({ type: "Baseline", - metric: `${fixId(id)}net_unrealized_pnl_rel_to_market_cap`, + metric: tree.relative.netUnrealizedPnlRelToMarketCap, title: useGroupName ? name : "Net", color: useGroupName ? color : undefined, }), - ...(brk.hasMetric( - `${fixId(id)}net_unrealized_pnl_rel_to_own_market_cap`, - ) + ...("netUnrealizedPnlRelToOwnMarketCap" in tree.relative ? 
[ /** @satisfies {FetchedBaselineSeriesBlueprint} */ ({ type: "Baseline", - metric: `${fixId( - id, - )}net_unrealized_pnl_rel_to_own_market_cap`, + metric: + tree.relative.netUnrealizedPnlRelToOwnMarketCap, title: useGroupName ? name : "Net", color: useGroupName ? color : undefined, }), @@ -2447,17 +2449,14 @@ export function createPartialOptions({ colors, brk }) { }), ] : []), - ...(brk.hasMetric( - `${fixId( - id, - )}net_unrealized_pnl_rel_to_own_total_unrealized_pnl`, - ) + ...("netUnrealizedPnlRelToOwnTotalUnrealizedPnl" in + tree.relative ? [ /** @satisfies {FetchedBaselineSeriesBlueprint} */ ({ type: "Baseline", - metric: `${fixId( - id, - )}net_unrealized_pnl_rel_to_own_total_unrealized_pnl`, + metric: + tree.relative + .netUnrealizedPnlRelToOwnTotalUnrealizedPnl, title: useGroupName ? name : "Net", color: useGroupName ? color : undefined, }), @@ -2485,11 +2484,10 @@ export function createPartialOptions({ colors, brk }) { { name: "Average", title: `Average Cost Basis ${title}`, - top: list.flatMap(({ color, name, id: _id }) => { - const id = fixId(_id); + top: list.flatMap(({ color, name, tree }) => { return /** @type {const} */ ([ - createBaseSeries({ - metric: `${id}realized_price`, + s({ + metric: tree.realizedPrice, name, color: color, }), @@ -2499,11 +2497,10 @@ export function createPartialOptions({ colors, brk }) { { name: "Min", title: `Min Cost Basis ${title}`, - top: list.flatMap(({ color, name, id: _id }) => { - const id = fixId(_id); + top: list.flatMap(({ color, name, tree }) => { return /** @type {const} */ ([ - createBaseSeries({ - metric: `${id}min_price_paid`, + s({ + metric: tree.pricePaid.minPricePaid, name, color: color, }), @@ -2513,11 +2510,10 @@ export function createPartialOptions({ colors, brk }) { { name: "Max", title: `Max Cost Basis ${title}`, - top: list.flatMap(({ color, name, id: _id }) => { - const id = fixId(_id); + top: list.flatMap(({ color, name, tree }) => { return /** @type {const} */ ([ - createBaseSeries({ - metric: 
`${id}max_price_paid`, + s({ + metric: tree.pricePaid.maxPricePaid, name, color: color, }), @@ -2532,19 +2528,19 @@ export function createPartialOptions({ colors, brk }) { name: "Cost Basis", title: `Costs Basis ${title}`, top: [ - createBaseSeries({ - metric: `${fixId(args.id)}realized_price`, + s({ + metric: args.tree.realizedPrice, name: "Average", color: args.color, }), - createBaseSeries({ - metric: `${fixId(args.id)}min_price_paid`, + s({ + metric: args.tree.pricePaid.minPricePaid, name: "Min", color: colors.green, defaultActive: false, }), - createBaseSeries({ - metric: `${fixId(args.id)}max_price_paid`, + s({ + metric: args.tree.pricePaid.maxPricePaid, name: "Max", color: colors.red, }), @@ -2559,16 +2555,15 @@ export function createPartialOptions({ colors, brk }) { { name: "Sum", title: `Sum of Coins Destroyed ${title}`, - bottom: list.flatMap(({ color, name, id: _id }) => { - const id = fixId(_id); + bottom: list.flatMap(({ color, name, tree }) => { return /** @type {const} */ ([ - createBaseSeries({ - metric: `${id}coinblocks_destroyed`, + s({ + metric: tree.activity.coinblocksDestroyed.base, name, color, }), - createBaseSeries({ - metric: `${id}coindays_destroyed`, + s({ + metric: tree.activity.coindaysDestroyed.base, name, color, }), @@ -2578,16 +2573,15 @@ export function createPartialOptions({ colors, brk }) { { name: "Cumulative", title: `Cumulative Coins Destroyed ${title}`, - bottom: list.flatMap(({ color, name, id: _id }) => { - const id = fixId(_id); + bottom: list.flatMap(({ color, name, tree }) => { return /** @type {const} */ ([ - createBaseSeries({ - metric: `${id}coinblocks_destroyed_cumulative`, + s({ + metric: tree.activity.coinblocksDestroyed.cumulative, name, color, }), - createBaseSeries({ - metric: `${id}coindays_destroyed_cumulative`, + s({ + metric: tree.activity.coindaysDestroyed.cumulative, name, color, }), @@ -2601,27 +2595,26 @@ export function createPartialOptions({ colors, brk }) { { name: "Coins Destroyed", title: `Coins 
Destroyed ${title}`, - bottom: list.flatMap(({ color, name, id: _id }) => { - const id = fixId(_id); + bottom: list.flatMap(({ color, name, tree }) => { return /** @type {const} */ ([ - createBaseSeries({ - metric: `${id}coinblocks_destroyed`, + s({ + metric: tree.activity.coinblocksDestroyed.base, name: "sum", color, }), - createBaseSeries({ - metric: `${id}coinblocks_destroyed_cumulative`, + s({ + metric: tree.activity.coinblocksDestroyed.cumulative, name: "cumulative", color, defaultActive: false, }), - createBaseSeries({ - metric: `${id}coindays_destroyed`, + s({ + metric: tree.activity.coindaysDestroyed.base, name: "sum", color, }), - createBaseSeries({ - metric: `${id}coindays_destroyed_cumulative`, + s({ + metric: tree.activity.coindaysDestroyed.cumulative, name: "cumulative", color, defaultActive: false, @@ -2658,7 +2651,7 @@ export function createPartialOptions({ colors, brk }) { name: "Capitalization", title: "Market Capitalization", bottom: [ - createBaseSeries({ + s({ metric: "market_cap", name: "Capitalization", }), @@ -2671,27 +2664,27 @@ export function createPartialOptions({ colors, brk }) { // { // name: "Value", top: [ - createBaseSeries({ + s({ metric: "price_ath", name: "ath", }), ], bottom: [ - createBaseSeries({ + s({ metric: "price_drawdown", name: "Drawdown", color: colors.red, }), - createBaseSeries({ + s({ metric: "days_since_price_ath", name: "since", }), - createBaseSeries({ + s({ metric: "max_days_between_price_aths", name: "Max", color: colors.red, }), - createBaseSeries({ + s({ metric: "max_years_between_price_aths", name: "Max", color: colors.red, @@ -2715,18 +2708,18 @@ export function createPartialOptions({ colors, brk }) { { name: "Compare", title: `Market Price ${nameAddon} Moving Averages`, - top: averages.map(({ days, id, name, color }) => - createBaseSeries({ - metric: `price_${id}_${metricAddon}`, + top: averages.map(({ id, color, sma, ema }) => + s({ + metric: (metricAddon === "sma" ? 
sma : ema).price, name: id, color, }), ), }, - ...averages.map(({ id, name, color }) => ({ + ...averages.map(({ name, color, sma, ema }) => ({ name, tree: createPriceWithRatioOptions({ - metric: `price_${id}_${metricAddon}`, + ratio: metricAddon === "sma" ? sma : ema, name, title: `${name} Market Price ${nameAddon} Moving Average`, legend: "average", @@ -2739,32 +2732,31 @@ export function createPartialOptions({ colors, brk }) { { name: "Performance", tree: /** @type {const} */ ([ - "1d", - "1w", - "1m", - "3m", - "6m", - "1y", - "2y", - "3y", - "4y", - "5y", - "6y", - "8y", - "10y", - ]).map((id) => { + ["1d", market._1dPriceReturns], + ["1w", market._1wPriceReturns], + ["1m", market._1mPriceReturns], + ["3m", market._3mPriceReturns], + ["6m", market._6mPriceReturns], + ["1y", market._1yPriceReturns], + ["2y", market._2yPriceReturns, market._2yCagr], + ["3y", market._3yPriceReturns, market._3yCagr], + ["4y", market._4yPriceReturns, market._4yCagr], + ["5y", market._5yPriceReturns, market._5yCagr], + ["6y", market._6yPriceReturns, market._6yCagr], + ["8y", market._8yPriceReturns, market._8yCagr], + ["10y", market._10yPriceReturns, market._10yCagr], + ]).map(([id, priceReturns, cagr]) => { const name = periodIdToName(id, true); - const cagr = `${id}_cagr`; return { name, title: `${name} Performance`, bottom: [ /** @satisfies {FetchedBaselineSeriesBlueprint} */ ({ - metric: `${id}_price_returns`, + metric: priceReturns, title: "total", type: "Baseline", }), - ...(brk.hasMetric(cagr) + ...(cagr ? 
[ /** @satisfies {FetchedBaselineSeriesBlueprint} */ ({ metric: cagr, @@ -2788,17 +2780,17 @@ export function createPartialOptions({ colors, brk }) { name: "Volatility", title: "Bitcoin Price Volatility Index", bottom: [ - createBaseSeries({ + s({ metric: "price_1w_volatility", name: "1w", color: colors.red, }), - createBaseSeries({ + s({ metric: "price_1m_volatility", name: "1m", color: colors.orange, }), - createBaseSeries({ + s({ metric: "price_1y_volatility", name: "1y", color: colors.lime, @@ -2828,12 +2820,12 @@ export function createPartialOptions({ colors, brk }) { name: metric, title: `Bitcoin Price ${title} MinMax Bands`, top: [ - createBaseSeries({ + s({ metric: `price_${metric}_min`, name: "min", color: colors.red, }), - createBaseSeries({ + s({ metric: `price_${metric}_max`, name: "max", color: colors.green, @@ -2845,7 +2837,7 @@ export function createPartialOptions({ colors, brk }) { name: "True range", title: "Bitcoin Price True Range", bottom: [ - createBaseSeries({ + s({ metric: "price_true_range", name: "value", color: colors.yellow, @@ -2856,7 +2848,7 @@ export function createPartialOptions({ colors, brk }) { name: "Choppiness", title: "Bitcoin Price Choppiness Index", bottom: [ - createBaseSeries({ + s({ metric: "price_2w_choppiness_index", name: "2w", color: colors.red, @@ -2875,17 +2867,17 @@ export function createPartialOptions({ colors, brk }) { name: "Mayer multiple", title: "Mayer multiple", top: [ - createBaseSeries({ + s({ metric: `price_200d_sma`, name: "200d sma", color: colors.yellow, }), - createBaseSeries({ + s({ metric: `price_200d_sma_x2_4`, name: "200d sma x2.4", color: colors.green, }), - createBaseSeries({ + s({ metric: `price_200d_sma_x0_8`, name: "200d sma x0.8", color: colors.red, @@ -2901,104 +2893,208 @@ export function createPartialOptions({ colors, brk }) { name: "DCA vs Lump sum", tree: [ .../** @type {const} */ ([ - "1w", - "1m", - "3m", - "6m", - "1y", - ]).map((id) => { - const name = periodIdToName(id, true); + [ + 
"1w", + market._1wDcaAvgPrice, + market.price1wAgo, + market._1wDcaReturns, + market._1wPriceReturns, + ], + [ + "1m", + market._1mDcaAvgPrice, + market.price1mAgo, + market._1mDcaReturns, + market._1mPriceReturns, + ], + [ + "3m", + market._3mDcaAvgPrice, + market.price3mAgo, + market._3mDcaReturns, + market._3mPriceReturns, + ], + [ + "6m", + market._6mDcaAvgPrice, + market.price6mAgo, + market._6mDcaReturns, + market._6mPriceReturns, + ], + [ + "1y", + market._1yDcaAvgPrice, + market.price1yAgo, + market._1yDcaReturns, + market._1yPriceReturns, + ], + ]).map( + ([ + id, + dcaAvgPrice, + priceAgo, + dcaReturns, + priceReturns, + ]) => { + const name = periodIdToName(id, true); - return /** @satisfies {PartialChartOption} */ ({ - name, - title: `${name} DCA vs Lump Sum Returns`, - top: [ - createBaseSeries({ - metric: `${id}_dca_avg_price`, - name: `dca`, - color: colors.orange, - }), - createBaseSeries({ - metric: `price_${id}_ago`, - name: `lump sum`, - color: colors.cyan, - }), - ], - bottom: [ - /** @satisfies {FetchedBaselineSeriesBlueprint} */ ({ - metric: `${id}_dca_returns`, - title: "dca", - type: "Baseline", - colors: [colors.yellow, colors.pink], - }), - /** @satisfies {FetchedBaselineSeriesBlueprint} */ ({ - metric: `${id}_price_returns`, - title: "lump sum", - type: "Baseline", - }), - createPriceLine({ - unit: "percentage", - }), - ], - }); - }), + return /** @satisfies {PartialChartOption} */ ({ + name, + title: `${name} DCA vs Lump Sum Returns`, + top: [ + s({ + metric: dcaAvgPrice, + name: `dca`, + color: colors.orange, + }), + s({ + metric: priceAgo, + name: `lump sum`, + color: colors.cyan, + }), + ], + bottom: [ + /** @satisfies {FetchedBaselineSeriesBlueprint} */ ({ + metric: dcaReturns, + title: "dca", + type: "Baseline", + colors: [colors.yellow, colors.pink], + }), + /** @satisfies {FetchedBaselineSeriesBlueprint} */ ({ + metric: priceReturns, + title: "lump sum", + type: "Baseline", + }), + createPriceLine({ + unit: "percentage", + }), + 
], + }); + }, + ), .../** @type {const} */ ([ - "2y", - "3y", - "4y", - "5y", - "6y", - "8y", - "10y", - ]).map((id) => { - const name = periodIdToName(id, true); - return /** @satisfies {PartialChartOption} */ ({ - name, - title: `${name} DCA vs Lump Sum Returns`, - top: [ - createBaseSeries({ - metric: `${id}_dca_avg_price`, - name: `dca`, - color: colors.orange, - }), - createBaseSeries({ - metric: `price_${id}_ago`, - name: `lump sum`, - color: colors.cyan, - }), - ], - bottom: [ - /** @satisfies {FetchedBaselineSeriesBlueprint} */ ({ - metric: `${id}_dca_returns`, - title: "dca", - type: "Baseline", - colors: [colors.yellow, colors.pink], - }), + [ + "2y", + market._2yDcaAvgPrice, + market.price2yAgo, + market._2yDcaReturns, + market._2yPriceReturns, + market._2yDcaCagr, + market._2yCagr, + ], + [ + "3y", + market._3yDcaAvgPrice, + market.price3yAgo, + market._3yDcaReturns, + market._3yPriceReturns, + market._3yDcaCagr, + market._3yCagr, + ], + [ + "4y", + market._4yDcaAvgPrice, + market.price4yAgo, + market._4yDcaReturns, + market._4yPriceReturns, + market._4yDcaCagr, + market._4yCagr, + ], + [ + "5y", + market._5yDcaAvgPrice, + market.price5yAgo, + market._5yDcaReturns, + market._5yPriceReturns, + market._5yDcaCagr, + market._5yCagr, + ], + [ + "6y", + market._6yDcaAvgPrice, + market.price6yAgo, + market._6yDcaReturns, + market._6yPriceReturns, + market._6yDcaCagr, + market._6yCagr, + ], + [ + "8y", + market._8yDcaAvgPrice, + market.price8yAgo, + market._8yDcaReturns, + market._8yPriceReturns, + market._8yDcaCagr, + market._8yCagr, + ], + [ + "10y", + market._10yDcaAvgPrice, + market.price10yAgo, + market._10yDcaReturns, + market._10yPriceReturns, + market._10yDcaCagr, + market._10yCagr, + ], + ]).map( + ([ + id, + dcaAvgPrice, + priceAgo, + dcaReturns, + priceReturns, + dcaCagr, + cagr, + ]) => { + const name = periodIdToName(id, true); + return /** @satisfies {PartialChartOption} */ ({ + name, + title: `${name} DCA vs Lump Sum Returns`, + top: [ + s({ + 
metric: dcaAvgPrice, + name: `dca`, + color: colors.orange, + }), + s({ + metric: priceAgo, + name: `lump sum`, + color: colors.cyan, + }), + ], + bottom: [ + /** @satisfies {FetchedBaselineSeriesBlueprint} */ ({ + metric: dcaReturns, + title: "dca", + type: "Baseline", + colors: [colors.yellow, colors.pink], + }), - /** @satisfies {FetchedBaselineSeriesBlueprint} */ ({ - metric: `${id}_price_returns`, - title: "lump sum", - type: "Baseline", - }), - /** @satisfies {FetchedBaselineSeriesBlueprint} */ ({ - metric: `${id}_dca_cagr`, - title: "dca cagr", - type: "Baseline", - colors: [colors.yellow, colors.pink], - defaultActive: false, - }), - /** @satisfies {FetchedBaselineSeriesBlueprint} */ ({ - metric: `${id}_cagr`, - title: "lump sum cagr", - type: "Baseline", - defaultActive: false, - }), - createPriceLine({ - unit: "percentage", - }), - ], - }); - }), + /** @satisfies {FetchedBaselineSeriesBlueprint} */ ({ + metric: priceReturns, + title: "lump sum", + type: "Baseline", + }), + /** @satisfies {FetchedBaselineSeriesBlueprint} */ ({ + metric: dcaCagr, + title: "dca cagr", + type: "Baseline", + colors: [colors.yellow, colors.pink], + defaultActive: false, + }), + /** @satisfies {FetchedBaselineSeriesBlueprint} */ ({ + metric: cagr, + title: "lump sum cagr", + type: "Baseline", + defaultActive: false, + }), + createPriceLine({ + unit: "percentage", + }), + ], + }); + }, + ), ], }, { @@ -3008,7 +3104,7 @@ export function createPartialOptions({ colors, brk }) { name: "Compare", title: "DCA Classes", top: dcaClasses.map(({ year, color, defaultActive }) => - createBaseSeries({ + s({ metric: `dca_class_${year}_avg_price`, name: `${year}`, color, @@ -3022,7 +3118,7 @@ export function createPartialOptions({ colors, brk }) { name: `${year}`, title: `DCA Since ${year}`, top: [ - createBaseSeries({ + s({ metric: `dca_class_${year}_avg_price`, name: "cost basis", color, @@ -3059,7 +3155,7 @@ export function createPartialOptions({ colors, brk }) { 
...createSumCumulativeSeries({ metric: "block_count", }), - createBaseSeries({ + s({ metric: "block_count_target", name: "Target", color: colors.gray, @@ -3067,19 +3163,19 @@ export function createPartialOptions({ colors, brk }) { lineStyle: 4, }, }), - createBaseSeries({ + s({ metric: "1w_block_count", name: "1w sum", color: colors.red, defaultActive: false, }), - createBaseSeries({ + s({ metric: "1m_block_count", name: "1m sum", color: colors.pink, defaultActive: false, }), - createBaseSeries({ + s({ metric: "1y_block_count", name: "1y sum", color: colors.purple, @@ -3091,7 +3187,7 @@ export function createPartialOptions({ colors, brk }) { name: "Interval", title: "Block Interval", bottom: [ - createBaseSeries({ + s({ metric: "interval", name: "Interval", }), @@ -3108,15 +3204,15 @@ export function createPartialOptions({ colors, brk }) { name: "Size", title: "Block Size", bottom: [ - createBaseSeries({ + s({ metric: "total_size", name: "raw", }), - createBaseSeries({ + s({ metric: "vbytes", name: "raw", }), - createBaseSeries({ + s({ metric: "weight", name: "raw", }), @@ -3150,31 +3246,31 @@ export function createPartialOptions({ colors, brk }) { name: "Volume", title: "Transaction Volume", bottom: [ - createBaseSeries({ + s({ metric: "sent", name: "Sent", }), - createBaseSeries({ + s({ metric: "sent_btc", name: "Sent", }), - createBaseSeries({ + s({ metric: "sent_usd", name: "Sent", }), - createBaseSeries({ + s({ metric: "annualized_volume", name: "annualized", color: colors.red, defaultActive: false, }), - createBaseSeries({ + s({ metric: "annualized_volume_btc", name: "annualized", color: colors.red, defaultActive: false, }), - createBaseSeries({ + s({ metric: "annualized_volume_usd", name: "annualized", color: colors.lime, @@ -3212,11 +3308,11 @@ export function createPartialOptions({ colors, brk }) { name: "Velocity", title: "Transactions Velocity", bottom: [ - createBaseSeries({ + s({ metric: "tx_btc_velocity", name: "bitcoin", }), - createBaseSeries({ + 
s({ metric: "tx_usd_velocity", name: "dollars", color: colors.emerald, @@ -3227,7 +3323,7 @@ export function createPartialOptions({ colors, brk }) { name: "Speed", title: "Transactions Per Second", bottom: [ - createBaseSeries({ + s({ metric: "tx_per_sec", name: "Transactions", }), @@ -3251,7 +3347,7 @@ export function createPartialOptions({ colors, brk }) { name: "Speed", title: "Inputs Per Second", bottom: [ - createBaseSeries({ + s({ metric: "inputs_per_sec", name: "Inputs", }), @@ -3283,7 +3379,7 @@ export function createPartialOptions({ colors, brk }) { name: "Speed", title: "Outputs Per Second", bottom: [ - createBaseSeries({ + s({ metric: "outputs_per_sec", name: "Outputs", }), @@ -3306,15 +3402,15 @@ export function createPartialOptions({ colors, brk }) { name: "Supply", title: "Circulating Supply", bottom: [ - createBaseSeries({ + s({ metric: "supply", name: "Mined", }), - createBaseSeries({ + s({ metric: "supply_btc", name: "Mined", }), - createBaseSeries({ + s({ metric: "supply_usd", name: "Mined", }), @@ -3357,7 +3453,7 @@ export function createPartialOptions({ colors, brk }) { name: "Subsidy", }, ), - createBaseSeries({ + s({ metric: "subsidy_usd_1y_sma", name: "1y sma", }), @@ -3394,12 +3490,12 @@ export function createPartialOptions({ colors, brk }) { name: "Dominance", title: "Reward Dominance", bottom: [ - createBaseSeries({ + s({ metric: "fee_dominance", name: "Fee", color: colors.amber, }), - createBaseSeries({ + s({ metric: "subsidy_dominance", name: "Subsidy", color: colors.red, @@ -3425,7 +3521,7 @@ export function createPartialOptions({ colors, brk }) { name: "Inflation", title: "Inflation Rate", bottom: [ - createBaseSeries({ + s({ metric: "inflation_rate", name: "Inflation", }), @@ -3435,7 +3531,7 @@ export function createPartialOptions({ colors, brk }) { name: "Puell multiple", title: "Puell multiple", bottom: [ - createBaseSeries({ + s({ metric: "puell_multiple", name: "Multiple", }), @@ -3455,15 +3551,15 @@ export function 
createPartialOptions({ colors, brk }) { name: "Halving", title: "Halving Epoch", bottom: [ - createBaseSeries({ + s({ metric: "halvingepoch", name: "Halving", }), - createBaseSeries({ + s({ metric: "blocks_before_next_halving", name: "Before next", }), - createBaseSeries({ + s({ metric: "days_before_next_halving", name: "Before next", }), @@ -3473,19 +3569,19 @@ export function createPartialOptions({ colors, brk }) { name: "Difficulty", title: "Difficulty", bottom: [ - createBaseSeries({ + s({ metric: "difficulty", name: "Value", }), - createBaseSeries({ + s({ metric: "difficultyepoch", name: "Difficulty", }), - createBaseSeries({ + s({ metric: "blocks_before_next_difficulty_adjustment", name: "Before next", }), - createBaseSeries({ + s({ metric: "days_before_next_difficulty_adjustment", name: "Before next", }), @@ -3509,35 +3605,35 @@ export function createPartialOptions({ colors, brk }) { name: "Rate", title: "Hash Rate", bottom: [ - createBaseSeries({ + s({ metric: "hash_rate", name: "Raw", }), - createBaseSeries({ + s({ metric: "hash_rate_1w_sma", name: "1w sma", color: colors.red, defaultActive: false, }), - createBaseSeries({ + s({ metric: "hash_rate_1m_sma", name: "1m sma", color: colors.pink, defaultActive: false, }), - createBaseSeries({ + s({ metric: "hash_rate_2m_sma", name: "2m sma", color: colors.purple, defaultActive: false, }), - createBaseSeries({ + s({ metric: "hash_rate_1y_sma", name: "1y sma", color: colors.indigo, defaultActive: false, }), - createBaseSeries({ + s({ metric: "difficulty_as_hash", name: "difficulty", color: colors.default, @@ -3551,22 +3647,22 @@ export function createPartialOptions({ colors, brk }) { name: "Price", title: "Hash Price", bottom: [ - createBaseSeries({ + s({ metric: "hash_price_ths", name: "Dollars", color: colors.emerald, }), - createBaseSeries({ + s({ metric: "hash_price_phs", name: "Dollars", color: colors.emerald, }), - createBaseSeries({ + s({ metric: "hash_price_rebound", name: "Rebound", color: colors.yellow, 
}), - createBaseSeries({ + s({ metric: "hash_price_ths_min", name: "Min", color: colors.red, @@ -3574,7 +3670,7 @@ export function createPartialOptions({ colors, brk }) { lineStyle: 1, }, }), - createBaseSeries({ + s({ metric: "hash_price_phs_min", name: "Min", color: colors.red, @@ -3588,22 +3684,22 @@ export function createPartialOptions({ colors, brk }) { name: "Value", title: "Hash Value", bottom: [ - createBaseSeries({ + s({ metric: "hash_value_ths", name: "Sats", color: colors.orange, }), - createBaseSeries({ + s({ metric: "hash_value_phs", name: "Sats", color: colors.orange, }), - createBaseSeries({ + s({ metric: "hash_value_rebound", name: "Rebound", color: colors.yellow, }), - createBaseSeries({ + s({ metric: "hash_value_ths_min", name: "Min", color: colors.red, @@ -3611,7 +3707,7 @@ export function createPartialOptions({ colors, brk }) { lineStyle: 1, }, }), - createBaseSeries({ + s({ metric: "hash_value_phs_min", name: "Min", color: colors.red, @@ -3625,9 +3721,10 @@ export function createPartialOptions({ colors, brk }) { }, { name: "Pools", - tree: Object.entries(brk.POOL_ID_TO_POOL_NAME).map( - ([_id, name]) => { - const id = /** @type {PoolId} */ (_id); + tree: entries(brk.tree.computed.pools.vecs).map( + ([key, pool]) => { + const name = + brk.POOL_ID_TO_POOL_NAME[key.toLowerCase()] || key; return { name, tree: [ @@ -3635,30 +3732,30 @@ export function createPartialOptions({ colors, brk }) { name: "Dominance", title: `Mining Dominance of ${name}`, bottom: [ - createBaseSeries({ - metric: `${id}_1d_dominance`, + s({ + metric: pool._1dDominance.base, name: "1d", color: colors.rose, defaultActive: false, }), - createBaseSeries({ - metric: `${id}_1w_dominance`, + s({ + metric: pool._1wDominance, name: "1w", color: colors.red, defaultActive: false, }), - createBaseSeries({ - metric: `${id}_1m_dominance`, + s({ + metric: pool._1mDominance, name: "1m", }), - createBaseSeries({ - metric: `${id}_1y_dominance`, + s({ + metric: pool._1yDominance, name: "1y", 
color: colors.lime, defaultActive: false, }), - createBaseSeries({ - metric: `${id}_dominance`, + s({ + metric: pool.dominance.base, name: "all time", color: colors.teal, defaultActive: false, @@ -3669,29 +3766,29 @@ export function createPartialOptions({ colors, brk }) { name: "Blocks mined", title: `Blocks mined by ${name}`, bottom: [ - createBaseSeries({ - metric: `${id}_blocks_mined`, + s({ + metric: pool.blocksMined.base, name: "Sum", }), - createBaseSeries({ - metric: `${id}_blocks_mined_cumulative`, + s({ + metric: pool.blocksMined.cumulative, name: "Cumulative", color: colors.blue, }), - createBaseSeries({ - metric: `${id}_1w_blocks_mined`, + s({ + metric: pool._1wBlocksMined, name: "1w Sum", color: colors.red, defaultActive: false, }), - createBaseSeries({ - metric: `${id}_1m_blocks_mined`, + s({ + metric: pool._1mBlocksMined, name: "1m Sum", color: colors.pink, defaultActive: false, }), - createBaseSeries({ - metric: `${id}_1y_blocks_mined`, + s({ + metric: pool._1yBlocksMined, name: "1y Sum", color: colors.purple, defaultActive: false, @@ -3703,37 +3800,45 @@ export function createPartialOptions({ colors, brk }) { title: `Rewards collected by ${name}`, bottom: [ { - metricAddon: "coinbase", + pattern: pool.coinbase, + label: "coinbase", cumulativeColor: colors.red, sumColor: colors.orange, }, { - metricAddon: "subsidy", + pattern: pool.subsidy, + label: "subsidy", cumulativeColor: colors.emerald, sumColor: colors.lime, }, { - metricAddon: "fee", + pattern: pool.fee, + label: "fee", cumulativeColor: colors.indigo, sumColor: colors.cyan, }, ].flatMap( - ({ metricAddon, sumColor, cumulativeColor }) => [ + ({ + pattern, + label, + sumColor, + cumulativeColor, + }) => [ ...createSumCumulativeSeries({ - metric: `${id}_${metricAddon}`, - common: metricAddon, + metric: pattern.sats, + common: label, sumColor, cumulativeColor, }), ...createSumCumulativeSeries({ - metric: `${id}_${metricAddon}_btc`, - common: metricAddon, + metric: pattern.bitcoin, + common: 
label, sumColor, cumulativeColor, }), ...createSumCumulativeSeries({ - metric: `${id}_${metricAddon}_usd`, - common: metricAddon, + metric: pattern.dollars, + common: label, sumColor, cumulativeColor, }), @@ -3744,8 +3849,8 @@ export function createPartialOptions({ colors, brk }) { name: "Days since block", title: `Days since ${name} mined a block`, bottom: [ - createBaseSeries({ - metric: `${id}_days_since_block`, + s({ + metric: pool.daysSinceBlock, name: "Since block", }), ], @@ -3764,15 +3869,15 @@ export function createPartialOptions({ colors, brk }) { name: "supply", title: "Unspendable Supply", bottom: [ - createBaseSeries({ + s({ metric: "unspendable_supply", name: "Supply", }), - createBaseSeries({ + s({ metric: "unspendable_supply_btc", name: "Supply", }), - createBaseSeries({ + s({ metric: "unspendable_supply_usd", name: "Supply", }), @@ -3785,15 +3890,15 @@ export function createPartialOptions({ colors, brk }) { name: "outputs", title: "op_return outputs", bottom: [ - createBaseSeries({ + s({ metric: "opreturn_count", name: "Count", }), - createBaseSeries({ + s({ metric: "opreturn_count", name: "sum", }), - createBaseSeries({ + s({ metric: "opreturn_count_cumulative", name: "cumulative", color: colors.red, @@ -3804,15 +3909,15 @@ export function createPartialOptions({ colors, brk }) { name: "supply", title: "OP_return Supply", bottom: [ - createBaseSeries({ + s({ metric: "opreturn_supply", name: "Supply", }), - createBaseSeries({ + s({ metric: "opreturn_supply_btc", name: "Supply", }), - createBaseSeries({ + s({ metric: "opreturn_supply_usd", name: "Supply", }), @@ -3971,24 +4076,27 @@ export function createPartialOptions({ colors, brk }) { { name: "Compare", title: "Compare Cointime Prices", - top: cointimePrices.map(({ metric, name, color }) => - createBaseSeries({ - metric, + top: cointimePrices.map(({ price, name, color }) => + s({ + metric: price, name, color, }), ), }, - ...cointimePrices.map(({ metric, name, color, title }) => ({ - name, - tree: 
createPriceWithRatioOptions({ - metric, - legend: name, - color, + ...cointimePrices.map( + ({ price, ratio, name, color, title }) => ({ name, - title, + tree: createPriceWithRatioOptions({ + price, + ratio, + legend: name, + color, + name, + title, + }), }), - })), + ), ], }, { @@ -3998,18 +4106,18 @@ export function createPartialOptions({ colors, brk }) { name: "Compare", title: "Compare Cointime Capitalizations", bottom: [ - createBaseSeries({ + s({ metric: `market_cap`, name: "Market", color: colors.default, }), - createBaseSeries({ + s({ metric: `realized_cap`, name: "Realized", color: colors.orange, }), ...cointimeCapitalizations.map(({ metric, name, color }) => - createBaseSeries({ + s({ metric, name, color, @@ -4022,17 +4130,17 @@ export function createPartialOptions({ colors, brk }) { name, title, bottom: [ - createBaseSeries({ + s({ metric, name, color, }), - createBaseSeries({ + s({ metric: `market_cap`, name: "Market", color: colors.default, }), - createBaseSeries({ + s({ metric: `realized_cap`, name: "Realized", color: colors.orange, @@ -4046,57 +4154,30 @@ export function createPartialOptions({ colors, brk }) { name: "Supply", title: "Cointime Supply", bottom: /** @type {const} */ ([ - { - name: "all", - color: colors.orange, - }, - { - name: "vaulted", - color: colors.lime, - }, - { name: "active", color: colors.rose }, - ]).flatMap( - ({ name, color }) => - /** @type {const} */ ([ - createBaseSeries({ - metric: `${ - name !== "all" ? /** @type {const} */ (`${name}_`) : "" - }supply`, - name, - color, - }), - createBaseSeries({ - metric: `${ - name !== "all" ? /** @type {const} */ (`${name}_`) : "" - }supply_btc`, - name, - color, - }), - createBaseSeries({ - metric: `${ - name !== "all" ? 
/** @type {const} */ (`${name}_`) : "" - }supply_usd`, - name, - color, - }), - ]), - ), + [utxoCohorts.all.supply.supply, "all", colors.orange], + [cointime.vaultedSupply, "vaulted", colors.lime], + [cointime.activeSupply, "active", colors.rose], + ]).flatMap(([supply, name, color]) => [ + s({ metric: supply.sats, name, color }), + s({ metric: supply.bitcoin, name, color }), + s({ metric: supply.dollars, name, color }), + ]), }, { name: "Liveliness & Vaultedness", title: "Liveliness & Vaultedness", bottom: [ - createBaseSeries({ + s({ metric: "liveliness", name: "Liveliness", color: colors.rose, }), - createBaseSeries({ + s({ metric: "vaultedness", name: "Vaultedness", color: colors.lime, }), - createBaseSeries({ + s({ metric: "activity_to_vaultedness_ratio", name: "Liveliness / Vaultedness", color: colors.purple, @@ -4107,34 +4188,34 @@ export function createPartialOptions({ colors, brk }) { name: "Coinblocks", title: "Coinblocks", bottom: [ - createBaseSeries({ + s({ metric: "coinblocks_destroyed", name: "Destroyed", color: colors.red, }), - createBaseSeries({ + s({ metric: "coinblocks_destroyed_cumulative", name: "Cumulative Destroyed", color: colors.red, defaultActive: false, }), - createBaseSeries({ + s({ metric: "coinblocks_created", name: "created", color: colors.orange, }), - createBaseSeries({ + s({ metric: "coinblocks_created_cumulative", name: "Cumulative created", color: colors.orange, defaultActive: false, }), - createBaseSeries({ + s({ metric: "coinblocks_stored", name: "stored", color: colors.green, }), - createBaseSeries({ + s({ metric: "coinblocks_stored_cumulative", name: "Cumulative stored", color: colors.green, @@ -4149,12 +4230,12 @@ export function createPartialOptions({ colors, brk }) { name: "inflation", title: "Cointime-Adjusted inflation rate", bottom: [ - createBaseSeries({ + s({ metric: "inflation_rate", name: "base", color: colors.orange, }), - createBaseSeries({ + s({ metric: "cointime_adj_inflation_rate", name: "base", color: 
colors.purple, @@ -4165,22 +4246,22 @@ export function createPartialOptions({ colors, brk }) { name: "Velocity", title: "Cointime-Adjusted transactions velocity", bottom: [ - createBaseSeries({ + s({ metric: "tx_btc_velocity", name: "btc", color: colors.orange, }), - createBaseSeries({ + s({ metric: "cointime_adj_tx_btc_velocity", name: "adj. btc", color: colors.red, }), - createBaseSeries({ + s({ metric: "tx_usd_velocity", name: "usd", color: colors.emerald, }), - createBaseSeries({ + s({ metric: "cointime_adj_tx_usd_velocity", name: "adj. usd", color: colors.lime, diff --git a/websites/bitview/scripts/options/partial/chain.js b/websites/bitview/scripts/options/partial/chain.js new file mode 100644 index 000000000..38379b1cf --- /dev/null +++ b/websites/bitview/scripts/options/partial/chain.js @@ -0,0 +1,485 @@ +/** Chain section builder - typed tree-based patterns */ + +/** + * Create Chain section + * @param {PartialContext} ctx + * @returns {PartialOptionsGroup} + */ +export function createChainSection(ctx) { + const { colors, brk, s, createPriceLine } = ctx; + const { blocks, transactions, pools, inputs, outputs, market, scripts, supply } = brk.tree.computed; + const { indexed } = brk.tree; + + /** + * Create sum/cumulative series from a BlockCountPattern + * @template T + * @param {BlockCountPattern} pattern + * @param {string} name + * @param {Color} [sumColor] + * @param {Color} [cumulativeColor] + * @param {Unit} unit + */ + const fromBlockCount = (pattern, name, unit, sumColor, cumulativeColor) => [ + s({ metric: pattern.base, name: `${name} sum`, color: sumColor, unit }), + s({ metric: pattern.cumulative, name: `${name} cumulative`, color: cumulativeColor ?? 
colors.blue, unit, defaultActive: false }), + ]; + + /** + * Create series from BlockSizePattern (has average, min, max, percentiles) + * @template T + * @param {BlockSizePattern} pattern + * @param {string} name + * @param {Unit} unit + */ + const fromBlockSize = (pattern, name, unit) => [ + s({ metric: pattern.average, name: `${name} avg`, unit }), + s({ metric: pattern.sum, name: `${name} sum`, color: colors.blue, unit, defaultActive: false }), + s({ metric: pattern.cumulative, name: `${name} cumulative`, color: colors.indigo, unit, defaultActive: false }), + s({ metric: pattern.min, name: `${name} min`, color: colors.red, unit, defaultActive: false }), + s({ metric: pattern.max, name: `${name} max`, color: colors.green, unit, defaultActive: false }), + s({ metric: pattern.pct10, name: `${name} pct10`, color: colors.rose, unit, defaultActive: false }), + s({ metric: pattern.pct25, name: `${name} pct25`, color: colors.pink, unit, defaultActive: false }), + s({ metric: pattern.median, name: `${name} median`, color: colors.purple, unit, defaultActive: false }), + s({ metric: pattern.pct75, name: `${name} pct75`, color: colors.violet, unit, defaultActive: false }), + s({ metric: pattern.pct90, name: `${name} pct90`, color: colors.fuchsia, unit, defaultActive: false }), + ]; + + /** + * Create series from BlockIntervalPattern (has average, min, max, percentiles) + * @template T + * @param {BlockIntervalPattern} pattern + * @param {string} name + * @param {Unit} unit + */ + const fromBlockInterval = (pattern, name, unit) => [ + s({ metric: pattern.average, name: `${name} avg`, unit }), + s({ metric: pattern.min, name: `${name} min`, color: colors.red, unit, defaultActive: false }), + s({ metric: pattern.max, name: `${name} max`, color: colors.green, unit, defaultActive: false }), + s({ metric: pattern.pct10, name: `${name} pct10`, color: colors.rose, unit, defaultActive: false }), + s({ metric: pattern.pct25, name: `${name} pct25`, color: colors.pink, unit, 
defaultActive: false }), + s({ metric: pattern.median, name: `${name} median`, color: colors.purple, unit, defaultActive: false }), + s({ metric: pattern.pct75, name: `${name} pct75`, color: colors.violet, unit, defaultActive: false }), + s({ metric: pattern.pct90, name: `${name} pct90`, color: colors.fuchsia, unit, defaultActive: false }), + ]; + + /** + * Create series from BitcoinPattern (has base, cumulative) + * @template T + * @param {BitcoinPattern} pattern + * @param {string} name + * @param {Unit} unit + * @param {Color} [sumColor] + * @param {Color} [cumulativeColor] + */ + const fromBitcoin = (pattern, name, unit, sumColor, cumulativeColor) => [ + s({ metric: pattern.base, name: `${name}`, color: sumColor, unit }), + s({ metric: pattern.cumulative, name: `${name} cumulative`, color: cumulativeColor ?? colors.blue, unit, defaultActive: false }), + ]; + + /** + * Create series from CoinbasePattern (has sats, bitcoin, dollars as BitcoinPattern) + * BitcoinPattern has .base and .cumulative (no .sum) + * @param {CoinbasePattern} pattern + * @param {string} name + * @param {Color} sumColor + * @param {Color} cumulativeColor + */ + const fromCoinbase = (pattern, name, sumColor, cumulativeColor) => [ + s({ metric: pattern.sats.base, name: `${name}`, color: sumColor, unit: "sats" }), + s({ metric: pattern.sats.cumulative, name: `${name} cumulative`, color: cumulativeColor, unit: "sats", defaultActive: false }), + s({ metric: pattern.bitcoin.base, name: `${name}`, color: sumColor, unit: "btc" }), + s({ metric: pattern.bitcoin.cumulative, name: `${name} cumulative`, color: cumulativeColor, unit: "btc", defaultActive: false }), + s({ metric: pattern.dollars.base, name: `${name}`, color: sumColor, unit: "usd" }), + s({ metric: pattern.dollars.cumulative, name: `${name} cumulative`, color: cumulativeColor, unit: "usd", defaultActive: false }), + ]; + + /** + * Create series from ValuePattern (has sats, bitcoin, dollars as BlockCountPattern) + * BlockCountPattern has 
.base, .sum, and .cumulative + * @param {ValuePattern} pattern + * @param {string} name + * @param {Color} sumColor + * @param {Color} cumulativeColor + */ + const fromValuePattern = (pattern, name, sumColor, cumulativeColor) => [ + s({ metric: pattern.sats.base, name: `${name}`, color: sumColor, unit: "sats" }), + s({ metric: pattern.sats.cumulative, name: `${name} cumulative`, color: cumulativeColor, unit: "sats", defaultActive: false }), + s({ metric: pattern.bitcoin.base, name: `${name}`, color: sumColor, unit: "btc" }), + s({ metric: pattern.bitcoin.cumulative, name: `${name} cumulative`, color: cumulativeColor, unit: "btc", defaultActive: false }), + s({ metric: pattern.dollars.base, name: `${name}`, color: sumColor, unit: "usd" }), + s({ metric: pattern.dollars.cumulative, name: `${name} cumulative`, color: cumulativeColor, unit: "usd", defaultActive: false }), + ]; + + /** + * Create series from RewardPattern (has .base as Indexes2, plus bitcoin/dollars as BlockCountPattern, sats as SatsPattern) + * Note: SatsPattern only has cumulative and sum, so we use pattern.base for raw sats + * @param {RewardPattern} pattern + * @param {string} name + * @param {Color} sumColor + * @param {Color} cumulativeColor + */ + const fromRewardPattern = (pattern, name, sumColor, cumulativeColor) => [ + s({ metric: pattern.base, name: `${name}`, color: sumColor, unit: "sats" }), + s({ metric: pattern.sats.cumulative, name: `${name} cumulative`, color: cumulativeColor, unit: "sats", defaultActive: false }), + s({ metric: pattern.bitcoin.base, name: `${name}`, color: sumColor, unit: "btc" }), + s({ metric: pattern.bitcoin.cumulative, name: `${name} cumulative`, color: cumulativeColor, unit: "btc", defaultActive: false }), + s({ metric: pattern.dollars.base, name: `${name}`, color: sumColor, unit: "usd" }), + s({ metric: pattern.dollars.cumulative, name: `${name} cumulative`, color: cumulativeColor, unit: "usd", defaultActive: false }), + ]; + + // Build pools tree dynamically + 
const poolEntries = Object.entries(pools.vecs); + const poolsTree = poolEntries.map(([key, pool]) => { + const poolName = brk.POOL_ID_TO_POOL_NAME[/** @type {keyof typeof brk.POOL_ID_TO_POOL_NAME} */ (key.toLowerCase())] || key; + return { + name: poolName, + tree: [ + { + name: "Dominance", + title: `Mining Dominance of ${poolName}`, + bottom: [ + s({ metric: pool._1dDominance.base, name: "1d", color: colors.rose, unit: "percentage", defaultActive: false }), + s({ metric: pool._1wDominance, name: "1w", color: colors.red, unit: "percentage", defaultActive: false }), + s({ metric: pool._1mDominance, name: "1m", unit: "percentage" }), + s({ metric: pool._1yDominance, name: "1y", color: colors.lime, unit: "percentage", defaultActive: false }), + s({ metric: pool.dominance.base, name: "all time", color: colors.teal, unit: "percentage", defaultActive: false }), + ], + }, + { + name: "Blocks mined", + title: `Blocks mined by ${poolName}`, + bottom: [ + s({ metric: pool.blocksMined.base, name: "Sum", unit: "count" }), + s({ metric: pool.blocksMined.cumulative, name: "Cumulative", color: colors.blue, unit: "count" }), + s({ metric: pool._1wBlocksMined, name: "1w Sum", color: colors.red, unit: "count", defaultActive: false }), + s({ metric: pool._1mBlocksMined, name: "1m Sum", color: colors.pink, unit: "count", defaultActive: false }), + s({ metric: pool._1yBlocksMined, name: "1y Sum", color: colors.purple, unit: "count", defaultActive: false }), + ], + }, + { + name: "Rewards", + title: `Rewards collected by ${poolName}`, + bottom: [ + ...fromValuePattern(pool.coinbase, "coinbase", colors.orange, colors.red), + ...fromRewardPattern(pool.subsidy, "subsidy", colors.lime, colors.emerald), + ...fromRewardPattern(pool.fee, "fee", colors.cyan, colors.indigo), + ], + }, + { + name: "Days since block", + title: `Days since ${poolName} mined a block`, + bottom: [ + s({ metric: pool.daysSinceBlock, name: "Since block", unit: "days" }), + ], + }, + ], + }; + }); + + return { + name: 
"Chain", + tree: [ + // Block + { + name: "Block", + tree: [ + { + name: "Count", + title: "Block Count", + bottom: [ + ...fromBlockCount(blocks.count.blockCount, "Block", "count"), + s({ metric: blocks.count.blockCountTarget, name: "Target", color: colors.gray, unit: "count", options: { lineStyle: 4 } }), + s({ metric: blocks.count._1wBlockCount, name: "1w sum", color: colors.red, unit: "count", defaultActive: false }), + s({ metric: blocks.count._1mBlockCount, name: "1m sum", color: colors.pink, unit: "count", defaultActive: false }), + s({ metric: blocks.count._1yBlockCount, name: "1y sum", color: colors.purple, unit: "count", defaultActive: false }), + ], + }, + { + name: "Interval", + title: "Block Interval", + bottom: [ + s({ metric: blocks.interval.interval, name: "Interval", unit: "secs" }), + ...fromBlockInterval(blocks.interval.blockInterval, "Interval", "secs"), + createPriceLine({ unit: "secs", name: "Target", number: 600 }), + ], + }, + { + name: "Size", + title: "Block Size", + bottom: [ + s({ metric: blocks.size.vbytes, name: "vbytes raw", unit: "vb" }), + s({ metric: indexed.block.weight, name: "weight raw", unit: "wu" }), + ...fromBlockSize(blocks.size.blockSize, "size", "bytes"), + ...fromBlockSize(blocks.weight.blockWeight, "weight", "wu"), + ...fromBlockSize(blocks.size.blockVbytes, "vbytes", "vb"), + ], + }, + ], + }, + + // Transaction + { + name: "Transaction", + tree: [ + { + name: "Count", + title: "Transaction Count", + bottom: fromBitcoin(transactions.count.txCount, "Count", "count"), + }, + { + name: "Volume", + title: "Transaction Volume", + bottom: [ + s({ metric: transactions.volume.sentSum.sats, name: "Sent", unit: "sats" }), + s({ metric: transactions.volume.sentSum.bitcoin.base, name: "Sent", unit: "btc" }), + s({ metric: transactions.volume.sentSum.dollars, name: "Sent", unit: "usd" }), + s({ metric: transactions.volume.annualizedVolume, name: "annualized", color: colors.red, unit: "sats", defaultActive: false }), + s({ metric: 
transactions.volume.annualizedVolumeBtc, name: "annualized", color: colors.red, unit: "btc", defaultActive: false }), + s({ metric: transactions.volume.annualizedVolumeUsd, name: "annualized", color: colors.lime, unit: "usd", defaultActive: false }), + ], + }, + { + name: "Size", + title: "Transaction Size", + bottom: [ + ...fromBlockInterval(transactions.size.txWeight, "weight", "wu"), + ...fromBlockInterval(transactions.size.txVsize, "vsize", "vb"), + ], + }, + { + name: "Versions", + title: "Transaction Versions", + bottom: [ + ...fromBlockCount(transactions.versions.txV1, "v1", "count", colors.orange, colors.red), + ...fromBlockCount(transactions.versions.txV2, "v2", "count", colors.cyan, colors.blue), + ...fromBlockCount(transactions.versions.txV3, "v3", "count", colors.lime, colors.green), + ], + }, + { + name: "Velocity", + title: "Transactions Velocity", + bottom: [ + s({ metric: supply.velocity.btc, name: "bitcoin", unit: "ratio" }), + s({ metric: supply.velocity.usd, name: "dollars", color: colors.emerald, unit: "ratio" }), + ], + }, + { + name: "Speed", + title: "Transactions Per Second", + bottom: [ + s({ metric: transactions.volume.txPerSec, name: "Transactions", unit: "/sec" }), + ], + }, + ], + }, + + // Input + { + name: "Input", + tree: [ + { + name: "Count", + title: "Transaction Input Count", + bottom: [ + ...fromBlockSize(inputs.count.count, "Input", "count"), + ], + }, + { + name: "Speed", + title: "Inputs Per Second", + bottom: [ + s({ metric: transactions.volume.inputsPerSec, name: "Inputs", unit: "/sec" }), + ], + }, + ], + }, + + // Output + { + name: "Output", + tree: [ + { + name: "Count", + title: "Transaction Output Count", + bottom: [ + ...fromBlockSize(outputs.count.count, "Output", "count"), + ], + }, + { + name: "Speed", + title: "Outputs Per Second", + bottom: [ + s({ metric: transactions.volume.outputsPerSec, name: "Outputs", unit: "/sec" }), + ], + }, + ], + }, + + // UTXO + { + name: "UTXO", + tree: [ + { + name: "Count", + 
title: "UTXO Count", + bottom: [ + s({ metric: outputs.count.utxoCount.base, name: "Count", unit: "count" }), + ], + }, + ], + }, + + // Coinbase + { + name: "Coinbase", + title: "Coinbase Rewards", + bottom: fromCoinbase(blocks.rewards.coinbase, "Coinbase", colors.orange, colors.red), + }, + + // Subsidy + { + name: "Subsidy", + title: "Block Subsidy", + bottom: [ + ...fromCoinbase(blocks.rewards.subsidy, "Subsidy", colors.lime, colors.emerald), + s({ metric: blocks.rewards.subsidyDominance, name: "Dominance", color: colors.purple, unit: "percentage", defaultActive: false }), + ], + }, + + // Fee + { + name: "Fee", + tree: [ + { + name: "Total", + title: "Transaction Fees", + bottom: [ + s({ metric: transactions.fees.fee.sats.sum, name: "Sum", unit: "sats" }), + s({ metric: transactions.fees.fee.sats.cumulative, name: "Cumulative", color: colors.blue, unit: "sats", defaultActive: false }), + s({ metric: transactions.fees.fee.bitcoin.sum, name: "Sum", unit: "btc" }), + s({ metric: transactions.fees.fee.bitcoin.cumulative, name: "Cumulative", color: colors.blue, unit: "btc", defaultActive: false }), + s({ metric: transactions.fees.fee.dollars.sum, name: "Sum", unit: "usd" }), + s({ metric: transactions.fees.fee.dollars.cumulative, name: "Cumulative", color: colors.blue, unit: "usd", defaultActive: false }), + s({ metric: blocks.rewards.feeDominance, name: "Dominance", color: colors.purple, unit: "percentage", defaultActive: false }), + ], + }, + { + name: "Rate", + title: "Fee Rate", + bottom: [ + s({ metric: transactions.fees.feeRate.base, name: "Rate", unit: "sat/vb" }), + s({ metric: transactions.fees.feeRate.average, name: "Average", color: colors.blue, unit: "sat/vb" }), + s({ metric: transactions.fees.feeRate.median, name: "Median", color: colors.purple, unit: "sat/vb" }), + s({ metric: transactions.fees.feeRate.min, name: "Min", color: colors.red, unit: "sat/vb", defaultActive: false }), + s({ metric: transactions.fees.feeRate.max, name: "Max", color: 
colors.green, unit: "sat/vb", defaultActive: false }), + s({ metric: transactions.fees.feeRate.pct10, name: "pct10", color: colors.rose, unit: "sat/vb", defaultActive: false }), + s({ metric: transactions.fees.feeRate.pct25, name: "pct25", color: colors.pink, unit: "sat/vb", defaultActive: false }), + s({ metric: transactions.fees.feeRate.pct75, name: "pct75", color: colors.violet, unit: "sat/vb", defaultActive: false }), + s({ metric: transactions.fees.feeRate.pct90, name: "pct90", color: colors.fuchsia, unit: "sat/vb", defaultActive: false }), + ], + }, + ], + }, + + // Mining + { + name: "Mining", + tree: [ + { + name: "Hashrate", + title: "Network Hashrate", + bottom: [ + s({ metric: blocks.mining.hashRate, name: "Hashrate", unit: "h/s" }), + s({ metric: blocks.mining.hashRate1wSma, name: "1w SMA", color: colors.red, unit: "h/s", defaultActive: false }), + s({ metric: blocks.mining.hashRate1mSma, name: "1m SMA", color: colors.orange, unit: "h/s", defaultActive: false }), + s({ metric: blocks.mining.hashRate2mSma, name: "2m SMA", color: colors.yellow, unit: "h/s", defaultActive: false }), + s({ metric: blocks.mining.hashRate1ySma, name: "1y SMA", color: colors.lime, unit: "h/s", defaultActive: false }), + ], + }, + { + name: "Difficulty", + title: "Network Difficulty", + bottom: [ + s({ metric: blocks.mining.difficulty, name: "Difficulty", unit: "difficulty" }), + s({ metric: blocks.mining.difficultyAdjustment, name: "Adjustment", color: colors.orange, unit: "percentage", defaultActive: false }), + s({ metric: blocks.mining.difficultyAsHash, name: "As hash", color: colors.default, unit: "h/s", defaultActive: false, options: { lineStyle: 1 } }), + s({ metric: blocks.difficulty.blocksBeforeNextDifficultyAdjustment, name: "Blocks until adj.", color: colors.indigo, unit: "blocks", defaultActive: false }), + s({ metric: blocks.difficulty.daysBeforeNextDifficultyAdjustment, name: "Days until adj.", color: colors.purple, unit: "days", defaultActive: false }), + ], + }, 
+ { + name: "Hash Price", + title: "Hash Price", + bottom: [ + s({ metric: blocks.mining.hashPriceThs, name: "TH/s", color: colors.emerald, unit: "usd/(th/s)/day" }), + s({ metric: blocks.mining.hashPricePhs, name: "PH/s", color: colors.emerald, unit: "usd/(ph/s)/day" }), + s({ metric: blocks.mining.hashPriceRebound, name: "Rebound", color: colors.yellow, unit: "percentage" }), + s({ metric: blocks.mining.hashPriceThsMin, name: "TH/s Min", color: colors.red, unit: "usd/(th/s)/day", options: { lineStyle: 1 } }), + s({ metric: blocks.mining.hashPricePhsMin, name: "PH/s Min", color: colors.red, unit: "usd/(ph/s)/day", options: { lineStyle: 1 } }), + ], + }, + { + name: "Hash Value", + title: "Hash Value", + bottom: [ + s({ metric: blocks.mining.hashValueThs, name: "TH/s", color: colors.orange, unit: "sats/(th/s)/day" }), + s({ metric: blocks.mining.hashValuePhs, name: "PH/s", color: colors.orange, unit: "sats/(ph/s)/day" }), + s({ metric: blocks.mining.hashValueRebound, name: "Rebound", color: colors.yellow, unit: "percentage" }), + s({ metric: blocks.mining.hashValueThsMin, name: "TH/s Min", color: colors.red, unit: "sats/(th/s)/day", options: { lineStyle: 1 } }), + s({ metric: blocks.mining.hashValuePhsMin, name: "PH/s Min", color: colors.red, unit: "sats/(ph/s)/day", options: { lineStyle: 1 } }), + ], + }, + { + name: "Halving", + title: "Halving Info", + bottom: [ + s({ metric: blocks.halving.blocksBeforeNextHalving, name: "Blocks until halving", unit: "blocks" }), + s({ metric: blocks.halving.daysBeforeNextHalving, name: "Days until halving", color: colors.orange, unit: "days" }), + s({ metric: blocks.halving.halvingepoch, name: "Halving epoch", color: colors.purple, unit: "epoch", defaultActive: false }), + ], + }, + { + name: "Puell Multiple", + title: "Puell Multiple", + bottom: [ + s({ metric: market.indicators.puellMultiple, name: "Puell Multiple", unit: "ratio" }), + createPriceLine({ unit: "ratio", number: 1 }), + ], + }, + ], + }, + + // Pools + { + name: 
"Pools", + tree: poolsTree, + }, + + // Unspendable + { + name: "Unspendable", + tree: [ + { + name: "OP_RETURN", + tree: [ + { + name: "Outputs", + title: "OP_RETURN Outputs", + bottom: fromBitcoin(scripts.count.opreturnCount, "Count", "count"), + }, + ], + }, + ], + }, + + // Inflation + { + name: "Inflation", + title: "Inflation Rate", + bottom: [ + s({ metric: supply.inflation.indexes, name: "Rate", unit: "percentage" }), + ], + }, + ], + }; +} diff --git a/websites/bitview/scripts/options/partial/cohorts/address.js b/websites/bitview/scripts/options/partial/cohorts/address.js new file mode 100644 index 000000000..f23f62e70 --- /dev/null +++ b/websites/bitview/scripts/options/partial/cohorts/address.js @@ -0,0 +1,313 @@ +/** + * Address cohort folder builder + * Creates option trees for address-based cohorts (has addrCount) + * Address cohorts use _0satsPattern which has PricePaidPattern (no percentiles) + */ + +import { + createSingleSupplySeries, + createGroupedSupplyTotalSeries, + createGroupedSupplyInProfitSeries, + createGroupedSupplyInLossSeries, + createUtxoCountSeries, + createAddressCountSeries, + createRealizedPriceSeries, + createRealizedPriceRatioSeries, +} from "./shared.js"; + +/** + * Create a cohort folder for address cohorts + * Includes address count section (addrCount exists on AddressCohortObject) + * @param {PartialContext} ctx + * @param {AddressCohortObject | AddressCohortGroupObject} args + * @returns {PartialOptionsGroup} + */ +export function createAddressCohortFolder(ctx, args) { + const list = "list" in args ? args.list : [args]; + const useGroupName = "list" in args; + const isSingle = !("list" in args); + + const title = args.title ? `${useGroupName ? "by" : "of"} ${args.title}` : ""; + + return { + name: args.name || "all", + tree: [ + // Supply section + isSingle + ? 
{ + name: "supply", + title: `Supply ${title}`, + bottom: createSingleSupplySeries(ctx, /** @type {AddressCohortObject} */ (args), title), + } + : { + name: "supply", + tree: [ + { + name: "total", + title: `Supply ${title}`, + bottom: createGroupedSupplyTotalSeries(ctx, list), + }, + { + name: "in profit", + title: `Supply In Profit ${title}`, + bottom: createGroupedSupplyInProfitSeries(ctx, list), + }, + { + name: "in loss", + title: `Supply In Loss ${title}`, + bottom: createGroupedSupplyInLossSeries(ctx, list), + }, + ], + }, + + // UTXO count + { + name: "utxo count", + title: `UTXO Count ${title}`, + bottom: createUtxoCountSeries(ctx, list, useGroupName), + }, + + // Address count (ADDRESS COHORTS ONLY - fully type safe!) + { + name: "address count", + title: `Address Count ${title}`, + bottom: createAddressCountSeries(ctx, list, useGroupName), + }, + + // Realized section + { + name: "Realized", + tree: [ + ...(useGroupName + ? [ + { + name: "Price", + title: `Realized Price ${title}`, + top: createRealizedPriceSeries(ctx, list), + }, + { + name: "Ratio", + title: `Realized Price Ratio ${title}`, + bottom: createRealizedPriceRatioSeries(ctx, list), + }, + ] + : createRealizedPriceOptions(ctx, /** @type {AddressCohortObject} */ (args), title)), + { + name: "capitalization", + title: `Realized Capitalization ${title}`, + bottom: createRealizedCapWithExtras(ctx, list, args, useGroupName, title), + }, + ...(!useGroupName ? 
createRealizedPnlSection(ctx, /** @type {AddressCohortObject} */ (args), title) : []), + ], + }, + + // Unrealized section + ...createUnrealizedSection(ctx, list, useGroupName, title), + + // Price paid section (no percentiles for address cohorts) + ...createPricePaidSection(ctx, list, useGroupName, title), + + // Activity section + ...createActivitySection(ctx, list, useGroupName, title), + ], + }; +} + +/** + * Create realized price options for single cohort + * @param {PartialContext} ctx + * @param {AddressCohortObject} args + * @param {string} title + * @returns {PartialOptionsTree} + */ +function createRealizedPriceOptions(ctx, args, title) { + const { s } = ctx; + const { tree, color } = args; + + return [ + { + name: "price", + title: `Realized Price ${title}`, + top: [s({ metric: tree.realized.realizedPrice, name: "realized", color })], + }, + ]; +} + +/** + * Create realized cap with extras + * @param {PartialContext} ctx + * @param {readonly AddressCohortObject[]} list + * @param {AddressCohortObject | AddressCohortGroupObject} args + * @param {boolean} useGroupName + * @param {string} title + * @returns {AnyFetchedSeriesBlueprint[]} + */ +function createRealizedCapWithExtras(ctx, list, args, useGroupName, title) { + const { colors, s, createPriceLine } = ctx; + const isSingle = !("list" in args); + + return list.flatMap(({ color, name, tree }) => [ + s({ metric: tree.realized.realizedCap, name: useGroupName ? name : "Capitalization", color }), + ...(isSingle + ? 
[ + /** @type {AnyFetchedSeriesBlueprint} */ ({ + type: "Baseline", + metric: tree.realized.realizedCap30dDelta, + title: "30d change", + defaultActive: false, + }), + createPriceLine({ unit: "usd", defaultActive: false }), + ] + : []), + // RealizedPattern (address cohorts) doesn't have realizedCapRelToOwnMarketCap + ]); +} + +/** + * Create realized PnL section for single cohort + * @param {PartialContext} ctx + * @param {AddressCohortObject} args + * @param {string} title + * @returns {PartialOptionsTree} + */ +function createRealizedPnlSection(ctx, args, title) { + const { colors, s } = ctx; + const { tree } = args; + + return [ + { + name: "pnl", + title: `Realized Profit And Loss ${title}`, + bottom: [ + s({ metric: tree.realized.realizedProfit.base, name: "Profit", color: colors.green }), + s({ metric: tree.realized.realizedLoss.base, name: "Loss", color: colors.red, defaultActive: false }), + // RealizedPattern (address cohorts) doesn't have realizedProfitToLossRatio + s({ metric: tree.realized.totalRealizedPnl.base, name: "Total", color: colors.default, defaultActive: false }), + s({ metric: tree.realized.negRealizedLoss.base, name: "Negative Loss", color: colors.red }), + ], + }, + ]; +} + +/** + * Create unrealized section + * @param {PartialContext} ctx + * @param {readonly AddressCohortObject[]} list + * @param {boolean} useGroupName + * @param {string} title + * @returns {PartialOptionsTree} + */ +function createUnrealizedSection(ctx, list, useGroupName, title) { + const { colors, s } = ctx; + + return [ + { + name: "Unrealized", + tree: [ + { + name: "nupl", + title: `Net Unrealized Profit/Loss ${title}`, + bottom: list.flatMap(({ color, name, tree }) => [ + /** @type {AnyFetchedSeriesBlueprint} */ ({ + type: "Baseline", + metric: tree.unrealized.netUnrealizedPnl, + title: useGroupName ? 
name : "NUPL", + colors: [colors.red, colors.green], + options: { baseValue: { price: 0 } }, + }), + ]), + }, + { + name: "profit", + title: `Unrealized Profit ${title}`, + bottom: list.flatMap(({ color, name, tree }) => [ + s({ metric: tree.unrealized.unrealizedProfit, name: useGroupName ? name : "Profit", color }), + ]), + }, + { + name: "loss", + title: `Unrealized Loss ${title}`, + bottom: list.flatMap(({ color, name, tree }) => [ + s({ metric: tree.unrealized.unrealizedLoss, name: useGroupName ? name : "Loss", color }), + ]), + }, + ], + }, + ]; +} + +/** + * Create price paid section (no percentiles for address cohorts) + * @param {PartialContext} ctx + * @param {readonly AddressCohortObject[]} list + * @param {boolean} useGroupName + * @param {string} title + * @returns {PartialOptionsTree} + */ +function createPricePaidSection(ctx, list, useGroupName, title) { + const { s } = ctx; + + return [ + { + name: "Price Paid", + tree: [ + { + name: "min", + title: `Min Price Paid ${title}`, + top: list.map(({ color, name, tree }) => + s({ metric: tree.pricePaid.minPricePaid, name: useGroupName ? name : "Min", color }), + ), + }, + { + name: "max", + title: `Max Price Paid ${title}`, + top: list.map(({ color, name, tree }) => + s({ metric: tree.pricePaid.maxPricePaid, name: useGroupName ? name : "Max", color }), + ), + }, + ], + }, + ]; +} + +/** + * Create activity section + * @param {PartialContext} ctx + * @param {readonly AddressCohortObject[]} list + * @param {boolean} useGroupName + * @param {string} title + * @returns {PartialOptionsTree} + */ +function createActivitySection(ctx, list, useGroupName, title) { + const { s } = ctx; + + return [ + { + name: "Activity", + tree: [ + { + name: "coinblocks destroyed", + title: `Coinblocks Destroyed ${title}`, + bottom: list.flatMap(({ color, name, tree }) => [ + s({ + metric: tree.activity.coinblocksDestroyed.base, + name: useGroupName ? 
name : "Coinblocks", + color, + }), + ]), + }, + { + name: "coindays destroyed", + title: `Coindays Destroyed ${title}`, + bottom: list.flatMap(({ color, name, tree }) => [ + s({ + metric: tree.activity.coindaysDestroyed.base, + name: useGroupName ? name : "Coindays", + color, + }), + ]), + }, + ], + }, + ]; +} diff --git a/websites/bitview/scripts/options/partial/cohorts/data.js b/websites/bitview/scripts/options/partial/cohorts/data.js new file mode 100644 index 000000000..1d1a1c400 --- /dev/null +++ b/websites/bitview/scripts/options/partial/cohorts/data.js @@ -0,0 +1,227 @@ +/** Build cohort data arrays from brk.tree */ + +import { + termColors, + maxAgeColors, + minAgeColors, + ageRangeColors, + epochColors, + geAmountColors, + ltAmountColors, + amountRangeColors, + spendableTypeColors, +} from "../colors/index.js"; + +/** + * @template {Record} T + * @param {T} obj + * @returns {[keyof T & string, T[keyof T & string]][]} + */ +const entries = (obj) => /** @type {[keyof T & string, T[keyof T & string]][]} */ (Object.entries(obj)); + +/** + * Build all cohort data from brk tree + * @param {Colors} colors + * @param {BrkClient} brk + */ +export function buildCohortData(colors, brk) { + const utxoCohorts = brk.tree.computed.distribution.utxoCohorts; + const addressCohorts = brk.tree.computed.distribution.addressCohorts; + const { + TERM_NAMES, + EPOCH_NAMES, + MAX_AGE_NAMES, + MIN_AGE_NAMES, + AGE_RANGE_NAMES, + GE_AMOUNT_NAMES, + LT_AMOUNT_NAMES, + AMOUNT_RANGE_NAMES, + SPENDABLE_TYPE_NAMES, + } = brk; + + // Base cohort representing "all" + /** @type {UtxoCohortObject} */ + const cohortAll = { + name: "", + title: "", + color: colors.orange, + tree: utxoCohorts.all, + }; + + /** @type {UtxoCohortObject} */ + const cohortAllForComparison = { + name: "all", + title: "", + color: colors.default, + tree: utxoCohorts.all, + }; + + // Term cohorts (short/long term holders) + /** @type {readonly UtxoCohortObject[]} */ + const terms = 
entries(utxoCohorts.term).map(([key, tree]) => { + const names = TERM_NAMES[key]; + return { + name: names.short, + title: names.long, + color: colors[termColors[key]], + tree, + }; + }); + + // Max age cohorts (up to X time) + /** @type {readonly UtxoCohortObject[]} */ + const upToDate = entries(utxoCohorts.maxAge).map(([key, tree]) => { + const names = MAX_AGE_NAMES[key]; + return { + name: names.short, + title: names.long, + color: colors[maxAgeColors[key]], + tree, + }; + }); + + // Min age cohorts (from X time) + /** @type {readonly UtxoCohortObject[]} */ + const fromDate = entries(utxoCohorts.minAge).map(([key, tree]) => { + const names = MIN_AGE_NAMES[key]; + return { + name: names.short, + title: names.long, + color: colors[minAgeColors[key]], + tree, + }; + }); + + // Age range cohorts + /** @type {readonly UtxoCohortObject[]} */ + const dateRange = entries(utxoCohorts.ageRange).map(([key, tree]) => { + const names = AGE_RANGE_NAMES[key]; + return { + name: names.short, + title: names.long, + color: colors[ageRangeColors[key]], + tree, + }; + }); + + // Epoch cohorts + /** @type {readonly UtxoCohortObject[]} */ + const epoch = entries(utxoCohorts.epoch).map(([key, tree]) => { + const names = EPOCH_NAMES[key]; + return { + name: names.short, + title: names.long, + color: colors[epochColors[key]], + tree, + }; + }); + + // UTXOs above amount + /** @type {readonly UtxoCohortObject[]} */ + const utxosAboveAmount = entries(utxoCohorts.geAmount).map(([key, tree]) => { + const names = GE_AMOUNT_NAMES[key]; + return { + name: names.short, + title: names.long, + color: colors[geAmountColors[key]], + tree, + }; + }); + + // Addresses above amount + /** @type {readonly AddressCohortObject[]} */ + const addressesAboveAmount = entries(addressCohorts.geAmount).map( + ([key, tree]) => { + const names = GE_AMOUNT_NAMES[key]; + return { + name: names.short, + title: names.long, + color: colors[geAmountColors[key]], + tree, + }; + }, + ); + + // UTXOs under amount + /** 
@type {readonly UtxoCohortObject[]} */ + const utxosUnderAmount = entries(utxoCohorts.ltAmount).map(([key, tree]) => { + const names = LT_AMOUNT_NAMES[key]; + return { + name: names.short, + title: names.long, + color: colors[ltAmountColors[key]], + tree, + }; + }); + + // Addresses under amount + /** @type {readonly AddressCohortObject[]} */ + const addressesUnderAmount = entries(addressCohorts.ltAmount).map( + ([key, tree]) => { + const names = LT_AMOUNT_NAMES[key]; + return { + name: names.short, + title: names.long, + color: colors[ltAmountColors[key]], + tree, + }; + }, + ); + + // UTXOs amount ranges + /** @type {readonly UtxoCohortObject[]} */ + const utxosAmountRanges = entries(utxoCohorts.amountRange).map( + ([key, tree]) => { + const names = AMOUNT_RANGE_NAMES[key]; + return { + name: names.short, + title: names.long, + color: colors[amountRangeColors[key]], + tree, + }; + }, + ); + + // Addresses amount ranges + /** @type {readonly AddressCohortObject[]} */ + const addressesAmountRanges = entries(addressCohorts.amountRange).map( + ([key, tree]) => { + const names = AMOUNT_RANGE_NAMES[key]; + return { + name: names.short, + title: names.long, + color: colors[amountRangeColors[key]], + tree, + }; + }, + ); + + // Spendable type cohorts + /** @type {readonly UtxoCohortObject[]} */ + const type = entries(utxoCohorts.type).map(([key, tree]) => { + const names = SPENDABLE_TYPE_NAMES[key]; + return { + name: names.short, + title: names.long, + color: colors[spendableTypeColors[key]], + tree, + }; + }); + + return { + cohortAll, + cohortAllForComparison, + terms, + upToDate, + fromDate, + dateRange, + epoch, + utxosAboveAmount, + addressesAboveAmount, + utxosUnderAmount, + addressesUnderAmount, + utxosAmountRanges, + addressesAmountRanges, + type, + }; +} diff --git a/websites/bitview/scripts/options/partial/cohorts/index.js b/websites/bitview/scripts/options/partial/cohorts/index.js new file mode 100644 index 000000000..6437b415f --- /dev/null +++ 
b/websites/bitview/scripts/options/partial/cohorts/index.js @@ -0,0 +1,25 @@ +/** + * Cohort module - exports all cohort-related functionality + */ + +// Cohort data builder +export { buildCohortData } from "./data.js"; + +// Cohort folder builders (type-safe!) +export { createUtxoCohortFolder, createAgeCohortFolder, createAmountCohortFolder } from "./utxo.js"; +export { createAddressCohortFolder } from "./address.js"; + +// Shared helpers +export { + createSingleSupplySeries, + createGroupedSupplyTotalSeries, + createGroupedSupplyInProfitSeries, + createGroupedSupplyInLossSeries, + createUtxoCountSeries, + createAddressCountSeries, + createRealizedPriceSeries, + createRealizedPriceRatioSeries, + createRealizedCapSeries, + createPricePaidMinMaxSeries, + createPricePercentilesSeries, +} from "./shared.js"; diff --git a/websites/bitview/scripts/options/partial/cohorts/shared.js b/websites/bitview/scripts/options/partial/cohorts/shared.js new file mode 100644 index 000000000..365a02c14 --- /dev/null +++ b/websites/bitview/scripts/options/partial/cohorts/shared.js @@ -0,0 +1,216 @@ +/** Shared cohort chart section builders */ + +/** + * Create supply section for a single cohort + * @param {PartialContext} ctx + * @param {CohortObject} cohort + * @param {string} title + * @returns {AnyFetchedSeriesBlueprint[]} + */ +export function createSingleSupplySeries(ctx, cohort, title) { + const { colors, s, createPriceLine } = ctx; + const { tree, color, name } = cohort; + + return [ + s({ metric: tree.supply.supply.sats, name: "Supply", color: colors.default }), + s({ metric: tree.supply.supply.bitcoin, name: "Supply", color: colors.default }), + s({ metric: tree.supply.supply.dollars, name: "Supply", color: colors.default }), + ...("supplyRelToCirculatingSupply" in tree.relative + ? 
[s({ metric: tree.relative.supplyRelToCirculatingSupply, name: "Supply", color: colors.default })] + : []), + s({ metric: tree.unrealized.supplyInProfit.sats, name: "In Profit", color: colors.green }), + s({ metric: tree.unrealized.supplyInProfit.bitcoin, name: "In Profit", color: colors.green }), + s({ metric: tree.unrealized.supplyInProfit.dollars, name: "In Profit", color: colors.green }), + s({ metric: tree.unrealized.supplyInLoss.sats, name: "In Loss", color: colors.red }), + s({ metric: tree.unrealized.supplyInLoss.bitcoin, name: "In Loss", color: colors.red }), + s({ metric: tree.unrealized.supplyInLoss.dollars, name: "In Loss", color: colors.red }), + s({ metric: tree.supply.supplyHalf.sats, name: "half", color: colors.gray, options: { lineStyle: 4 } }), + s({ metric: tree.supply.supplyHalf.bitcoin, name: "half", color: colors.gray, options: { lineStyle: 4 } }), + s({ metric: tree.supply.supplyHalf.dollars, name: "half", color: colors.gray, options: { lineStyle: 4 } }), + ...("supplyInProfitRelToCirculatingSupply" in tree.relative + ? 
[ + s({ metric: tree.relative.supplyInProfitRelToCirculatingSupply, name: "In Profit", color: colors.green }), + s({ metric: tree.relative.supplyInLossRelToCirculatingSupply, name: "In Loss", color: colors.red }), + ] + : []), + s({ metric: tree.relative.supplyInProfitRelToOwnSupply, name: "In Profit", color: colors.green }), + s({ metric: tree.relative.supplyInLossRelToOwnSupply, name: "In Loss", color: colors.red }), + createPriceLine({ unit: "%self", number: 100, lineStyle: 0, color: colors.default }), + createPriceLine({ unit: "%self", number: 50 }), + ]; +} + +/** + * Create supply total series for grouped cohorts + * @param {PartialContext} ctx + * @param {readonly CohortObject[]} list + * @returns {AnyFetchedSeriesBlueprint[]} + */ +export function createGroupedSupplyTotalSeries(ctx, list) { + const { s, constant100 } = ctx; + + return list.flatMap(({ color, name, tree }) => [ + s({ metric: tree.supply.supply.sats, name, color }), + s({ metric: tree.supply.supply.bitcoin, name, color }), + s({ metric: tree.supply.supply.dollars, name, color }), + "supplyRelToCirculatingSupply" in tree.relative + ? s({ metric: tree.relative.supplyRelToCirculatingSupply, name, color }) + : s({ unit: "%all", metric: constant100, name, color }), + ]); +} + +/** + * Create supply in profit series for grouped cohorts + * @param {PartialContext} ctx + * @param {readonly CohortObject[]} list + * @returns {AnyFetchedSeriesBlueprint[]} + */ +export function createGroupedSupplyInProfitSeries(ctx, list) { + const { s } = ctx; + + return list.flatMap(({ color, name, tree }) => [ + s({ metric: tree.unrealized.supplyInProfit.sats, name, color }), + s({ metric: tree.unrealized.supplyInProfit.bitcoin, name, color }), + s({ metric: tree.unrealized.supplyInProfit.dollars, name, color }), + ...("supplyInProfitRelToCirculatingSupply" in tree.relative + ? 
[s({ metric: tree.relative.supplyInProfitRelToCirculatingSupply, name, color })] + : []), + ]); +} + +/** + * Create supply in loss series for grouped cohorts + * @param {PartialContext} ctx + * @param {readonly CohortObject[]} list + * @returns {AnyFetchedSeriesBlueprint[]} + */ +export function createGroupedSupplyInLossSeries(ctx, list) { + const { s } = ctx; + + return list.flatMap(({ color, name, tree }) => [ + s({ metric: tree.unrealized.supplyInLoss.sats, name, color }), + s({ metric: tree.unrealized.supplyInLoss.bitcoin, name, color }), + s({ metric: tree.unrealized.supplyInLoss.dollars, name, color }), + ...("supplyInLossRelToCirculatingSupply" in tree.relative + ? [s({ metric: tree.relative.supplyInLossRelToCirculatingSupply, name, color })] + : []), + ]); +} + +/** + * Create UTXO count series + * @param {PartialContext} ctx + * @param {readonly CohortObject[]} list + * @param {boolean} useGroupName + * @returns {AnyFetchedSeriesBlueprint[]} + */ +export function createUtxoCountSeries(ctx, list, useGroupName) { + const { s } = ctx; + + return list.flatMap(({ color, name, tree }) => [ + s({ metric: tree.supply.utxoCount, name: useGroupName ? name : "Count", color }), + ]); +} + +/** + * Create address count series (for address cohorts only) + * @param {PartialContext} ctx + * @param {readonly AddressCohortObject[]} list + * @param {boolean} useGroupName + * @returns {AnyFetchedSeriesBlueprint[]} + */ +export function createAddressCountSeries(ctx, list, useGroupName) { + const { s, colors } = ctx; + + return list.flatMap(({ color, name, tree }) => [ + s({ + metric: tree.addrCount, + name: useGroupName ? name : "Count", + color: useGroupName ? 
color : colors.orange, + }), + ]); +} + +/** + * Create realized price series for grouped cohorts + * @param {PartialContext} ctx + * @param {readonly CohortObject[]} list + * @returns {AnyFetchedSeriesBlueprint[]} + */ +export function createRealizedPriceSeries(ctx, list) { + const { s } = ctx; + + return list.map(({ color, name, tree }) => + s({ metric: tree.realized.realizedPrice, name, color }), + ); +} + +/** + * Create realized price ratio series for grouped cohorts + * @param {PartialContext} ctx + * @param {readonly CohortObject[]} list + * @returns {AnyFetchedSeriesBlueprint[]} + */ +export function createRealizedPriceRatioSeries(ctx, list) { + const { s, createPriceLine } = ctx; + + return [ + ...list.map(({ color, name, tree }) => + s({ metric: tree.realized.realizedPriceExtra.ratio, name, color }), + ), + createPriceLine({ unit: "ratio", number: 1 }), + ]; +} + +/** + * Create realized capitalization series + * @param {PartialContext} ctx + * @param {readonly CohortObject[]} list + * @param {boolean} useGroupName + * @returns {AnyFetchedSeriesBlueprint[]} + */ +export function createRealizedCapSeries(ctx, list, useGroupName) { + const { s } = ctx; + + return list.flatMap(({ color, name, tree }) => [ + s({ metric: tree.realized.realizedCap, name: useGroupName ? name : "Capitalization", color }), + ]); +} + +/** + * Create price paid min/max series (available on all cohorts) + * @param {PartialContext} ctx + * @param {readonly CohortObject[]} list + * @param {boolean} useGroupName + * @returns {AnyFetchedSeriesBlueprint[]} + */ +export function createPricePaidMinMaxSeries(ctx, list, useGroupName) { + const { s } = ctx; + + return list.flatMap(({ color, name, tree }) => [ + s({ metric: tree.pricePaid.minPricePaid, name: useGroupName ? `${name} min` : "Min", color }), + s({ metric: tree.pricePaid.maxPricePaid, name: useGroupName ? 
`${name} max` : "Max", color }), + ]); +} + +/** + * Create price percentile series (only for cohorts with PricePaidPattern2) + * @param {PartialContext} ctx + * @param {readonly CohortWithPricePercentiles[]} list + * @param {boolean} useGroupName + * @returns {AnyFetchedSeriesBlueprint[]} + */ +export function createPricePercentilesSeries(ctx, list, useGroupName) { + const { s, colors } = ctx; + + return list.flatMap(({ color, name, tree }) => { + const pp = tree.pricePaid.pricePercentiles; + return [ + s({ metric: pp.pct10, name: useGroupName ? `${name} p10` : "p10", color, defaultActive: false }), + s({ metric: pp.pct25, name: useGroupName ? `${name} p25` : "p25", color, defaultActive: false }), + s({ metric: pp.pct50, name: useGroupName ? `${name} p50` : "p50", color }), + s({ metric: pp.pct75, name: useGroupName ? `${name} p75` : "p75", color, defaultActive: false }), + s({ metric: pp.pct90, name: useGroupName ? `${name} p90` : "p90", color, defaultActive: false }), + ]; + }); +} diff --git a/websites/bitview/scripts/options/partial/cohorts/types.js b/websites/bitview/scripts/options/partial/cohorts/types.js new file mode 100644 index 000000000..a6edf533a --- /dev/null +++ b/websites/bitview/scripts/options/partial/cohorts/types.js @@ -0,0 +1,6 @@ +/** + * Cohort-related type definitions + * Types are defined in ../types.js, this file exists for documentation + */ + +export {}; diff --git a/websites/bitview/scripts/options/partial/cohorts/utxo.js b/websites/bitview/scripts/options/partial/cohorts/utxo.js new file mode 100644 index 000000000..e67be375d --- /dev/null +++ b/websites/bitview/scripts/options/partial/cohorts/utxo.js @@ -0,0 +1,487 @@ +/** + * UTXO cohort folder builders + * Creates option trees for UTXO-based cohorts (no addrCount) + * + * Two main builders: + * - createAgeCohortFolder: For term, maxAge, minAge, ageRange, epoch (has price percentiles) + * - createAmountCohortFolder: For geAmount, ltAmount, amountRange, type (no price percentiles) + 
*/ + +import { + createSingleSupplySeries, + createGroupedSupplyTotalSeries, + createGroupedSupplyInProfitSeries, + createGroupedSupplyInLossSeries, + createUtxoCountSeries, + createRealizedPriceSeries, + createRealizedPriceRatioSeries, + createRealizedCapSeries, + createPricePaidMinMaxSeries, + createPricePercentilesSeries, +} from "./shared.js"; + +/** + * Create a cohort folder for age-based UTXO cohorts (term, maxAge, minAge, ageRange, epoch) + * These cohorts have price percentiles via PricePaidPattern2 + * @param {PartialContext} ctx + * @param {AgeCohortObject | AgeCohortGroupObject} args + * @returns {PartialOptionsGroup} + */ +export function createAgeCohortFolder(ctx, args) { + const list = "list" in args ? args.list : [args]; + const useGroupName = "list" in args; + const isSingle = !("list" in args); + const title = args.title ? `${useGroupName ? "by" : "of"} ${args.title}` : ""; + + return { + name: args.name || "all", + tree: [ + ...createSupplySection(ctx, list, args, useGroupName, isSingle, title), + createUtxoCountSection(ctx, list, useGroupName, title), + createRealizedSection(ctx, list, args, useGroupName, isSingle, title), + ...createUnrealizedSection(ctx, list, useGroupName, title), + ...createPricePaidSectionWithPercentiles(ctx, list, useGroupName, title), + ...createActivitySection(ctx, list, useGroupName, title), + ], + }; +} + +/** + * Create a cohort folder for amount-based UTXO cohorts (geAmount, ltAmount, amountRange, type) + * These cohorts have only min/max price paid via PricePaidPattern + * @param {PartialContext} ctx + * @param {AmountCohortObject | AmountCohortGroupObject} args + * @returns {PartialOptionsGroup} + */ +export function createAmountCohortFolder(ctx, args) { + const list = "list" in args ? args.list : [args]; + const useGroupName = "list" in args; + const isSingle = !("list" in args); + const title = args.title ? `${useGroupName ? 
"by" : "of"} ${args.title}` : ""; + + return { + name: args.name || "all", + tree: [ + ...createSupplySection(ctx, list, args, useGroupName, isSingle, title), + createUtxoCountSection(ctx, list, useGroupName, title), + createRealizedSection(ctx, list, args, useGroupName, isSingle, title), + ...createUnrealizedSection(ctx, list, useGroupName, title), + ...createPricePaidSectionBasic(ctx, list, useGroupName, title), + ...createActivitySection(ctx, list, useGroupName, title), + ], + }; +} + +// Keep the generic version for backwards compatibility +/** + * Create a cohort folder for UTXO cohorts (generic, uses runtime check for percentiles) + * @deprecated Use createAgeCohortFolder or createAmountCohortFolder for type safety + * @param {PartialContext} ctx + * @param {UtxoCohortObject | UtxoCohortGroupObject} args + * @returns {PartialOptionsGroup} + */ +export function createUtxoCohortFolder(ctx, args) { + const list = "list" in args ? args.list : [args]; + const useGroupName = "list" in args; + const isSingle = !("list" in args); + const title = args.title ? `${useGroupName ? "by" : "of"} ${args.title}` : ""; + + // Runtime check for percentiles + const hasPercentiles = "pricePercentiles" in list[0].tree.pricePaid; + + return { + name: args.name || "all", + tree: [ + ...createSupplySection(ctx, list, args, useGroupName, isSingle, title), + createUtxoCountSection(ctx, list, useGroupName, title), + createRealizedSection(ctx, list, args, useGroupName, isSingle, title), + ...createUnrealizedSection(ctx, list, useGroupName, title), + ...(hasPercentiles + ? 
createPricePaidSectionWithPercentiles(ctx, /** @type {readonly AgeCohortObject[]} */ (list), useGroupName, title) + : createPricePaidSectionBasic(ctx, list, useGroupName, title)), + ...createActivitySection(ctx, list, useGroupName, title), + ], + }; +} + +/** + * Create supply section + * @param {PartialContext} ctx + * @param {readonly UtxoCohortObject[]} list + * @param {UtxoCohortObject | UtxoCohortGroupObject} args + * @param {boolean} useGroupName + * @param {boolean} isSingle + * @param {string} title + * @returns {PartialOptionsTree} + */ +function createSupplySection(ctx, list, args, useGroupName, isSingle, title) { + return [ + isSingle + ? { + name: "supply", + title: `Supply ${title}`, + bottom: createSingleSupplySeries(ctx, /** @type {UtxoCohortObject} */ (args), title), + } + : { + name: "supply", + tree: [ + { + name: "total", + title: `Supply ${title}`, + bottom: createGroupedSupplyTotalSeries(ctx, list), + }, + { + name: "in profit", + title: `Supply In Profit ${title}`, + bottom: createGroupedSupplyInProfitSeries(ctx, list), + }, + { + name: "in loss", + title: `Supply In Loss ${title}`, + bottom: createGroupedSupplyInLossSeries(ctx, list), + }, + ], + }, + ]; +} + +/** + * Create UTXO count section + * @param {PartialContext} ctx + * @param {readonly UtxoCohortObject[]} list + * @param {boolean} useGroupName + * @param {string} title + * @returns {PartialChartOption} + */ +function createUtxoCountSection(ctx, list, useGroupName, title) { + return { + name: "utxo count", + title: `UTXO Count ${title}`, + bottom: createUtxoCountSeries(ctx, list, useGroupName), + }; +} + +/** + * Create realized section + * @param {PartialContext} ctx + * @param {readonly UtxoCohortObject[]} list + * @param {UtxoCohortObject | UtxoCohortGroupObject} args + * @param {boolean} useGroupName + * @param {boolean} isSingle + * @param {string} title + * @returns {PartialOptionsGroup} + */ +function createRealizedSection(ctx, list, args, useGroupName, isSingle, title) { + 
return { + name: "Realized", + tree: [ + ...(useGroupName + ? [ + { + name: "Price", + title: `Realized Price ${title}`, + top: createRealizedPriceSeries(ctx, list), + }, + { + name: "Ratio", + title: `Realized Price Ratio ${title}`, + bottom: createRealizedPriceRatioSeries(ctx, list), + }, + ] + : createRealizedPriceOptions(ctx, /** @type {UtxoCohortObject} */ (args), title)), + { + name: "capitalization", + title: `Realized Capitalization ${title}`, + bottom: createRealizedCapWithExtras(ctx, list, args, useGroupName, title), + }, + ...(!useGroupName ? createRealizedPnlSection(ctx, /** @type {UtxoCohortObject} */ (args), title) : []), + ], + }; +} + +/** + * Create realized price options for single cohort + * @param {PartialContext} ctx + * @param {UtxoCohortObject} args + * @param {string} title + * @returns {PartialOptionsTree} + */ +function createRealizedPriceOptions(ctx, args, title) { + const { s } = ctx; + const { tree, color } = args; + + return [ + { + name: "price", + title: `Realized Price ${title}`, + top: [s({ metric: tree.realized.realizedPrice, name: "realized", color })], + }, + ]; +} + +/** + * Create realized cap with extras + * @param {PartialContext} ctx + * @param {readonly UtxoCohortObject[]} list + * @param {UtxoCohortObject | UtxoCohortGroupObject} args + * @param {boolean} useGroupName + * @param {string} title + * @returns {AnyFetchedSeriesBlueprint[]} + */ +function createRealizedCapWithExtras(ctx, list, args, useGroupName, title) { + const { colors, s, createPriceLine } = ctx; + const isSingle = !("list" in args); + + return list.flatMap(({ color, name, tree }) => [ + s({ + metric: tree.realized.realizedCap, + name: useGroupName ? name : "Capitalization", + color, + }), + ...(isSingle + ? 
[ + /** @type {AnyFetchedSeriesBlueprint} */ ({ + type: "Baseline", + metric: tree.realized.realizedCap30dDelta, + title: "30d change", + defaultActive: false, + }), + createPriceLine({ unit: "usd", defaultActive: false }), + ] + : []), + ...(isSingle && "realizedCapRelToOwnMarketCap" in tree.realized + ? [ + /** @type {AnyFetchedSeriesBlueprint} */ ({ + type: "Baseline", + metric: tree.realized.realizedCapRelToOwnMarketCap, + title: "ratio", + options: { baseValue: { price: 100 } }, + colors: [colors.red, colors.green], + }), + createPriceLine({ unit: "%cmcap", defaultActive: true, number: 100 }), + ] + : []), + ]); +} + +/** + * Create realized PnL section for single cohort + * @param {PartialContext} ctx + * @param {UtxoCohortObject} args + * @param {string} title + * @returns {PartialOptionsTree} + */ +function createRealizedPnlSection(ctx, args, title) { + const { colors, s } = ctx; + const { tree } = args; + + return [ + { + name: "pnl", + title: `Realized Profit And Loss ${title}`, + bottom: [ + s({ + metric: tree.realized.realizedProfit.base, + name: "Profit", + color: colors.green, + }), + s({ + metric: tree.realized.realizedLoss.base, + name: "Loss", + color: colors.red, + defaultActive: false, + }), + ...("realizedProfitToLossRatio" in tree.realized + ? 
[ + s({ + metric: tree.realized.realizedProfitToLossRatio, + name: "profit / loss", + color: colors.yellow, + }), + ] + : []), + s({ + metric: tree.realized.totalRealizedPnl.base, + name: "Total", + color: colors.default, + defaultActive: false, + }), + s({ + metric: tree.realized.negRealizedLoss.base, + name: "Negative Loss", + color: colors.red, + }), + ], + }, + ]; +} + +/** + * Create unrealized section + * @param {PartialContext} ctx + * @param {readonly UtxoCohortObject[]} list + * @param {boolean} useGroupName + * @param {string} title + * @returns {PartialOptionsTree} + */ +function createUnrealizedSection(ctx, list, useGroupName, title) { + const { colors, s, createPriceLine } = ctx; + + return [ + { + name: "Unrealized", + tree: [ + { + name: "nupl", + title: `Net Unrealized Profit/Loss ${title}`, + bottom: list.flatMap(({ color, name, tree }) => [ + /** @type {AnyFetchedSeriesBlueprint} */ ({ + type: "Baseline", + metric: tree.unrealized.netUnrealizedPnl, + title: useGroupName ? name : "NUPL", + colors: [colors.red, colors.green], + options: { baseValue: { price: 0 } }, + }), + ]), + }, + { + name: "profit", + title: `Unrealized Profit ${title}`, + bottom: list.flatMap(({ color, name, tree }) => [ + s({ + metric: tree.unrealized.unrealizedProfit, + name: useGroupName ? name : "Profit", + color, + }), + ]), + }, + { + name: "loss", + title: `Unrealized Loss ${title}`, + bottom: list.flatMap(({ color, name, tree }) => [ + s({ + metric: tree.unrealized.unrealizedLoss, + name: useGroupName ? 
name : "Loss", + color, + }), + ]), + }, + ], + }, + ]; +} + +/** + * Create price paid section for cohorts WITH percentiles (age cohorts) + * @param {PartialContext} ctx + * @param {readonly AgeCohortObject[]} list + * @param {boolean} useGroupName + * @param {string} title + * @returns {PartialOptionsTree} + */ +function createPricePaidSectionWithPercentiles(ctx, list, useGroupName, title) { + const { s } = ctx; + + return [ + { + name: "Price Paid", + tree: [ + { + name: "min", + title: `Min Price Paid ${title}`, + top: list.map(({ color, name, tree }) => + s({ metric: tree.pricePaid.minPricePaid, name: useGroupName ? name : "Min", color }), + ), + }, + { + name: "max", + title: `Max Price Paid ${title}`, + top: list.map(({ color, name, tree }) => + s({ metric: tree.pricePaid.maxPricePaid, name: useGroupName ? name : "Max", color }), + ), + }, + { + name: "percentiles", + title: `Price Paid Percentiles ${title}`, + top: createPricePercentilesSeries(ctx, list, useGroupName), + }, + ], + }, + ]; +} + +/** + * Create price paid section for cohorts WITHOUT percentiles (amount cohorts) + * @param {PartialContext} ctx + * @param {readonly UtxoCohortObject[]} list + * @param {boolean} useGroupName + * @param {string} title + * @returns {PartialOptionsTree} + */ +function createPricePaidSectionBasic(ctx, list, useGroupName, title) { + const { s } = ctx; + + return [ + { + name: "Price Paid", + tree: [ + { + name: "min", + title: `Min Price Paid ${title}`, + top: list.map(({ color, name, tree }) => + s({ metric: tree.pricePaid.minPricePaid, name: useGroupName ? name : "Min", color }), + ), + }, + { + name: "max", + title: `Max Price Paid ${title}`, + top: list.map(({ color, name, tree }) => + s({ metric: tree.pricePaid.maxPricePaid, name: useGroupName ? 
name : "Max", color }), + ), + }, + ], + }, + ]; +} + +/** + * Create activity section + * @param {PartialContext} ctx + * @param {readonly UtxoCohortObject[]} list + * @param {boolean} useGroupName + * @param {string} title + * @returns {PartialOptionsTree} + */ +function createActivitySection(ctx, list, useGroupName, title) { + const { s } = ctx; + + return [ + { + name: "Activity", + tree: [ + { + name: "coinblocks destroyed", + title: `Coinblocks Destroyed ${title}`, + bottom: list.flatMap(({ color, name, tree }) => [ + s({ + metric: tree.activity.coinblocksDestroyed.base, + name: useGroupName ? name : "Coinblocks", + color, + }), + ]), + }, + { + name: "coindays destroyed", + title: `Coindays Destroyed ${title}`, + bottom: list.flatMap(({ color, name, tree }) => [ + s({ + metric: tree.activity.coindaysDestroyed.base, + name: useGroupName ? name : "Coindays", + color, + }), + ]), + }, + ], + }, + ]; +} diff --git a/websites/bitview/scripts/options/partial/cointime.js b/websites/bitview/scripts/options/partial/cointime.js new file mode 100644 index 000000000..1334368b3 --- /dev/null +++ b/websites/bitview/scripts/options/partial/cointime.js @@ -0,0 +1,306 @@ +/** Cointime section builder - typed tree-based patterns */ + +/** + * Create price with ratio options for cointime prices + * @param {PartialContext} ctx + * @param {Object} args + * @param {string} args.name + * @param {string} args.title + * @param {string} args.legend + * @param {MetricAccessor} args.price + * @param {ActivePriceRatioPattern} args.ratio + * @param {Color} [args.color] + * @returns {PartialOptionsTree} + */ +function createCointimePriceWithRatioOptions(ctx, { name, title, legend, price, ratio, color }) { + const { s, colors, createPriceLine } = ctx; + + // Percentile USD mappings + const percentileUsdMap = [ + { name: "pct99", prop: ratio.ratioPct99Usd, color: colors.rose }, + { name: "pct98", prop: ratio.ratioPct98Usd, color: colors.pink }, + { name: "pct95", prop: ratio.ratioPct95Usd, 
color: colors.fuchsia }, + { name: "pct5", prop: ratio.ratioPct5Usd, color: colors.cyan }, + { name: "pct2", prop: ratio.ratioPct2Usd, color: colors.sky }, + { name: "pct1", prop: ratio.ratioPct1Usd, color: colors.blue }, + ]; + + // Percentile ratio mappings + const percentileMap = [ + { name: "pct99", prop: ratio.ratioPct99, color: colors.rose }, + { name: "pct98", prop: ratio.ratioPct98, color: colors.pink }, + { name: "pct95", prop: ratio.ratioPct95, color: colors.fuchsia }, + { name: "pct5", prop: ratio.ratioPct5, color: colors.cyan }, + { name: "pct2", prop: ratio.ratioPct2, color: colors.sky }, + { name: "pct1", prop: ratio.ratioPct1, color: colors.blue }, + ]; + + // SD patterns by window + const sdPatterns = [ + { nameAddon: "all", titleAddon: "", sd: ratio.ratioSd }, + { nameAddon: "4y", titleAddon: "4y", sd: ratio.ratio4ySd }, + { nameAddon: "2y", titleAddon: "2y", sd: ratio.ratio2ySd }, + { nameAddon: "1y", titleAddon: "1y", sd: ratio.ratio1ySd }, + ]; + + /** @param {Ratio1ySdPattern} sd */ + const getSdBands = (sd) => [ + { name: "0σ", prop: sd._0sdUsd, color: colors.lime }, + { name: "+0.5σ", prop: sd.p05sdUsd, color: colors.yellow }, + { name: "+1σ", prop: sd.p1sdUsd, color: colors.amber }, + { name: "+1.5σ", prop: sd.p15sdUsd, color: colors.orange }, + { name: "+2σ", prop: sd.p2sdUsd, color: colors.red }, + { name: "+2.5σ", prop: sd.p25sdUsd, color: colors.rose }, + { name: "+3σ", prop: sd.p3sd, color: colors.pink }, + { name: "−0.5σ", prop: sd.m05sdUsd, color: colors.teal }, + { name: "−1σ", prop: sd.m1sdUsd, color: colors.cyan }, + { name: "−1.5σ", prop: sd.m15sdUsd, color: colors.sky }, + { name: "−2σ", prop: sd.m2sdUsd, color: colors.blue }, + { name: "−2.5σ", prop: sd.m25sdUsd, color: colors.indigo }, + { name: "−3σ", prop: sd.m3sd, color: colors.violet }, + ]; + + return [ + { + name: "price", + title, + top: [s({ metric: price, name: legend, color, unit: "usd" })], + }, + { + name: "Ratio", + title: `${title} Ratio`, + top: [ + s({ metric: 
price, name: legend, color, unit: "usd" }), + ...percentileUsdMap.map(({ name: pctName, prop, color: pctColor }) => + s({ + metric: prop, + name: pctName, + color: pctColor, + defaultActive: false, + unit: "usd", + options: { lineStyle: 1 }, + }), + ), + ], + bottom: [ + s({ metric: ratio.ratio, name: "ratio", color, unit: "ratio" }), + s({ metric: ratio.ratio1wSma, name: "1w sma", color: colors.lime, unit: "ratio" }), + s({ metric: ratio.ratio1mSma, name: "1m sma", color: colors.teal, unit: "ratio" }), + s({ metric: ratio.ratio1ySd.sma, name: "1y sma", color: colors.sky, unit: "ratio" }), + s({ metric: ratio.ratio2ySd.sma, name: "2y sma", color: colors.indigo, unit: "ratio" }), + s({ metric: ratio.ratio4ySd.sma, name: "4y sma", color: colors.purple, unit: "ratio" }), + s({ metric: ratio.ratioSd.sma, name: "all sma", color: colors.rose, unit: "ratio" }), + ...percentileMap.map(({ name: pctName, prop, color: pctColor }) => + s({ + metric: prop, + name: pctName, + color: pctColor, + defaultActive: false, + unit: "ratio", + options: { lineStyle: 1 }, + }), + ), + createPriceLine({ unit: "ratio", number: 1 }), + ], + }, + { + name: "ZScores", + tree: sdPatterns.map(({ nameAddon, titleAddon, sd }) => ({ + name: nameAddon, + title: `${title} ${titleAddon} Z-Score`, + top: getSdBands(sd).map(({ name: bandName, prop, color: bandColor }) => + s({ metric: prop, name: bandName, color: bandColor, unit: "usd" }), + ), + bottom: [ + s({ metric: sd.zscore, name: "zscore", color, unit: "sd" }), + createPriceLine({ unit: "sd", number: 3 }), + createPriceLine({ unit: "sd", number: 2 }), + createPriceLine({ unit: "sd", number: 1 }), + createPriceLine({ unit: "sd", number: 0 }), + createPriceLine({ unit: "sd", number: -1 }), + createPriceLine({ unit: "sd", number: -2 }), + createPriceLine({ unit: "sd", number: -3 }), + ], + })), + }, + ]; +} + +/** + * Create Cointime section + * @param {PartialContext} ctx + * @returns {PartialOptionsGroup} + */ +export function 
createCointimeSection(ctx) { + const { colors, brk, s } = ctx; + const { cointime, distribution, supply } = brk.tree.computed; + const { pricing, cap, activity, supply: cointimeSupply, adjusted } = cointime; + const utxoCohorts = distribution.utxoCohorts; + + // Cointime prices data + const cointimePrices = [ + { + price: pricing.trueMarketMean, + ratio: pricing.trueMarketMeanRatio, + name: "True market mean", + title: "true market mean", + color: colors.blue, + }, + { + price: pricing.vaultedPrice, + ratio: pricing.vaultedPriceRatio, + name: "Vaulted", + title: "vaulted price", + color: colors.lime, + }, + { + price: pricing.activePrice, + ratio: pricing.activePriceRatio, + name: "Active", + title: "active price", + color: colors.rose, + }, + { + price: pricing.cointimePrice, + ratio: pricing.cointimePriceRatio, + name: "cointime", + title: "cointime price", + color: colors.yellow, + }, + ]; + + // Cointime capitalizations data + const cointimeCapitalizations = [ + { metric: cap.vaultedCap, name: "vaulted", title: "vaulted Capitalization", color: colors.lime }, + { metric: cap.activeCap, name: "active", title: "active Capitalization", color: colors.rose }, + { metric: cap.cointimeCap, name: "cointime", title: "cointime Capitalization", color: colors.yellow }, + { metric: cap.investorCap, name: "investor", title: "investor Capitalization", color: colors.fuchsia }, + { metric: cap.thermoCap, name: "thermo", title: "thermo Capitalization", color: colors.emerald }, + ]; + + return { + name: "Cointime", + tree: [ + // Prices + { + name: "Prices", + tree: [ + { + name: "Compare", + title: "Compare Cointime Prices", + top: cointimePrices.map(({ price, name, color }) => + s({ metric: price, name, color, unit: "usd" }), + ), + }, + ...cointimePrices.map(({ price, ratio, name, color, title }) => ({ + name, + tree: createCointimePriceWithRatioOptions(ctx, { + price, + ratio, + legend: name, + color, + name, + title, + }), + })), + ], + }, + + // Capitalization + { + name: 
"Capitalization", + tree: [ + { + name: "Compare", + title: "Compare Cointime Capitalizations", + bottom: [ + s({ metric: supply.marketCap.height, name: "Market", color: colors.default, unit: "usd" }), + s({ metric: utxoCohorts.all.realized.realizedCap, name: "Realized", color: colors.orange, unit: "usd" }), + ...cointimeCapitalizations.map(({ metric, name, color }) => + s({ metric, name, color, unit: "usd" }), + ), + ], + }, + ...cointimeCapitalizations.map(({ metric, name, color, title }) => ({ + name, + title, + bottom: [ + s({ metric, name, color, unit: "usd" }), + s({ metric: supply.marketCap.height, name: "Market", color: colors.default, unit: "usd" }), + s({ metric: utxoCohorts.all.realized.realizedCap, name: "Realized", color: colors.orange, unit: "usd" }), + ], + })), + ], + }, + + // Supply + { + name: "Supply", + title: "Cointime Supply", + bottom: /** @type {const} */ ([ + [utxoCohorts.all.supply.supply, "all", colors.orange], + [cointimeSupply.vaultedSupply, "vaulted", colors.lime], + [cointimeSupply.activeSupply, "active", colors.rose], + ]).flatMap(([supplyItem, name, color]) => [ + s({ metric: supplyItem.sats, name, color, unit: "sats" }), + s({ metric: supplyItem.bitcoin, name, color, unit: "btc" }), + s({ metric: supplyItem.dollars, name, color, unit: "usd" }), + ]), + }, + + // Liveliness & Vaultedness + { + name: "Liveliness & Vaultedness", + title: "Liveliness & Vaultedness", + bottom: [ + s({ metric: activity.liveliness, name: "Liveliness", color: colors.rose, unit: "ratio" }), + s({ metric: activity.vaultedness, name: "Vaultedness", color: colors.lime, unit: "ratio" }), + s({ metric: activity.activityToVaultednessRatio, name: "Liveliness / Vaultedness", color: colors.purple, unit: "ratio" }), + ], + }, + + // Coinblocks + { + name: "Coinblocks", + title: "Coinblocks", + bottom: [ + // Destroyed comes from the all cohort's activity + s({ metric: utxoCohorts.all.activity.coinblocksDestroyed.base, name: "Destroyed", color: colors.red, unit: 
"coinblocks" }), + s({ metric: utxoCohorts.all.activity.coinblocksDestroyed.cumulative, name: "Cumulative Destroyed", color: colors.red, defaultActive: false, unit: "coinblocks" }), + // Created and stored from cointime + s({ metric: activity.coinblocksCreated.base, name: "created", color: colors.orange, unit: "coinblocks" }), + s({ metric: activity.coinblocksCreated.cumulative, name: "Cumulative created", color: colors.orange, defaultActive: false, unit: "coinblocks" }), + s({ metric: activity.coinblocksStored.base, name: "stored", color: colors.green, unit: "coinblocks" }), + s({ metric: activity.coinblocksStored.cumulative, name: "Cumulative stored", color: colors.green, defaultActive: false, unit: "coinblocks" }), + ], + }, + + // Adjusted metrics + { + name: "Adjusted", + tree: [ + // Inflation + { + name: "inflation", + title: "Cointime-Adjusted inflation rate", + bottom: [ + s({ metric: supply.inflation.indexes, name: "base", color: colors.orange, unit: "percentage" }), + s({ metric: adjusted.cointimeAdjInflationRate, name: "adjusted", color: colors.purple, unit: "percentage" }), + ], + }, + // Velocity + { + name: "Velocity", + title: "Cointime-Adjusted transactions velocity", + bottom: [ + s({ metric: supply.velocity.btc, name: "btc", color: colors.orange, unit: "ratio" }), + s({ metric: adjusted.cointimeAdjTxBtcVelocity, name: "adj. btc", color: colors.red, unit: "ratio" }), + s({ metric: supply.velocity.usd, name: "usd", color: colors.emerald, unit: "ratio" }), + s({ metric: adjusted.cointimeAdjTxUsdVelocity, name: "adj. 
usd", color: colors.lime, unit: "ratio" }), + ], + }, + ], + }, + ], + }; +} diff --git a/websites/bitview/scripts/options/partial/colors/cohorts.js b/websites/bitview/scripts/options/partial/colors/cohorts.js new file mode 100644 index 000000000..de194ea7c --- /dev/null +++ b/websites/bitview/scripts/options/partial/colors/cohorts.js @@ -0,0 +1,152 @@ +/** Cohort color mappings */ + +/** @type {Readonly>} */ +export const termColors = { + short: "yellow", + long: "fuchsia", +}; + +/** @type {Readonly>} */ +export const maxAgeColors = { + _1w: "red", + _1m: "orange", + _2m: "amber", + _3m: "yellow", + _4m: "lime", + _5m: "green", + _6m: "teal", + _1y: "sky", + _2y: "indigo", + _3y: "violet", + _4y: "purple", + _5y: "fuchsia", + _6y: "pink", + _7y: "red", + _8y: "orange", + _10y: "amber", + _12y: "yellow", + _15y: "lime", +}; + +/** @type {Readonly>} */ +export const minAgeColors = { + _1d: "red", + _1w: "orange", + _1m: "yellow", + _2m: "lime", + _3m: "green", + _4m: "teal", + _5m: "cyan", + _6m: "blue", + _1y: "indigo", + _2y: "violet", + _3y: "purple", + _4y: "fuchsia", + _5y: "pink", + _6y: "rose", + _7y: "red", + _8y: "orange", + _10y: "yellow", + _12y: "lime", +}; + +/** @type {Readonly>} */ +export const ageRangeColors = { + upTo1d: "pink", + _1dTo1w: "red", + _1wTo1m: "orange", + _1mTo2m: "yellow", + _2mTo3m: "yellow", + _3mTo4m: "lime", + _4mTo5m: "lime", + _5mTo6m: "lime", + _6mTo1y: "green", + _1yTo2y: "cyan", + _2yTo3y: "blue", + _3yTo4y: "indigo", + _4yTo5y: "violet", + _5yTo6y: "purple", + _6yTo7y: "purple", + _7yTo8y: "fuchsia", + _8yTo10y: "fuchsia", + _10yTo12y: "pink", + _12yTo15y: "red", + from15y: "orange", +}; + +/** @type {Readonly>} */ +export const epochColors = { + _0: "red", + _1: "yellow", + _2: "orange", + _3: "lime", + _4: "green", +}; + +/** @type {Readonly>} */ +export const geAmountColors = { + _1sat: "orange", + _10sats: "orange", + _100sats: "yellow", + _1kSats: "lime", + _10kSats: "green", + _100kSats: "cyan", + _1mSats: "blue", + 
_10mSats: "indigo", + _1btc: "purple", + _10btc: "violet", + _100btc: "fuchsia", + _1kBtc: "pink", + _10kBtc: "red", +}; + +/** @type {Readonly>} */ +export const ltAmountColors = { + _10sats: "orange", + _100sats: "yellow", + _1kSats: "lime", + _10kSats: "green", + _100kSats: "cyan", + _1mSats: "blue", + _10mSats: "indigo", + _1btc: "purple", + _10btc: "violet", + _100btc: "fuchsia", + _1kBtc: "pink", + _10kBtc: "red", + _100kBtc: "orange", +}; + +/** @type {Readonly>} */ +export const amountRangeColors = { + _0sats: "red", + _1satTo10sats: "orange", + _10satsTo100sats: "yellow", + _100satsTo1kSats: "lime", + _1kSatsTo10kSats: "green", + _10kSatsTo100kSats: "cyan", + _100kSatsTo1mSats: "blue", + _1mSatsTo10mSats: "indigo", + _10mSatsTo1btc: "purple", + _1btcTo10btc: "violet", + _10btcTo100btc: "fuchsia", + _100btcTo1kBtc: "pink", + _1kBtcTo10kBtc: "red", + _10kBtcTo100kBtc: "orange", + _100kBtcOrMore: "yellow", +}; + +/** @type {Readonly>} */ +export const spendableTypeColors = { + p2pk65: "red", + p2pk33: "orange", + p2pkh: "yellow", + p2ms: "lime", + p2sh: "green", + p2wpkh: "teal", + p2wsh: "blue", + p2tr: "indigo", + p2a: "purple", + unknown: "violet", + empty: "fuchsia", +}; diff --git a/websites/bitview/scripts/options/partial/colors/index.js b/websites/bitview/scripts/options/partial/colors/index.js new file mode 100644 index 000000000..38683d8d0 --- /dev/null +++ b/websites/bitview/scripts/options/partial/colors/index.js @@ -0,0 +1,14 @@ +// Re-export all color mappings +export { + termColors, + maxAgeColors, + minAgeColors, + ageRangeColors, + epochColors, + geAmountColors, + ltAmountColors, + amountRangeColors, + spendableTypeColors, +} from "./cohorts.js"; + +export { averageColors, dcaColors } from "./misc.js"; diff --git a/websites/bitview/scripts/options/partial/colors/misc.js b/websites/bitview/scripts/options/partial/colors/misc.js new file mode 100644 index 000000000..c03e1968a --- /dev/null +++ 
b/websites/bitview/scripts/options/partial/colors/misc.js @@ -0,0 +1,42 @@ +/** Miscellaneous color mappings for DCA and averages */ + +/** + * Moving average period colors + * Format: [periodId, days, colorName] + * @type {readonly [string, number, ColorName][]} + */ +export const averageColors = [ + ["1w", 7, "red"], + ["8d", 8, "orange"], + ["13d", 13, "amber"], + ["21d", 21, "yellow"], + ["1m", 30, "lime"], + ["34d", 34, "green"], + ["55d", 55, "emerald"], + ["89d", 89, "teal"], + ["144d", 144, "cyan"], + ["200d", 200, "sky"], + ["1y", 365, "blue"], + ["2y", 730, "indigo"], + ["200w", 1400, "violet"], + ["4y", 1460, "purple"], +]; + +/** + * DCA class colors by year + * Format: [year, colorName, defaultActive] + * @type {readonly [number, ColorName, boolean][]} + */ +export const dcaColors = [ + [2015, "pink", false], + [2016, "red", false], + [2017, "orange", true], + [2018, "yellow", true], + [2019, "green", true], + [2020, "teal", true], + [2021, "sky", true], + [2022, "blue", true], + [2023, "purple", true], + [2024, "fuchsia", true], + [2025, "pink", true], +]; diff --git a/websites/bitview/scripts/options/partial/constants.js b/websites/bitview/scripts/options/partial/constants.js new file mode 100644 index 000000000..eba1a2c1e --- /dev/null +++ b/websites/bitview/scripts/options/partial/constants.js @@ -0,0 +1,141 @@ +/** Constant helpers for creating price lines and reference lines */ + +/** + * Get constant pattern by number dynamically from tree + * Examples: 0 → constant0, 38.2 → constant382, -1 → constantMinus1 + * @param {BrkClient["tree"]["computed"]["constants"]} constants + * @param {number} num + * @returns {Constant0Pattern} + */ +export function getConstant(constants, num) { + const key = + num >= 0 + ? 
`constant${String(num).replace(".", "")}` + : `constantMinus${Math.abs(num)}`; + const constant = /** @type {Constant0Pattern | undefined} */ ( + /** @type {Record>} */ (constants)[key] + ); + if (!constant) throw new Error(`Unknown constant: ${num} (key: ${key})`); + return constant; +} + +/** + * Flatten a Constant0Pattern into a simple MetricAccessor + * Constant0Pattern has { dateindex: { by: {...} }, height: { by: {...} }, ... } + * This flattens it to { by: { dateindex: MetricNode, height: MetricNode, ... } } + * @param {Constant0Pattern} pattern + * @returns {MetricAccessor} + */ +export function flattenConstant(pattern) { + return { + by: { + dateindex: pattern.dateindex.by.dateindex, + decadeindex: pattern.decadeindex.by.decadeindex, + height: pattern.height.by.height, + monthindex: pattern.monthindex.by.monthindex, + quarterindex: pattern.quarterindex.by.quarterindex, + semesterindex: pattern.semesterindex.by.semesterindex, + weekindex: pattern.weekindex.by.weekindex, + yearindex: pattern.yearindex.by.yearindex, + }, + indexes() { + return /** @type {Index[]} */ (Object.keys(this.by)); + }, + }; +} + +/** + * Create a price line series (horizontal reference line) + * @param {Object} args + * @param {BrkClient["tree"]["computed"]["constants"]} args.constants + * @param {Colors} args.colors + * @param {number} [args.number] + * @param {string} [args.name] + * @param {boolean} [args.defaultActive] + * @param {number} [args.lineStyle] + * @param {Color} [args.color] + * @param {Unit} args.unit + * @returns {FetchedLineSeriesBlueprint} + */ +export function createPriceLine({ + constants, + colors, + number = 0, + unit, + defaultActive, + color, + name, + lineStyle, +}) { + return { + metric: flattenConstant(getConstant(constants, number)), + title: name ?? `${number}`, + unit, + defaultActive, + color: color ?? colors.gray, + options: { + lineStyle: lineStyle ?? 
4, + lastValueVisible: false, + crosshairMarkerVisible: false, + }, + }; +} + +/** + * Create multiple price lines from an array of numbers + * @param {Object} args + * @param {BrkClient["tree"]["computed"]["constants"]} args.constants + * @param {Colors} args.colors + * @param {number[]} args.numbers + * @param {Unit} args.unit + * @returns {FetchedLineSeriesBlueprint[]} + */ +export function createPriceLines({ constants, colors, numbers, unit }) { + return numbers.map((number) => ({ + metric: flattenConstant(getConstant(constants, number)), + title: `${number}`, + unit, + defaultActive: !number, + color: colors.gray, + options: { + lineStyle: 4, + lastValueVisible: false, + crosshairMarkerVisible: false, + }, + })); +} + +/** + * Create a constant line series + * @param {Object} args + * @param {Colors} args.colors + * @param {Constant0Pattern} args.constant + * @param {string} args.name + * @param {Unit} args.unit + * @param {Color} [args.color] + * @param {number} [args.lineStyle] + * @param {boolean} [args.defaultActive] + * @returns {FetchedLineSeriesBlueprint} + */ +export function line({ + colors, + constant, + name, + unit, + color, + lineStyle, + defaultActive, +}) { + return { + metric: flattenConstant(constant), + title: name, + unit, + defaultActive, + color: color ?? colors.gray, + options: { + lineStyle: lineStyle ?? 
4, + lastValueVisible: false, + crosshairMarkerVisible: false, + }, + }; +} diff --git a/websites/bitview/scripts/options/partial/context.js b/websites/bitview/scripts/options/partial/context.js new file mode 100644 index 000000000..ab290ab08 --- /dev/null +++ b/websites/bitview/scripts/options/partial/context.js @@ -0,0 +1,44 @@ +import { s, fromBlockCount, fromBitcoin, fromBlockSize } from "./series.js"; +import { + getConstant, + flattenConstant, + createPriceLine, + createPriceLines, + line, +} from "./constants.js"; + +/** + * Create a context object with all dependencies for building partial options + * @param {Object} args + * @param {Colors} args.colors + * @param {BrkClient} args.brk + * @returns {PartialContext} + */ +export function createContext({ colors, brk }) { + const constants = brk.tree.computed.constants; + const constant100 = flattenConstant(constants.constant100); + + return { + colors, + brk, + constants, + constant100, + + // Series helpers + s, + fromBlockCount: (pattern, title, color) => + fromBlockCount(colors, pattern, title, color), + fromBitcoin: (pattern, title, color) => + fromBitcoin(colors, pattern, title, color), + fromBlockSize: (pattern, title, color) => + fromBlockSize(colors, pattern, title, color), + + // Constant helpers + getConstant: (num) => getConstant(constants, num), + flattenConstant, + createPriceLine: (args) => createPriceLine({ constants, colors, ...args }), + createPriceLines: (args) => + createPriceLines({ constants, colors, ...args }), + line: (args) => line({ colors, ...args }), + }; +} diff --git a/websites/bitview/scripts/options/partial/index.js b/websites/bitview/scripts/options/partial/index.js new file mode 100644 index 000000000..a59768cd2 --- /dev/null +++ b/websites/bitview/scripts/options/partial/index.js @@ -0,0 +1,357 @@ +/** Partial options - Main entry point */ + +import { localhost } from "../../utils/env.js"; +import { createContext } from "./context.js"; +import { + buildCohortData, + 
createUtxoCohortFolder, + createAddressCohortFolder, +} from "./cohorts/index.js"; +import { createMarketSection } from "./market.js"; +import { createChainSection } from "./chain.js"; +import { createCointimeSection } from "./cointime.js"; + +// Re-export types for external consumers +export * from "./types.js"; + +/** + * Create partial options tree + * @param {Object} args + * @param {Colors} args.colors + * @param {BrkClient} args.brk + * @returns {PartialOptionsTree} + */ +export function createPartialOptions({ colors, brk }) { + // Create context with all helpers + const ctx = createContext({ colors, brk }); + + // Build cohort data + const { + cohortAll, + cohortAllForComparison, + terms, + upToDate, + fromDate, + dateRange, + epoch, + utxosAboveAmount, + addressesAboveAmount, + utxosUnderAmount, + addressesUnderAmount, + utxosAmountRanges, + addressesAmountRanges, + type, + } = buildCohortData(colors, brk); + + // Helper to map UTXO cohorts + const mapUtxoCohorts = (/** @type {any} */ cohort) => createUtxoCohortFolder(ctx, cohort); + + // Helper to map Address cohorts + const mapAddressCohorts = (/** @type {any} */ cohort) => createAddressCohortFolder(ctx, cohort); + + return [ + // Debug explorer (localhost only) + ...(localhost + ? 
[ + { + kind: /** @type {const} */ ("explorer"), + name: "Explorer", + title: "Debug explorer", + }, + ] + : []), + + // Charts section + { + name: "Charts", + tree: [ + // Market section + createMarketSection(ctx), + + // Chain section + createChainSection(ctx), + + // Cohorts section + { + name: "Cohorts", + tree: [ + // All UTXOs + createUtxoCohortFolder(ctx, cohortAll), + + // Terms (STH/LTH) + { + name: "terms", + tree: [ + createUtxoCohortFolder(ctx, { + name: "Compare", + title: "UTXOs Term", + list: [...terms, cohortAllForComparison], + }), + ...terms.map(mapUtxoCohorts), + ], + }, + + // Epochs + { + name: "Epochs", + tree: [ + createUtxoCohortFolder(ctx, { + name: "Compare", + title: "Epoch", + list: [...epoch, cohortAllForComparison], + }), + ...epoch.map(mapUtxoCohorts), + ], + }, + + // Types + { + name: "types", + tree: [ + createUtxoCohortFolder(ctx, { + name: "Compare", + title: "Type", + list: [...type, cohortAllForComparison], + }), + ...type.map(mapUtxoCohorts), + ], + }, + + // UTXOs Up to age + { + name: "UTXOs Up to age", + tree: [ + createUtxoCohortFolder(ctx, { + name: "Compare", + title: "UTXOs Up To Age", + list: [...upToDate, cohortAllForComparison], + }), + ...upToDate.map(mapUtxoCohorts), + ], + }, + + // UTXOs from age + { + name: "UTXOs from age", + tree: [ + createUtxoCohortFolder(ctx, { + name: "Compare", + title: "UTXOs from age", + list: [...fromDate, cohortAllForComparison], + }), + ...fromDate.map(mapUtxoCohorts), + ], + }, + + // UTXOs age ranges + { + name: "UTXOs age Ranges", + tree: [ + createUtxoCohortFolder(ctx, { + name: "Compare", + title: "UTXOs Age Range", + list: [...dateRange, cohortAllForComparison], + }), + ...dateRange.map(mapUtxoCohorts), + ], + }, + + // UTXOs under amounts + { + name: "UTXOs under amounts", + tree: [ + createUtxoCohortFolder(ctx, { + name: "Compare", + title: "UTXOs under amount", + list: [...utxosUnderAmount, cohortAllForComparison], + }), + ...utxosUnderAmount.map(mapUtxoCohorts), + ], + }, + 
+ // UTXOs above amounts + { + name: "UTXOs Above Amounts", + tree: [ + createUtxoCohortFolder(ctx, { + name: "Compare", + title: "UTXOs Above Amount", + list: [...utxosAboveAmount, cohortAllForComparison], + }), + ...utxosAboveAmount.map(mapUtxoCohorts), + ], + }, + + // UTXOs between amounts + { + name: "UTXOs between amounts", + tree: [ + createUtxoCohortFolder(ctx, { + name: "Compare", + title: "UTXOs between amounts", + list: [...utxosAmountRanges, cohortAllForComparison], + }), + ...utxosAmountRanges.map(mapUtxoCohorts), + ], + }, + + // Addresses under amount (TYPE SAFE - uses createAddressCohortFolder!) + { + name: "Addresses under amount", + tree: [ + createAddressCohortFolder(ctx, { + name: "Compare", + title: "Addresses under Amount", + list: addressesUnderAmount, + }), + ...addressesUnderAmount.map(mapAddressCohorts), + ], + }, + + // Addresses above amount (TYPE SAFE - uses createAddressCohortFolder!) + { + name: "Addresses above amount", + tree: [ + createAddressCohortFolder(ctx, { + name: "Compare", + title: "Addresses above amount", + list: addressesAboveAmount, + }), + ...addressesAboveAmount.map(mapAddressCohorts), + ], + }, + + // Addresses between amounts (TYPE SAFE - uses createAddressCohortFolder!) 
+ { + name: "Addresses between amounts", + tree: [ + createAddressCohortFolder(ctx, { + name: "Compare", + title: "Addresses between amounts", + list: addressesAmountRanges, + }), + ...addressesAmountRanges.map(mapAddressCohorts), + ], + }, + ], + }, + + // Cointime section + createCointimeSection(ctx), + ], + }, + + // Table section + { + kind: /** @type {const} */ ("table"), + title: "Table", + name: "Table", + }, + + // Simulations section + { + name: "Simulations", + tree: [ + { + kind: /** @type {const} */ ("simulation"), + name: "Save In Bitcoin", + title: "Save In Bitcoin", + }, + ], + }, + + // Tools section + { + name: "Tools", + tree: [ + { + name: "Documentation", + tree: [ + { + name: "API", + url: () => "/api", + title: "API documentation", + }, + { + name: "MCP", + url: () => + "https://github.com/bitcoinresearchkit/brk/blob/main/crates/brk_mcp/README.md#brk_mcp", + title: "Model Context Protocol documentation", + }, + { + name: "Crate", + url: () => "/crate", + title: "View on crates.io", + }, + { + name: "Source", + url: () => "/github", + title: "Source code and issues", + }, + { + name: "Changelog", + url: () => "/changelog", + title: "Release notes and changelog", + }, + ], + }, + { + name: "Hosting", + tree: [ + { + name: "Status", + url: () => "/status", + title: "Service status and uptime", + }, + { + name: "Self-host", + url: () => "/install", + title: "Install and run yourself", + }, + { + name: "Service", + url: () => "/service", + title: "Hosted service offering", + }, + ], + }, + { + name: "Community", + tree: [ + { + name: "Discord", + url: () => "/discord", + title: "Join the Discord server", + }, + { + name: "GitHub", + url: () => "/github", + title: "Source code and issues", + }, + { + name: "Nostr", + url: () => "/nostr", + title: "Follow on Nostr", + }, + ], + }, + ], + }, + + // Donate + { + name: "Donate", + qrcode: true, + url: () => "bitcoin:bc1q098zsm89m7kgyze338vfejhpdt92ua9p3peuve", + title: "Bitcoin address for donations", + 
}, + + // Share + { + name: "Share", + qrcode: true, + url: () => window.location.href, + title: "Share", + }, + ]; +} diff --git a/websites/bitview/scripts/options/partial/market.js b/websites/bitview/scripts/options/partial/market.js new file mode 100644 index 000000000..cbbbaa10d --- /dev/null +++ b/websites/bitview/scripts/options/partial/market.js @@ -0,0 +1,480 @@ +/** Market section builder - typed tree-based patterns */ + +/** + * Convert period ID to readable name + * @param {string} id + * @param {boolean} [compoundAdjective] + */ +function periodIdToName(id, compoundAdjective) { + const suffix = compoundAdjective || parseInt(id) === 1 ? "" : "s"; + return id + .replace("d", ` day${suffix}`) + .replace("w", ` week${suffix}`) + .replace("m", ` month${suffix}`) + .replace("y", ` year${suffix}`); +} + +/** + * Create price with ratio options (for moving averages) + * @param {PartialContext} ctx + * @param {Object} args + * @param {string} args.name + * @param {string} args.title + * @param {string} args.legend + * @param {EmaRatioPattern} args.ratio + * @param {Color} [args.color] + * @returns {PartialOptionsTree} + */ +function createPriceWithRatioOptions(ctx, { name, title, legend, ratio, color }) { + const { s, colors, createPriceLine } = ctx; + const priceMetric = ratio.price; + + // Percentile USD mappings + const percentileUsdMap = [ + { name: "pct99", prop: ratio.ratioPct99Usd, color: colors.rose }, + { name: "pct98", prop: ratio.ratioPct98Usd, color: colors.pink }, + { name: "pct95", prop: ratio.ratioPct95Usd, color: colors.fuchsia }, + { name: "pct5", prop: ratio.ratioPct5Usd, color: colors.cyan }, + { name: "pct2", prop: ratio.ratioPct2Usd, color: colors.sky }, + { name: "pct1", prop: ratio.ratioPct1Usd, color: colors.blue }, + ]; + + // Percentile ratio mappings + const percentileMap = [ + { name: "pct99", prop: ratio.ratioPct99, color: colors.rose }, + { name: "pct98", prop: ratio.ratioPct98, color: colors.pink }, + { name: "pct95", prop: 
ratio.ratioPct95, color: colors.fuchsia }, + { name: "pct5", prop: ratio.ratioPct5, color: colors.cyan }, + { name: "pct2", prop: ratio.ratioPct2, color: colors.sky }, + { name: "pct1", prop: ratio.ratioPct1, color: colors.blue }, + ]; + + // SD patterns by window + const sdPatterns = [ + { nameAddon: "all", titleAddon: "", sd: ratio.ratioSd }, + { nameAddon: "4y", titleAddon: "4y", sd: ratio.ratio4ySd }, + { nameAddon: "2y", titleAddon: "2y", sd: ratio.ratio2ySd }, + { nameAddon: "1y", titleAddon: "1y", sd: ratio.ratio1ySd }, + ]; + + /** @param {Ratio1ySdPattern} sd */ + const getSdBands = (sd) => [ + { name: "0σ", prop: sd._0sdUsd, color: colors.lime }, + { name: "+0.5σ", prop: sd.p05sdUsd, color: colors.yellow }, + { name: "+1σ", prop: sd.p1sdUsd, color: colors.amber }, + { name: "+1.5σ", prop: sd.p15sdUsd, color: colors.orange }, + { name: "+2σ", prop: sd.p2sdUsd, color: colors.red }, + { name: "+2.5σ", prop: sd.p25sdUsd, color: colors.rose }, + { name: "+3σ", prop: sd.p3sd, color: colors.pink }, + { name: "−0.5σ", prop: sd.m05sdUsd, color: colors.teal }, + { name: "−1σ", prop: sd.m1sdUsd, color: colors.cyan }, + { name: "−1.5σ", prop: sd.m15sdUsd, color: colors.sky }, + { name: "−2σ", prop: sd.m2sdUsd, color: colors.blue }, + { name: "−2.5σ", prop: sd.m25sdUsd, color: colors.indigo }, + { name: "−3σ", prop: sd.m3sd, color: colors.violet }, + ]; + + return [ + { + name: "price", + title, + top: [s({ metric: priceMetric, name: legend, color, unit: "usd" })], + }, + { + name: "Ratio", + title: `${title} Ratio`, + top: [ + s({ metric: priceMetric, name: legend, color, unit: "usd" }), + ...percentileUsdMap.map(({ name: pctName, prop, color: pctColor }) => + s({ + metric: prop, + name: pctName, + color: pctColor, + defaultActive: false, + unit: "usd", + options: { lineStyle: 1 }, + }), + ), + ], + bottom: [ + s({ metric: ratio.ratio, name: "ratio", color, unit: "ratio" }), + s({ metric: ratio.ratio1wSma, name: "1w sma", color: colors.lime, unit: "ratio" }), + s({ 
metric: ratio.ratio1mSma, name: "1m sma", color: colors.teal, unit: "ratio" }), + s({ metric: ratio.ratio1ySd.sma, name: "1y sma", color: colors.sky, unit: "ratio" }), + s({ metric: ratio.ratio2ySd.sma, name: "2y sma", color: colors.indigo, unit: "ratio" }), + s({ metric: ratio.ratio4ySd.sma, name: "4y sma", color: colors.purple, unit: "ratio" }), + s({ metric: ratio.ratioSd.sma, name: "all sma", color: colors.rose, unit: "ratio" }), + ...percentileMap.map(({ name: pctName, prop, color: pctColor }) => + s({ + metric: prop, + name: pctName, + color: pctColor, + defaultActive: false, + unit: "ratio", + options: { lineStyle: 1 }, + }), + ), + createPriceLine({ unit: "ratio", number: 1 }), + ], + }, + { + name: "ZScores", + tree: sdPatterns.map(({ nameAddon, titleAddon, sd }) => ({ + name: nameAddon, + title: `${title} ${titleAddon} Z-Score`, + top: getSdBands(sd).map(({ name: bandName, prop, color: bandColor }) => + s({ metric: prop, name: bandName, color: bandColor, unit: "usd" }), + ), + bottom: [ + s({ metric: sd.zscore, name: "zscore", color, unit: "sd" }), + createPriceLine({ unit: "sd", number: 3 }), + createPriceLine({ unit: "sd", number: 2 }), + createPriceLine({ unit: "sd", number: 1 }), + createPriceLine({ unit: "sd", number: 0 }), + createPriceLine({ unit: "sd", number: -1 }), + createPriceLine({ unit: "sd", number: -2 }), + createPriceLine({ unit: "sd", number: -3 }), + ], + })), + }, + ]; +} + +/** + * Build averages data array from market patterns + * @param {Colors} colors + * @param {MarketMovingAverage} ma + */ +function buildAverages(colors, ma) { + return /** @type {const} */ ([ + ["1w", 7, "red", ma.price1wSma, ma.price1wEma], + ["8d", 8, "orange", ma.price8dSma, ma.price8dEma], + ["13d", 13, "amber", ma.price13dSma, ma.price13dEma], + ["21d", 21, "yellow", ma.price21dSma, ma.price21dEma], + ["1m", 30, "lime", ma.price1mSma, ma.price1mEma], + ["34d", 34, "green", ma.price34dSma, ma.price34dEma], + ["55d", 55, "emerald", ma.price55dSma, 
ma.price55dEma], + ["89d", 89, "teal", ma.price89dSma, ma.price89dEma], + ["144d", 144, "cyan", ma.price144dSma, ma.price144dEma], + ["200d", 200, "sky", ma.price200dSma, ma.price200dEma], + ["1y", 365, "blue", ma.price1ySma, ma.price1yEma], + ["2y", 730, "indigo", ma.price2ySma, ma.price2yEma], + ["200w", 1400, "violet", ma.price200wSma, ma.price200wEma], + ["4y", 1460, "purple", ma.price4ySma, ma.price4yEma], + ]).map(([id, days, colorKey, sma, ema]) => ({ + id, + name: periodIdToName(id, true), + days, + color: colors[colorKey], + sma, + ema, + })); +} + +/** + * Build DCA classes data array + * @param {Colors} colors + * @param {MarketDca} dca + */ +function buildDcaClasses(colors, dca) { + return /** @type {const} */ ([ + [2015, "pink", false, dca.dcaClass2015AvgPrice, dca.dcaClass2015Returns, dca.dcaClass2015Stack], + [2016, "red", false, dca.dcaClass2016AvgPrice, dca.dcaClass2016Returns, dca.dcaClass2016Stack], + [2017, "orange", true, dca.dcaClass2017AvgPrice, dca.dcaClass2017Returns, dca.dcaClass2017Stack], + [2018, "yellow", true, dca.dcaClass2018AvgPrice, dca.dcaClass2018Returns, dca.dcaClass2018Stack], + [2019, "green", true, dca.dcaClass2019AvgPrice, dca.dcaClass2019Returns, dca.dcaClass2019Stack], + [2020, "teal", true, dca.dcaClass2020AvgPrice, dca.dcaClass2020Returns, dca.dcaClass2020Stack], + [2021, "sky", true, dca.dcaClass2021AvgPrice, dca.dcaClass2021Returns, dca.dcaClass2021Stack], + [2022, "blue", true, dca.dcaClass2022AvgPrice, dca.dcaClass2022Returns, dca.dcaClass2022Stack], + [2023, "purple", true, dca.dcaClass2023AvgPrice, dca.dcaClass2023Returns, dca.dcaClass2023Stack], + [2024, "fuchsia", true, dca.dcaClass2024AvgPrice, dca.dcaClass2024Returns, dca.dcaClass2024Stack], + [2025, "pink", true, dca.dcaClass2025AvgPrice, dca.dcaClass2025Returns, dca.dcaClass2025Stack], + ]).map(([year, colorKey, defaultActive, avgPrice, returns, stack]) => ({ + year, + color: colors[colorKey], + defaultActive, + avgPrice, + returns, + stack, + })); +} + +/** 
+ * Create Market section + * @param {PartialContext} ctx + * @returns {PartialOptionsGroup} + */ +export function createMarketSection(ctx) { + const { colors, brk, s, createPriceLine } = ctx; + const { market, supply } = brk.tree.computed; + const { movingAverage, ath, returns, volatility, range, dca, lookback } = market; + + const averages = buildAverages(colors, movingAverage); + const dcaClasses = buildDcaClasses(colors, dca); + + return { + name: "Market", + tree: [ + // Price (empty chart, shows candlesticks by default) + { + name: "Price", + title: "Bitcoin Price", + }, + + // Capitalization + { + name: "Capitalization", + title: "Market Capitalization", + bottom: [s({ metric: supply.marketCap.indexes, name: "Capitalization", unit: "usd" })], + }, + + // All Time High + { + name: "All Time High", + title: "All Time High", + top: [s({ metric: ath.priceAth, name: "ath", unit: "usd" })], + bottom: [ + s({ metric: ath.priceDrawdown, name: "Drawdown", color: colors.red, unit: "percentage" }), + s({ metric: ath.daysSincePriceAth, name: "since", unit: "days" }), + s({ metric: ath.maxDaysBetweenPriceAths, name: "Max", color: colors.red, unit: "days" }), + s({ metric: ath.maxYearsBetweenPriceAths, name: "Max", color: colors.red, unit: "years" }), + ], + }, + + // Averages + { + name: "Averages", + tree: [ + { nameAddon: "Simple", metricAddon: /** @type {const} */ ("sma") }, + { nameAddon: "Exponential", metricAddon: /** @type {const} */ ("ema") }, + ].map(({ nameAddon, metricAddon }) => ({ + name: nameAddon, + tree: [ + { + name: "Compare", + title: `Market Price ${nameAddon} Moving Averages`, + top: averages.map(({ id, color, sma, ema }) => + s({ + metric: (metricAddon === "sma" ? sma : ema).price, + name: id, + color, + unit: "usd", + }), + ), + }, + ...averages.map(({ name, color, sma, ema }) => ({ + name, + tree: createPriceWithRatioOptions(ctx, { + ratio: metricAddon === "sma" ? 
sma : ema, + name, + title: `${name} Market Price ${nameAddon} Moving Average`, + legend: "average", + color, + }), + })), + ], + })), + }, + + // Performance + { + name: "Performance", + tree: /** @type {const} */ ([ + ["1d", returns._1dPriceReturns, undefined], + ["1w", returns._1wPriceReturns, undefined], + ["1m", returns._1mPriceReturns, undefined], + ["3m", returns._3mPriceReturns, undefined], + ["6m", returns._6mPriceReturns, undefined], + ["1y", returns._1yPriceReturns, undefined], + ["2y", returns._2yPriceReturns, returns._2yCagr], + ["3y", returns._3yPriceReturns, returns._3yCagr], + ["4y", returns._4yPriceReturns, returns._4yCagr], + ["5y", returns._5yPriceReturns, returns._5yCagr], + ["6y", returns._6yPriceReturns, returns._6yCagr], + ["8y", returns._8yPriceReturns, returns._8yCagr], + ["10y", returns._10yPriceReturns, returns._10yCagr], + ]).map(([id, priceReturns, cagr]) => { + const name = periodIdToName(id, true); + return { + name, + title: `${name} Performance`, + bottom: [ + /** @type {AnyFetchedSeriesBlueprint} */ ({ + metric: priceReturns, + title: "total", + type: "Baseline", + unit: "percentage", + }), + ...(cagr + ? 
[ + /** @type {AnyFetchedSeriesBlueprint} */ ({ + metric: cagr, + title: "cagr", + type: "Baseline", + colors: [colors.lime, colors.pink], + unit: "percentage", + }), + ] + : []), + createPriceLine({ unit: "percentage" }), + ], + }; + }), + }, + + // Indicators + { + name: "Indicators", + tree: [ + // Volatility + { + name: "Volatility", + title: "Bitcoin Price Volatility Index", + bottom: [ + s({ metric: volatility.price1wVolatility, name: "1w", color: colors.red, unit: "percentage" }), + s({ metric: volatility.price1mVolatility, name: "1m", color: colors.orange, unit: "percentage" }), + s({ metric: volatility.price1yVolatility, name: "1y", color: colors.lime, unit: "percentage" }), + ], + }, + + // MinMax + { + name: "MinMax", + tree: [ + { id: "1w", title: "1 Week", min: range.price1wMin, max: range.price1wMax }, + { id: "2w", title: "2 Week", min: range.price2wMin, max: range.price2wMax }, + { id: "1m", title: "1 Month", min: range.price1mMin, max: range.price1mMax }, + { id: "1y", title: "1 Year", min: range.price1yMin, max: range.price1yMax }, + ].map(({ id, title, min, max }) => ({ + name: id, + title: `Bitcoin Price ${title} MinMax Bands`, + top: [ + s({ metric: min, name: "min", color: colors.red, unit: "usd" }), + s({ metric: max, name: "max", color: colors.green, unit: "usd" }), + ], + })), + }, + + // True range + { + name: "True range", + title: "Bitcoin Price True Range", + bottom: [s({ metric: range.priceTrueRange, name: "value", color: colors.yellow, unit: "usd" })], + }, + + // Choppiness + { + name: "Choppiness", + title: "Bitcoin Price Choppiness Index", + bottom: [ + s({ metric: range.price2wChoppinessIndex, name: "2w", color: colors.red, unit: "index" }), + createPriceLine({ unit: "index", number: 61.8 }), + createPriceLine({ unit: "index", number: 38.2 }), + ], + }, + + // Mayer multiple + { + name: "Mayer multiple", + title: "Mayer multiple", + top: [ + s({ metric: movingAverage.price200dSma.price, name: "200d sma", color: colors.yellow, 
unit: "usd" }), + s({ metric: movingAverage.price200dSmaX24, name: "200d sma x2.4", color: colors.green, unit: "usd" }), + s({ metric: movingAverage.price200dSmaX08, name: "200d sma x0.8", color: colors.red, unit: "usd" }), + ], + }, + ], + }, + + // Investing + { + name: "Investing", + tree: [ + // DCA vs Lump sum + { + name: "DCA vs Lump sum", + tree: [ + .../** @type {const} */ ([ + ["1w", dca._1wDcaAvgPrice, lookback.price1wAgo, dca._1wDcaReturns, returns._1wPriceReturns], + ["1m", dca._1mDcaAvgPrice, lookback.price1mAgo, dca._1mDcaReturns, returns._1mPriceReturns], + ["3m", dca._3mDcaAvgPrice, lookback.price3mAgo, dca._3mDcaReturns, returns._3mPriceReturns], + ["6m", dca._6mDcaAvgPrice, lookback.price6mAgo, dca._6mDcaReturns, returns._6mPriceReturns], + ["1y", dca._1yDcaAvgPrice, lookback.price1yAgo, dca._1yDcaReturns, returns._1yPriceReturns], + ]).map(([id, dcaAvgPrice, priceAgo, dcaReturns, priceReturns]) => { + const name = periodIdToName(id, true); + return { + name, + tree: [ + { + name: "price", + title: `${name} DCA vs Lump Sum (Price)`, + top: [ + s({ metric: dcaAvgPrice, name: "DCA avg", color: colors.green, unit: "usd" }), + s({ metric: priceAgo, name: "Lump sum", color: colors.orange, unit: "usd" }), + ], + }, + { + name: "returns", + title: `${name} DCA vs Lump Sum (Returns)`, + bottom: [ + /** @type {AnyFetchedSeriesBlueprint} */ ({ + metric: dcaReturns, + title: "DCA", + type: "Baseline", + unit: "percentage", + }), + /** @type {AnyFetchedSeriesBlueprint} */ ({ + metric: priceReturns, + title: "Lump sum", + type: "Baseline", + colors: [colors.lime, colors.red], + unit: "percentage", + }), + createPriceLine({ unit: "percentage" }), + ], + }, + ], + }; + }), + ], + }, + + // DCA classes + { + name: "DCA classes", + tree: [ + { + name: "Average price", + title: "DCA Average Price by Year", + top: dcaClasses.map(({ year, color, defaultActive, avgPrice }) => + s({ metric: avgPrice, name: `${year}`, color, defaultActive, unit: "usd" }), + ), + }, + { 
+ name: "Returns", + title: "DCA Returns by Year", + bottom: dcaClasses.map(({ year, color, defaultActive, returns }) => + /** @type {AnyFetchedSeriesBlueprint} */ ({ + metric: returns, + title: `${year}`, + type: "Baseline", + color, + defaultActive, + unit: "percentage", + }), + ), + }, + { + name: "Stack", + title: "DCA Stack by Year", + bottom: dcaClasses.map(({ year, color, defaultActive, stack }) => + s({ metric: stack, name: `${year}`, color, defaultActive, unit: "sats" }), + ), + }, + ], + }, + ], + }, + ], + }; +} diff --git a/websites/bitview/scripts/options/partial/series.js b/websites/bitview/scripts/options/partial/series.js new file mode 100644 index 000000000..93e1e8cc4 --- /dev/null +++ b/websites/bitview/scripts/options/partial/series.js @@ -0,0 +1,181 @@ +/** Series helpers for creating chart series blueprints */ + +/** + * Create a single series from a tree accessor + * @param {Object} args + * @param {MetricAccessor} args.metric - Tree accessor with .by property + * @param {string} args.name - Display name for the series + * @param {Color} [args.color] + * @param {Unit} [args.unit] + * @param {boolean} [args.defaultActive] + * @param {LineSeriesPartialOptions} [args.options] + * @returns {AnyFetchedSeriesBlueprint} + */ +export function s({ metric, name, color, defaultActive, unit, options }) { + return { + metric, + title: name, + color, + unit, + defaultActive, + options, + }; +} + +/** + * Create series from a BlockCountPattern ({ base, sum, cumulative }) + * @param {Colors} colors + * @param {BlockCountPattern} pattern + * @param {string} title + * @param {Color} [color] + * @returns {AnyFetchedSeriesBlueprint[]} + */ +export function fromBlockCount(colors, pattern, title, color) { + return [ + { metric: pattern.base, title, color: color ?? 
colors.default }, + { + metric: pattern.sum, + title: `${title} (sum)`, + color: colors.red, + defaultActive: false, + }, + { + metric: pattern.cumulative, + title: `${title} (cum.)`, + color: colors.cyan, + defaultActive: false, + }, + ]; +} + +/** + * Create series from a BitcoinPattern ({ base, sum, cumulative, average, min, max, median, pct* }) + * @param {Colors} colors + * @param {BitcoinPattern} pattern + * @param {string} title + * @param {Color} [color] + * @returns {AnyFetchedSeriesBlueprint[]} + */ +export function fromBitcoin(colors, pattern, title, color) { + return [ + { metric: pattern.base, title, color: color ?? colors.default }, + { metric: pattern.average, title: "Average", defaultActive: false }, + { + metric: pattern.sum, + title: `${title} (sum)`, + color: colors.red, + defaultActive: false, + }, + { + metric: pattern.cumulative, + title: `${title} (cum.)`, + color: colors.cyan, + defaultActive: false, + }, + { + metric: pattern.max, + title: "Max", + color: colors.pink, + defaultActive: false, + }, + { + metric: pattern.min, + title: "Min", + color: colors.green, + defaultActive: false, + }, + { + metric: pattern.median, + title: "Median", + color: colors.amber, + defaultActive: false, + }, + { + metric: pattern.pct75, + title: "pct75", + color: colors.red, + defaultActive: false, + }, + { + metric: pattern.pct25, + title: "pct25", + color: colors.yellow, + defaultActive: false, + }, + { + metric: pattern.pct90, + title: "pct90", + color: colors.rose, + defaultActive: false, + }, + { + metric: pattern.pct10, + title: "pct10", + color: colors.lime, + defaultActive: false, + }, + ]; +} + +/** + * Create series from a BlockSizePattern ({ sum, cumulative, average, min, max, median, pct* }) + * @param {Colors} colors + * @param {BlockSizePattern} pattern + * @param {string} title + * @param {Color} [color] + * @returns {AnyFetchedSeriesBlueprint[]} + */ +export function fromBlockSize(colors, pattern, title, color) { + return [ + { metric: 
pattern.sum, title, color: color ?? colors.default }, + { metric: pattern.average, title: "Average", defaultActive: false }, + { + metric: pattern.cumulative, + title: `${title} (cum.)`, + color: colors.cyan, + defaultActive: false, + }, + { + metric: pattern.max, + title: "Max", + color: colors.pink, + defaultActive: false, + }, + { + metric: pattern.min, + title: "Min", + color: colors.green, + defaultActive: false, + }, + { + metric: pattern.median, + title: "Median", + color: colors.amber, + defaultActive: false, + }, + { + metric: pattern.pct75, + title: "pct75", + color: colors.red, + defaultActive: false, + }, + { + metric: pattern.pct25, + title: "pct25", + color: colors.yellow, + defaultActive: false, + }, + { + metric: pattern.pct90, + title: "pct90", + color: colors.rose, + defaultActive: false, + }, + { + metric: pattern.pct10, + title: "pct10", + color: colors.lime, + defaultActive: false, + }, + ]; +} diff --git a/websites/bitview/scripts/options/partial/types.js b/websites/bitview/scripts/options/partial/types.js new file mode 100644 index 000000000..bf1535cf3 --- /dev/null +++ b/websites/bitview/scripts/options/partial/types.js @@ -0,0 +1,185 @@ +/** + * @typedef {Object} BaseSeriesBlueprint + * @property {string} title + * @property {boolean} [defaultActive] + * + * @typedef {Object} BaselineSeriesBlueprintSpecific + * @property {"Baseline"} type + * @property {Color} [color] + * @property {[Color, Color]} [colors] + * @property {BaselineSeriesPartialOptions} [options] + * @property {Accessor} [data] + * @typedef {BaseSeriesBlueprint & BaselineSeriesBlueprintSpecific} BaselineSeriesBlueprint + * + * @typedef {Object} CandlestickSeriesBlueprintSpecific + * @property {"Candlestick"} type + * @property {Color} [color] + * @property {CandlestickSeriesPartialOptions} [options] + * @property {Accessor} [data] + * @typedef {BaseSeriesBlueprint & CandlestickSeriesBlueprintSpecific} CandlestickSeriesBlueprint + * + * @typedef {Object} 
LineSeriesBlueprintSpecific + * @property {"Line"} [type] + * @property {Color} [color] + * @property {LineSeriesPartialOptions} [options] + * @property {Accessor} [data] + * @typedef {BaseSeriesBlueprint & LineSeriesBlueprintSpecific} LineSeriesBlueprint + * + * @typedef {Object} HistogramSeriesBlueprintSpecific + * @property {"Histogram"} type + * @property {Color} color + * @property {HistogramSeriesPartialOptions} [options] + * @property {Accessor} [data] + * @typedef {BaseSeriesBlueprint & HistogramSeriesBlueprintSpecific} HistogramSeriesBlueprint + * + * @typedef {BaselineSeriesBlueprint | CandlestickSeriesBlueprint | LineSeriesBlueprint | HistogramSeriesBlueprint} AnySeriesBlueprint + * + * @typedef {AnySeriesBlueprint["type"]} SeriesType + * + * @typedef {{ metric: MetricAccessor, unit?: Unit }} FetchedAnySeriesOptions + * + * @typedef {BaselineSeriesBlueprint & FetchedAnySeriesOptions} FetchedBaselineSeriesBlueprint + * @typedef {CandlestickSeriesBlueprint & FetchedAnySeriesOptions} FetchedCandlestickSeriesBlueprint + * @typedef {LineSeriesBlueprint & FetchedAnySeriesOptions} FetchedLineSeriesBlueprint + * @typedef {HistogramSeriesBlueprint & FetchedAnySeriesOptions} FetchedHistogramSeriesBlueprint + * @typedef {AnySeriesBlueprint & FetchedAnySeriesOptions} AnyFetchedSeriesBlueprint + * + * @typedef {Object} PartialOption + * @property {string} name + * + * @typedef {Object} ProcessedOptionAddons + * @property {string} title + * @property {string[]} path + * + * @typedef {Object} PartialExplorerOptionSpecific + * @property {"explorer"} kind + * @property {string} title + * + * @typedef {PartialOption & PartialExplorerOptionSpecific} PartialExplorerOption + * + * @typedef {Required & ProcessedOptionAddons} ExplorerOption + * + * @typedef {Object} PartialChartOptionSpecific + * @property {"chart"} [kind] + * @property {string} title + * @property {AnyFetchedSeriesBlueprint[]} [top] + * @property {AnyFetchedSeriesBlueprint[]} [bottom] + * + * @typedef 
{PartialOption & PartialChartOptionSpecific} PartialChartOption + * + * @typedef {Object} ProcessedChartOptionAddons + * @property {Record} top + * @property {Record} bottom + * + * @typedef {Required> & ProcessedChartOptionAddons & ProcessedOptionAddons} ChartOption + * + * @typedef {Object} PartialTableOptionSpecific + * @property {"table"} kind + * @property {string} title + * + * @typedef {PartialOption & PartialTableOptionSpecific} PartialTableOption + * + * @typedef {Required & ProcessedOptionAddons} TableOption + * + * @typedef {Object} PartialSimulationOptionSpecific + * @property {"simulation"} kind + * @property {string} title + * + * @typedef {PartialOption & PartialSimulationOptionSpecific} PartialSimulationOption + * + * @typedef {Required & ProcessedOptionAddons} SimulationOption + * + * @typedef {Object} PartialUrlOptionSpecific + * @property {"url"} [kind] + * @property {() => string} url + * @property {string} title + * @property {boolean} [qrcode] + * + * @typedef {PartialOption & PartialUrlOptionSpecific} PartialUrlOption + * + * @typedef {Required & ProcessedOptionAddons} UrlOption + * + * @typedef {PartialExplorerOption | PartialChartOption | PartialTableOption | PartialSimulationOption | PartialUrlOption} AnyPartialOption + * + * @typedef {ExplorerOption | ChartOption | TableOption | SimulationOption | UrlOption} Option + * + * @typedef {(AnyPartialOption | PartialOptionsGroup)[]} PartialOptionsTree + * + * @typedef {Object} PartialOptionsGroup + * @property {string} name + * @property {PartialOptionsTree} tree + * + * @typedef {Object} OptionsGroup + * @property {string} name + * @property {OptionsTree} tree + * + * @typedef {(Option | OptionsGroup)[]} OptionsTree + * + * @typedef {Object} UtxoCohortObject + * @property {string} name + * @property {string} title + * @property {Color} color + * @property {UtxoCohortPattern} tree + * + * Age cohorts (term, maxAge, minAge, ageRange, epoch) - have price percentiles + * @typedef {Object} 
AgeCohortObject + * @property {string} name + * @property {string} title + * @property {Color} color + * @property {PatternWithPricePercentiles} tree + * + * Amount cohorts (geAmount, ltAmount, amountRange, type) - no price percentiles + * @typedef {Object} AmountCohortObject + * @property {string} name + * @property {string} title + * @property {Color} color + * @property {UtxoAmountPattern} tree + * + * @typedef {Object} AddressCohortObject + * @property {string} name + * @property {string} title + * @property {Color} color + * @property {AddressCohortPattern} tree + * + * @typedef {UtxoCohortObject | AddressCohortObject} CohortObject + * + * @typedef {Object} UtxoCohortGroupObject + * @property {string} name + * @property {string} title + * @property {readonly UtxoCohortObject[]} list + * + * @typedef {Object} AgeCohortGroupObject + * @property {string} name + * @property {string} title + * @property {readonly AgeCohortObject[]} list + * + * @typedef {Object} AmountCohortGroupObject + * @property {string} name + * @property {string} title + * @property {readonly AmountCohortObject[]} list + * + * @typedef {Object} AddressCohortGroupObject + * @property {string} name + * @property {string} title + * @property {readonly AddressCohortObject[]} list + * + * @typedef {UtxoCohortGroupObject | AddressCohortGroupObject} CohortGroupObject + * + * @typedef {Object} PartialContext + * @property {Colors} colors + * @property {BrkClient} brk + * @property {BrkClient["tree"]["computed"]["constants"]} constants + * @property {(args: { metric: MetricAccessor, name: string, color?: Color, defaultActive?: boolean, unit?: Unit, options?: LineSeriesPartialOptions }) => AnyFetchedSeriesBlueprint} s + * @property {(pattern: BlockCountPattern, title: string, color?: Color) => AnyFetchedSeriesBlueprint[]} fromBlockCount + * @property {(pattern: BitcoinPattern, title: string, color?: Color) => AnyFetchedSeriesBlueprint[]} fromBitcoin + * @property {(pattern: BlockSizePattern, title: 
string, color?: Color) => AnyFetchedSeriesBlueprint[]} fromBlockSize + * @property {(num: number) => Constant0Pattern} getConstant + * @property {(pattern: Constant0Pattern) => MetricAccessor} flattenConstant + * @property {(args: { number?: number, name?: string, defaultActive?: boolean, lineStyle?: number, color?: Color, unit: Unit }) => FetchedLineSeriesBlueprint} createPriceLine + * @property {(args: { numbers: number[], unit: Unit }) => FetchedLineSeriesBlueprint[]} createPriceLines + * @property {(args: { constant: Constant0Pattern, name: string, unit: Unit, color?: Color, lineStyle?: number, defaultActive?: boolean }) => FetchedLineSeriesBlueprint} line + * @property {MetricAccessor} constant100 + */ + +// Re-export for type consumers +export {}; diff --git a/websites/bitview/scripts/options/unused.js b/websites/bitview/scripts/options/unused.js new file mode 100644 index 000000000..a883b83b0 --- /dev/null +++ b/websites/bitview/scripts/options/unused.js @@ -0,0 +1,44 @@ +/** Track unused metrics (dev only) */ + +import { localhost } from "../utils/env.js"; + +/** @type {Set> | null} */ +export const unused = localhost ? 
new Set() : null; + +/** + * Walk and collect MetricAccessors + * @param {TreeNode | null | undefined} node + * @param {Set>} set + */ +function walk(node, set) { + if (node && "by" in node) { + set.add(/** @type {MetricAccessor} */ (node)); + } else if (node && typeof node === "object") { + for (const value of Object.values(node)) { + walk(/** @type {TreeNode | null | undefined} */ (value), set); + } + } +} + +/** + * Collect all MetricAccessors from tree + * @param {TreeNode} tree + */ +export function collect(tree) { + if (unused) walk(tree, unused); +} + +/** + * Mark a metric as used + * @param {MetricAccessor} metric + */ +export function markUsed(metric) { + unused?.delete(metric); +} + +/** Log unused metrics to console */ +export function logUnused() { + if (!unused?.size) return; + const paths = [...unused].map((m) => Object.values(m.by)[0].path); + console.warn("Unused metrics:", paths); +} diff --git a/websites/bitview/scripts/panes/chart/index.js b/websites/bitview/scripts/panes/chart/index.js index 7bd75b38d..e1cdc1c9d 100644 --- a/websites/bitview/scripts/panes/chart/index.js +++ b/websites/bitview/scripts/panes/chart/index.js @@ -2,11 +2,11 @@ import { createShadow, createHorizontalChoiceField, createHeader, -} from "../../core/dom"; -import { chartElement } from "../../core/elements"; -import { ios, canShare } from "../../core/env"; -import { serdeChartableIndex, serdeOptNumber } from "../../core/serde"; -import { throttle } from "../../core/timing"; +} from "../../utils/dom"; +import { chartElement } from "../../utils/elements"; +import { ios, canShare } from "../../utils/env"; +import { serdeChartableIndex, serdeOptNumber } from "../../utils/serde"; +import { throttle } from "../../utils/timing"; const keyPrefix = "chart"; const ONE_BTC_IN_SATS = 100_000_000; @@ -26,7 +26,7 @@ const CANDLE = "candle"; * @param {Signals} args.signals * @param {WebSockets} args.webSockets * @param {Resources} args.resources - * @param {BRK} args.brk + * @param 
{BrkClient} args.brk */ export function init({ colors, @@ -78,6 +78,7 @@ export function init({ colors, id: "charts", resources, + brk, index, timeScaleSetCallback: (unknownTimeScaleCallback) => { // TODO: Although it mostly works in practice, need to make it more robust, there is no guarantee that this runs in order and wait for `from` and `to` to update when `index` and thus `TIMERANGE_LS_KEY` is updated @@ -327,7 +328,7 @@ export function init({ case null: case CANDLE: { series = chart.addCandlestickSeries({ - metric: "price_ohlc", + metric: brk.tree.computed.price.usd.priceOhlc, name: "Price", unit: topUnit, setDataCallback: printLatest, @@ -337,7 +338,7 @@ export function init({ } case LINE: { series = chart.addLineSeries({ - metric: "price_close", + metric: brk.tree.computed.price.usd.priceClose, name: "Price", unit: topUnit, color: colors.default, @@ -357,7 +358,7 @@ export function init({ case null: case CANDLE: { series = chart.addCandlestickSeries({ - metric: "price_ohlc_in_sats", + metric: brk.tree.computed.price.sats.priceOhlcInSats, name: "Price", unit: topUnit, inverse: true, @@ -368,7 +369,7 @@ export function init({ } case LINE: { series = chart.addLineSeries({ - metric: "price_close_in_sats", + metric: brk.tree.computed.price.sats.priceCloseInSats, name: "Price", unit: topUnit, color: colors.default, @@ -432,7 +433,8 @@ export function init({ blueprints[unit]?.forEach((blueprint, order) => { order += orderStart; - const indexes = brk.getIndexesFromMetric(blueprint.metric); + // Tree-first: metric is now an accessor with .by property + const indexes = Object.keys(blueprint.metric.by); if (indexes.includes(index)) { switch (blueprint.type) { @@ -503,7 +505,7 @@ export function init({ /** * @param {Object} args * @param {Accessor} args.option - * @param {BRK} args.brk + * @param {BrkClient} args.brk * @param {Signals} args.signals */ function createIndexSelector({ option, brk, signals }) { @@ -530,9 +532,11 @@ function createIndexSelector({ option, 
brk, signals }) { const rawIndexes = new Set( [Object.values(o.top), Object.values(o.bottom)] .flat(2) - .filter((blueprint) => !blueprint.metric.startsWith("constant_")) - .map((blueprint) => brk.getIndexesFromMetric(blueprint.metric)) - .flat(), + .filter((blueprint) => { + const path = Object.values(blueprint.metric.by)[0]?.path ?? ""; + return !path.includes("constant_"); + }) + .flatMap((blueprint) => blueprint.metric.indexes()), ); const serializedIndexes = [...rawIndexes].flatMap((index) => { diff --git a/websites/bitview/scripts/panes/chart/screenshot.js b/websites/bitview/scripts/panes/chart/screenshot.js index d0cfc2de1..4de0bbd81 100644 --- a/websites/bitview/scripts/panes/chart/screenshot.js +++ b/websites/bitview/scripts/panes/chart/screenshot.js @@ -1,4 +1,4 @@ -import { ios } from "../../core/env"; +import { ios } from "../../utils/env"; import { domToBlob } from "../../modules/modern-screenshot/4.6.6/dist/index.mjs"; /** diff --git a/websites/bitview/scripts/panes/explorer.js b/websites/bitview/scripts/panes/explorer.js index d9e71ea00..3ca0dfcf4 100644 --- a/websites/bitview/scripts/panes/explorer.js +++ b/websites/bitview/scripts/panes/explorer.js @@ -1,5 +1,5 @@ -import { randomFromArray } from "../core/array"; -import { explorerElement } from "../core/elements"; +import { randomFromArray } from "../utils/array"; +import { explorerElement } from "../utils/elements"; /** * @param {Object} args @@ -9,7 +9,7 @@ import { explorerElement } from "../core/elements"; * @param {Signals} args.signals * @param {WebSockets} args.webSockets * @param {Resources} args.resources - * @param {BRK} args.brk + * @param {BrkClient} args.brk */ export function init({ colors, diff --git a/websites/bitview/scripts/panes/simulation.js b/websites/bitview/scripts/panes/simulation.js index 31b493299..79597f1c1 100644 --- a/websites/bitview/scripts/panes/simulation.js +++ b/websites/bitview/scripts/panes/simulation.js @@ -1,21 +1,23 @@ +// @ts-nocheck + import { 
createDateRange, dateToDateIndex, differenceBetweenDates, -} from "../core/date"; +} from "../utils/date"; import { createButtonElement, createFieldElement, createHeader, createSelect, -} from "../core/dom"; -import { simulationElement } from "../core/elements"; +} from "../utils/dom"; +import { simulationElement } from "../utils/elements"; import { numberToDollars, numberToPercentage, numberToUSNumber, -} from "../core/format"; -import { serdeDate, serdeOptDate, serdeOptNumber } from "../core/serde"; +} from "../utils/format"; +import { serdeDate, serdeOptDate, serdeOptNumber } from "../utils/serde"; /** * @param {Object} args diff --git a/websites/bitview/scripts/panes/table.js b/websites/bitview/scripts/panes/table.js index b7602a8da..3de2e03c1 100644 --- a/websites/bitview/scripts/panes/table.js +++ b/websites/bitview/scripts/panes/table.js @@ -1,15 +1,17 @@ -import { randomFromArray } from "../core/array"; -import { createButtonElement, createHeader, createSelect } from "../core/dom"; -import { tableElement } from "../core/elements"; -import { serdeMetrics, serdeString, serdeUnit } from "../core/serde"; -import { resetParams } from "../core/url"; +// @ts-nocheck + +import { randomFromArray } from "../utils/array"; +import { createButtonElement, createHeader, createSelect } from "../utils/dom"; +import { tableElement } from "../utils/elements"; +import { serdeMetrics, serdeString, serdeUnit } from "../utils/serde"; +import { resetParams } from "../utils/url"; /** * @param {Object} args * @param {Signals} args.signals * @param {Option} args.option * @param {Resources} args.resources - * @param {BRK} args.brk + * @param {BrkClient} args.brk */ export function init({ signals, option, resources, brk }) { tableElement.innerHTML = "wip, will hopefuly be back soon, sorry !"; @@ -46,7 +48,7 @@ export function init({ signals, option, resources, brk }) { // * @param {Object} args // * @param {Option} args.option // * @param {Signals} args.signals -// * @param {BRK} 
args.brk +// * @param {BrkClient} args.brk // * @param {Resources} args.resources // */ // function createTable({ brk, signals, option, resources }) { diff --git a/modules/brk-resources/index.js b/websites/bitview/scripts/resources.js similarity index 90% rename from modules/brk-resources/index.js rename to websites/bitview/scripts/resources.js index c1cb6d3f8..7c90d4f3f 100644 --- a/modules/brk-resources/index.js +++ b/websites/bitview/scripts/resources.js @@ -1,6 +1,6 @@ /** - * @import { Signal, Signals } from "../brk-signals/index"; - * @import { MetricNode } from "../brk-client/index"; + * @import { Signal, Signals } from "./signals"; + * @import { MetricNode } from "./modules/brk-client/index"; */ /** @@ -15,7 +15,7 @@ /** * @template T * @typedef {Object} RangeState - * @property {Signal} data + * @property {Signal} response * @property {Signal} loading */ @@ -95,7 +95,7 @@ export function createResources(signals) { const key = `${from}-${to ?? ""}`; if (!ranges.has(key)) { ranges.set(key, { - data: signals.createSignal(/** @type {T[] | null} */ (null)), + response: signals.createSignal(/** @type {T | null} */ (null)), loading: signals.createSignal(false), }); } @@ -103,7 +103,7 @@ export function createResources(signals) { } return { - path: node._path, + path: node.path, range, /** * Fetch data for a range @@ -114,7 +114,7 @@ export function createResources(signals) { const r = range(from, to); r.loading.set(true); try { - const result = await node.getRange(from, to, r.data.set); + const result = await node.range(from, to, r.response.set); return result; } finally { r.loading.set(false); diff --git a/modules/brk-signals/index.js b/websites/bitview/scripts/signals.js similarity index 95% rename from modules/brk-signals/index.js rename to websites/bitview/scripts/signals.js index 363717654..3f1e6ffb0 100644 --- a/modules/brk-signals/index.js +++ b/websites/bitview/scripts/signals.js @@ -1,12 +1,9 @@ /** - * @import { SignalOptions } from 
"../solidjs-signals/0.6.3/dist/types/core/core" - * @import { getOwner as GetOwner, onCleanup as OnCleanup } from "../solidjs-signals/0.6.3/dist/types/core/owner" - * @import { createSignal as CreateSignal, createEffect as CreateEffect, createMemo as CreateMemo, createRoot as CreateRoot, runWithOwner as RunWithOwner, Setter } from "../solidjs-signals/0.6.3/dist/types/signals"; + * @import { SignalOptions } from "./modules/solidjs-signals/0.6.3/dist/types/core/core" + * @import { getOwner as GetOwner, onCleanup as OnCleanup } from "./modules/solidjs-signals/0.6.3/dist/types/core/owner" + * @import { createSignal as CreateSignal, createEffect as CreateEffect, createMemo as CreateMemo, createRoot as CreateRoot, runWithOwner as RunWithOwner, Setter } from "./modules/solidjs-signals/0.6.3/dist/types/signals"; */ -// test -// wkopwfk - /** * @template T * @typedef {() => T} Accessor @@ -25,7 +22,7 @@ import { createRoot, runWithOwner, onCleanup, -} from "../solidjs-signals/0.6.3/dist/prod.js"; +} from "./modules/solidjs-signals/0.6.3/dist/prod.js"; let effectCount = 0; diff --git a/websites/bitview/scripts/core/array.js b/websites/bitview/scripts/utils/array.js similarity index 100% rename from websites/bitview/scripts/core/array.js rename to websites/bitview/scripts/utils/array.js diff --git a/websites/bitview/scripts/core/colors.js b/websites/bitview/scripts/utils/colors.js similarity index 100% rename from websites/bitview/scripts/core/colors.js rename to websites/bitview/scripts/utils/colors.js diff --git a/websites/bitview/scripts/core/date.js b/websites/bitview/scripts/utils/date.js similarity index 100% rename from websites/bitview/scripts/core/date.js rename to websites/bitview/scripts/utils/date.js diff --git a/websites/bitview/scripts/core/dom.js b/websites/bitview/scripts/utils/dom.js similarity index 100% rename from websites/bitview/scripts/core/dom.js rename to websites/bitview/scripts/utils/dom.js diff --git a/websites/bitview/scripts/core/elements.js 
b/websites/bitview/scripts/utils/elements.js similarity index 100% rename from websites/bitview/scripts/core/elements.js rename to websites/bitview/scripts/utils/elements.js diff --git a/websites/bitview/scripts/core/env.js b/websites/bitview/scripts/utils/env.js similarity index 100% rename from websites/bitview/scripts/core/env.js rename to websites/bitview/scripts/utils/env.js diff --git a/websites/bitview/scripts/core/format.js b/websites/bitview/scripts/utils/format.js similarity index 100% rename from websites/bitview/scripts/core/format.js rename to websites/bitview/scripts/utils/format.js diff --git a/websites/bitview/scripts/core/serde.js b/websites/bitview/scripts/utils/serde.js similarity index 98% rename from websites/bitview/scripts/core/serde.js rename to websites/bitview/scripts/utils/serde.js index de6bb0ff2..cd550b1fe 100644 --- a/websites/bitview/scripts/core/serde.js +++ b/websites/bitview/scripts/utils/serde.js @@ -1,4 +1,5 @@ const localhost = window.location.hostname === "localhost"; +console.log({ localhost }); export const serdeString = { /** @@ -249,6 +250,7 @@ export const serdeUnit = { if ( (!unit || localhost) && (v.includes("in_sats") || + v.endsWith("_sats") || (v.endsWith("supply") && !(v.endsWith("circulating_supply") || v.endsWith("_own_supply"))) || v === "sent" || @@ -313,8 +315,10 @@ export const serdeUnit = { v.endsWith("value_created") || v.endsWith("value_destroyed") || ((v.includes("realized") || v.includes("true_market_mean")) && + !v.includes("unrealized") && !v.includes("ratio") && !v.includes("rel_to")) || + (v.includes("unrealized") && !v.includes("rel_to")) || ((v.endsWith("sma") || v.includes("sma_x") || v.endsWith("ema")) && !v.includes("ratio") && !v.includes("sopr") && diff --git a/websites/bitview/scripts/core/storage.js b/websites/bitview/scripts/utils/storage.js similarity index 100% rename from websites/bitview/scripts/core/storage.js rename to websites/bitview/scripts/utils/storage.js diff --git 
a/websites/bitview/scripts/core/timing.js b/websites/bitview/scripts/utils/timing.js similarity index 100% rename from websites/bitview/scripts/core/timing.js rename to websites/bitview/scripts/utils/timing.js diff --git a/websites/bitview/scripts/core/url.js b/websites/bitview/scripts/utils/url.js similarity index 100% rename from websites/bitview/scripts/core/url.js rename to websites/bitview/scripts/utils/url.js diff --git a/websites/bitview/scripts/core/ws.js b/websites/bitview/scripts/utils/ws.js similarity index 100% rename from websites/bitview/scripts/core/ws.js rename to websites/bitview/scripts/utils/ws.js diff --git a/websites/bitview/tsconfig.json b/websites/bitview/tsconfig.json index 6ccb37c6e..38ee578c4 100644 --- a/websites/bitview/tsconfig.json +++ b/websites/bitview/tsconfig.json @@ -3,6 +3,8 @@ "allowJs": true, "checkJs": true, "strict": true, + "noUnusedLocals": true, + "noUnusedParameters": true, "target": "ESNext", "module": "ESNext", "outDir": "/tmp/brk",