From 426d7797a3c503fc94d509f325c6348e603245aa Mon Sep 17 00:00:00 2001 From: nym21 Date: Fri, 9 Jan 2026 20:00:20 +0100 Subject: [PATCH] global: big snapshot --- .gitignore | 2 + Cargo.lock | 2080 +----- Cargo.toml | 16 +- crates/brk/Cargo.toml | 3 - crates/brk/README.md | 1 - crates/brk/src/lib.rs | 4 - crates/brk_alloc/Cargo.toml | 2 +- crates/brk_bindgen/src/analysis/patterns.rs | 2 +- crates/brk_bindgen/src/analysis/tree.rs | 4 +- .../src/generators/javascript/tree.rs | 59 +- .../brk_bindgen/src/generators/python/api.rs | 13 +- crates/brk_bindgen/src/lib.rs | 2 +- crates/brk_bundler/Cargo.toml | 20 - crates/brk_bundler/README.md | 32 - crates/brk_bundler/build.rs | 8 - crates/brk_bundler/examples/bundle.rs | 37 - crates/brk_bundler/src/lib.rs | 246 - crates/brk_cli/Cargo.toml | 7 +- crates/brk_cli/README.md | 1 - crates/brk_cli/src/main.rs | 34 +- crates/brk_client/src/lib.rs | 4716 ++++++------ crates/brk_computer/Cargo.toml | 2 +- .../brk_computer/examples/computer_bench.rs | 2 +- crates/brk_computer/examples/computer_read.rs | 2 +- crates/brk_computer/examples/full_bench.rs | 2 +- .../brk_computer/src/blocks/count/compute.rs | 141 +- .../brk_computer/src/blocks/count/import.rs | 54 +- crates/brk_computer/src/blocks/count/vecs.rs | 37 +- .../src/blocks/difficulty/compute.rs | 37 +- .../src/blocks/difficulty/import.rs | 6 +- .../src/blocks/difficulty/vecs.rs | 6 +- .../src/blocks/halving/compute.rs | 37 +- .../brk_computer/src/blocks/halving/import.rs | 11 +- .../brk_computer/src/blocks/halving/vecs.rs | 6 +- .../src/blocks/interval/compute.rs | 9 +- .../src/blocks/interval/import.rs | 29 +- .../brk_computer/src/blocks/interval/vecs.rs | 9 +- .../brk_computer/src/blocks/mining/compute.rs | 98 +- .../brk_computer/src/blocks/mining/import.rs | 43 +- crates/brk_computer/src/blocks/mining/vecs.rs | 36 +- .../src/blocks/rewards/compute.rs | 65 +- .../brk_computer/src/blocks/rewards/import.rs | 24 +- .../brk_computer/src/blocks/rewards/vecs.rs | 19 +- 
.../brk_computer/src/blocks/size/compute.rs | 13 +- crates/brk_computer/src/blocks/size/import.rs | 42 +- crates/brk_computer/src/blocks/size/vecs.rs | 10 +- .../brk_computer/src/blocks/time/compute.rs | 25 +- crates/brk_computer/src/blocks/time/import.rs | 30 +- crates/brk_computer/src/blocks/time/vecs.rs | 16 +- .../brk_computer/src/blocks/weight/compute.rs | 10 +- .../brk_computer/src/blocks/weight/import.rs | 26 +- crates/brk_computer/src/blocks/weight/vecs.rs | 5 +- .../src/cointime/activity/compute.rs | 49 +- .../src/cointime/activity/import.rs | 20 +- .../src/cointime/activity/vecs.rs | 10 +- .../src/cointime/adjusted/compute.rs | 35 +- .../src/cointime/adjusted/import.rs | 6 +- .../src/cointime/adjusted/vecs.rs | 6 +- .../brk_computer/src/cointime/cap/compute.rs | 33 +- .../brk_computer/src/cointime/cap/import.rs | 35 +- crates/brk_computer/src/cointime/cap/vecs.rs | 10 +- .../src/cointime/pricing/compute.rs | 79 +- .../src/cointime/pricing/import.rs | 42 +- .../brk_computer/src/cointime/pricing/vecs.rs | 18 +- .../src/cointime/supply/compute.rs | 39 +- .../src/cointime/supply/import.rs | 9 +- .../brk_computer/src/cointime/supply/vecs.rs | 4 +- .../src/cointime/value/compute.rs | 48 +- .../brk_computer/src/cointime/value/import.rs | 6 +- .../brk_computer/src/cointime/value/vecs.rs | 6 +- .../src/distribution/address/address_count.rs | 418 +- .../src/distribution/address/mod.rs | 5 +- .../src/distribution/cohorts/address/vecs.rs | 57 +- .../src/distribution/cohorts/utxo/vecs.rs | 12 +- .../src/distribution/compute/aggregates.rs | 2 +- .../src/distribution/compute/block_loop.rs | 134 +- .../src/distribution/compute/context.rs | 5 +- .../src/distribution/compute/readers.rs | 14 +- .../src/distribution/compute/recover.rs | 13 +- .../src/distribution/compute/write.rs | 9 +- .../src/distribution/metrics/activity.rs | 112 +- .../src/distribution/metrics/cost_basis.rs | 77 +- .../src/distribution/metrics/mod.rs | 25 +- .../src/distribution/metrics/outputs.rs | 81 + 
.../src/distribution/metrics/realized.rs | 529 +- .../src/distribution/metrics/relative.rs | 514 +- .../src/distribution/metrics/supply.rs | 159 +- .../src/distribution/metrics/unrealized.rs | 373 +- crates/brk_computer/src/distribution/vecs.rs | 165 +- crates/brk_computer/src/indexes/address.rs | 188 + .../src/indexes/address/import.rs | 84 - .../brk_computer/src/indexes/address/vecs.rs | 36 - .../brk_computer/src/indexes/block/compute.rs | 115 - .../brk_computer/src/indexes/block/import.rs | 22 - crates/brk_computer/src/indexes/block/mod.rs | 5 - crates/brk_computer/src/indexes/block/vecs.rs | 17 - crates/brk_computer/src/indexes/dateindex.rs | 28 + .../brk_computer/src/indexes/decadeindex.rs | 22 + .../src/indexes/difficultyepoch.rs | 22 + .../brk_computer/src/indexes/halvingepoch.rs | 20 + crates/brk_computer/src/indexes/height.rs | 26 + crates/brk_computer/src/indexes/mod.rs | 433 +- crates/brk_computer/src/indexes/monthindex.rs | 28 + .../brk_computer/src/indexes/quarterindex.rs | 22 + .../brk_computer/src/indexes/semesterindex.rs | 22 + .../brk_computer/src/indexes/time/compute.rs | 246 - .../brk_computer/src/indexes/time/import.rs | 40 - crates/brk_computer/src/indexes/time/mod.rs | 5 - crates/brk_computer/src/indexes/time/vecs.rs | 48 - .../src/indexes/transaction/compute.rs | 25 - .../src/indexes/transaction/import.rs | 33 - .../src/indexes/transaction/mod.rs | 5 - .../src/indexes/transaction/vecs.rs | 12 - crates/brk_computer/src/indexes/txindex.rs | 28 + crates/brk_computer/src/indexes/txinindex.rs | 22 + crates/brk_computer/src/indexes/txoutindex.rs | 22 + crates/brk_computer/src/indexes/weekindex.rs | 22 + crates/brk_computer/src/indexes/yearindex.rs | 24 + .../brk_computer/src/inputs/count/compute.rs | 7 +- .../brk_computer/src/inputs/count/import.rs | 10 +- crates/brk_computer/src/inputs/count/vecs.rs | 7 +- .../brk_computer/src/inputs/spent/compute.rs | 34 +- .../brk_computer/src/inputs/spent/import.rs | 4 +- 
crates/brk_computer/src/inputs/spent/vecs.rs | 4 +- .../src/internal/aggregation/average.rs | 22 +- .../src/internal/aggregation/cumulative.rs | 2 +- .../src/internal/aggregation/first.rs | 2 +- .../src/internal/aggregation/last.rs | 2 +- .../src/internal/aggregation/max.rs | 2 +- .../src/internal/aggregation/min.rs | 2 +- .../src/internal/aggregation/sum.rs | 23 +- .../src/internal/aggregation/sum_cum.rs | 19 + crates/brk_computer/src/internal/compute.rs | 146 +- .../src/internal/computed/block/full.rs | 1 - .../computed/block/height_date_bytes.rs | 37 + .../computed/block/height_date_first.rs | 72 + .../computed/block/height_date_last.rs | 72 + .../computed/block/height_date_max.rs | 72 + .../computed/block/height_date_min.rs | 72 + .../src/internal/computed/block/last.rs | 10 + .../internal/computed/block/lazy_sum_cum.rs | 77 + .../src/internal/computed/block/mod.rs | 12 + .../src/internal/computed/block/sum.rs | 23 +- .../src/internal/computed/block/sum_cum.rs | 13 +- .../src/internal/computed/chain/first.rs | 68 - .../src/internal/computed/chain/last.rs | 68 - .../src/internal/computed/chain/max.rs | 68 - .../src/internal/computed/chain/min.rs | 68 - .../src/internal/computed/chain/mod.rs | 13 - .../src/internal/computed/date/first.rs | 4 +- .../src/internal/computed/date/max.rs | 4 +- .../src/internal/computed/date/min.rs | 4 +- .../brk_computer/src/internal/computed/mod.rs | 4 - .../src/internal/computed/tx/distribution.rs | 80 +- .../src/internal/computed/tx/mod.rs | 2 - .../block}/distribution.rs | 10 +- .../src/internal/derived/block/first.rs | 82 + .../derived_block => derived/block}/full.rs | 11 +- .../derived_block => derived/block}/last.rs | 9 +- .../derived_block => derived/block}/mod.rs | 2 + .../derived_block => derived/block}/sum.rs | 14 +- .../block}/sum_cum.rs | 15 +- .../src/internal/derived/date/average.rs | 50 +- .../src/internal/derived/date/distribution.rs | 65 +- .../src/internal/derived/date/first.rs | 50 +- 
.../src/internal/derived/date/full.rs | 79 +- .../src/internal/derived/date/last.rs | 50 +- .../src/internal/derived/date/max.rs | 50 +- .../src/internal/derived/date/min.rs | 50 +- .../src/internal/derived/date/sum.rs | 49 +- .../src/internal/derived/date/sum_cum.rs | 59 +- .../brk_computer/src/internal/derived/mod.rs | 6 +- .../src/internal/derived/tx/distribution.rs | 108 + .../internal/{computed => derived}/tx/full.rs | 10 +- .../src/internal/derived/tx/mod.rs | 5 + .../brk_computer/src/internal/group/full.rs | 2 + .../brk_computer/src/internal/group/stats.rs | 3 + .../src/internal/group/sum_cum.rs | 8 + .../src/internal/lazy/block/binary_full.rs | 1 - .../src/internal/lazy/block/binary_last.rs | 291 + .../src/internal/lazy/block/binary_sum.rs | 26 +- .../src/internal/lazy/block/binary_sum_cum.rs | 116 +- .../lazy/block/binary_sum_cum_last.rs | 121 - .../src/internal/lazy/block/computed_full.rs | 81 + .../src/internal/lazy/block/distribution.rs | 95 + .../src/internal/lazy/block/full.rs | 1 - .../src/internal/lazy/block/last.rs | 21 +- .../src/internal/lazy/block/mod.rs | 8 +- .../src/internal/lazy/block/sum.rs | 1 - .../src/internal/lazy/block/sum_cum.rs | 3 +- .../lazy/date/binary_height_date_last.rs | 76 + .../src/internal/lazy/date/binary_last.rs | 622 +- .../src/internal/lazy/date/binary_sum.rs | 55 +- .../src/internal/lazy/date/binary_sum_cum.rs | 291 +- .../internal/lazy/date/binary_sum_cum_last.rs | 297 - .../src/internal/lazy/date/full.rs | 87 +- .../src/internal/lazy/date/last.rs | 40 +- .../src/internal/lazy/date/mod.rs | 4 +- .../src/internal/lazy/date/sum.rs | 20 +- .../src/internal/lazy/date/sum_cum.rs | 53 +- .../lazy/derived_block/binary_last.rs | 144 + .../internal/lazy/derived_block/binary_sum.rs | 1 - .../lazy/derived_block/binary_sum_cum.rs | 133 +- .../lazy/derived_block/binary_sum_cum_last.rs | 136 - .../src/internal/lazy/derived_block/full.rs | 9 +- .../src/internal/lazy/derived_block/last.rs | 29 +- 
.../src/internal/lazy/derived_block/mod.rs | 4 +- .../src/internal/lazy/derived_block/sum.rs | 1 - .../internal/lazy/derived_block/sum_cum.rs | 5 +- .../internal/lazy/derived_tx/distribution.rs | 67 + .../src/internal/lazy/derived_tx/full.rs | 104 +- .../src/internal/lazy/derived_tx/mod.rs | 2 + .../internal/lazy/transform/binary_sum_cum.rs | 164 +- .../lazy/transform/binary_sum_cum_last.rs | 68 - .../src/internal/lazy/transform/full.rs | 37 +- .../src/internal/lazy/transform/mod.rs | 6 +- .../internal/lazy/transform/percentiles.rs | 63 + .../src/internal/lazy/transform/stats.rs | 56 + .../src/internal/lazy/transform/sum_cum.rs | 33 + .../src/internal/specialized/constant.rs | 16 +- .../src/internal/specialized/lazy_period.rs | 88 + .../src/internal/specialized/mod.rs | 4 + .../src/internal/specialized/ohlc/computed.rs | 68 + .../src/internal/specialized/ohlc/lazy.rs | 32 + .../src/internal/specialized/ohlc/mod.rs | 9 + .../src/internal/specialized/ohlc/period.rs | 29 + .../src/internal/specialized/percentiles.rs | 14 +- .../src/internal/specialized/ratio.rs | 126 +- .../src/internal/specialized/stddev.rs | 102 +- .../specialized/value/block/binary.rs | 59 +- .../internal/specialized/value/block/full.rs | 2 +- .../specialized/value/block/height.rs | 49 + .../internal/specialized/value/block/last.rs | 25 +- .../internal/specialized/value/block/lazy.rs | 2 +- .../value/block/lazy_computed_sum_cum.rs | 97 + .../specialized/value/block/lazy_height.rs | 52 + .../specialized/value/block/lazy_last.rs | 46 + .../specialized/value/block/lazy_sum_cum.rs | 110 + .../internal/specialized/value/block/mod.rs | 12 +- .../internal/specialized/value/block/sum.rs | 2 +- .../specialized/value/block/sum_cum.rs | 41 +- .../specialized/value/date/block_date_last.rs | 81 + .../specialized/value/date/derived_last.rs | 25 +- .../internal/specialized/value/date/last.rs | 28 +- .../specialized/value/date/lazy_last.rs | 40 +- .../internal/specialized/value/date/mod.rs | 2 + 
.../value/derived_block/sum_cum.rs | 33 +- .../specialized/value/tx/derived_full.rs | 79 + .../internal/specialized/value/tx/dollars.rs | 14 +- .../src/internal/specialized/value/tx/full.rs | 73 +- .../src/internal/specialized/value/tx/mod.rs | 2 + .../src/internal/transform/mod.rs | 2 - .../internal/transform/percentage_btc_f64.rs | 14 - .../src/internal/vec/cumulative.rs | 2 +- crates/brk_computer/src/internal/vec/first.rs | 74 + .../brk_computer/src/internal/vec/median.rs | 24 - crates/brk_computer/src/internal/vec/mod.rs | 14 +- crates/brk_computer/src/internal/vec/pct10.rs | 24 - crates/brk_computer/src/internal/vec/pct25.rs | 24 - crates/brk_computer/src/internal/vec/pct75.rs | 24 - crates/brk_computer/src/internal/vec/pct90.rs | 24 - .../src/internal/vec/percentiles.rs | 36 + crates/brk_computer/src/internal/vec/sum.rs | 2 +- crates/brk_computer/src/lib.rs | 3 +- crates/brk_computer/src/market/ath/compute.rs | 38 +- crates/brk_computer/src/market/ath/import.rs | 48 +- crates/brk_computer/src/market/ath/vecs.rs | 22 +- crates/brk_computer/src/market/dca/compute.rs | 3 +- crates/brk_computer/src/market/dca/import.rs | 12 +- crates/brk_computer/src/market/dca/vecs.rs | 6 +- .../src/market/indicators/compute.rs | 98 +- .../src/market/indicators/import.rs | 113 +- .../src/market/indicators/vecs.rs | 42 +- .../src/market/lookback/compute.rs | 4 +- .../src/market/moving_average/compute.rs | 68 +- .../src/market/moving_average/import.rs | 171 +- .../src/market/moving_average/vecs.rs | 70 +- .../brk_computer/src/market/range/compute.rs | 31 +- .../brk_computer/src/market/range/import.rs | 18 +- crates/brk_computer/src/market/range/vecs.rs | 18 +- .../src/market/returns/compute.rs | 46 +- .../brk_computer/src/market/returns/import.rs | 38 +- .../brk_computer/src/market/returns/vecs.rs | 19 +- .../src/market/volatility/import.rs | 67 +- .../src/market/volatility/vecs.rs | 19 +- .../brk_computer/src/outputs/count/compute.rs | 26 +- 
.../brk_computer/src/outputs/count/import.rs | 18 +- crates/brk_computer/src/outputs/count/vecs.rs | 4 +- .../brk_computer/src/outputs/spent/compute.rs | 35 +- .../brk_computer/src/outputs/spent/import.rs | 2 +- crates/brk_computer/src/outputs/spent/vecs.rs | 2 +- crates/brk_computer/src/pools/mod.rs | 42 +- crates/brk_computer/src/pools/vecs.rs | 232 +- crates/brk_computer/src/positions.rs | 39 +- crates/brk_computer/src/price/cents/import.rs | 73 + .../{indexes/address => price/cents}/mod.rs | 0 crates/brk_computer/src/price/cents/vecs.rs | 10 + crates/brk_computer/src/price/compute.rs | 2 +- crates/brk_computer/src/price/fetch.rs | 52 +- crates/brk_computer/src/price/mod.rs | 12 +- crates/brk_computer/src/price/ohlc/import.rs | 22 - crates/brk_computer/src/price/ohlc/mod.rs | 4 - crates/brk_computer/src/price/ohlc/vecs.rs | 9 - crates/brk_computer/src/price/sats/compute.rs | 268 +- crates/brk_computer/src/price/sats/import.rs | 110 +- crates/brk_computer/src/price/sats/vecs.rs | 37 +- crates/brk_computer/src/price/usd/compute.rs | 295 +- crates/brk_computer/src/price/usd/import.rs | 164 +- crates/brk_computer/src/price/usd/vecs.rs | 50 +- .../brk_computer/src/scripts/count/compute.rs | 99 +- .../brk_computer/src/scripts/count/import.rs | 145 +- crates/brk_computer/src/scripts/count/vecs.rs | 37 +- .../brk_computer/src/scripts/value/compute.rs | 30 +- .../brk_computer/src/scripts/value/import.rs | 16 +- crates/brk_computer/src/scripts/value/vecs.rs | 2 +- .../brk_computer/src/supply/burned/compute.rs | 40 +- .../brk_computer/src/supply/burned/import.rs | 32 +- crates/brk_computer/src/supply/burned/vecs.rs | 4 +- .../src/supply/circulating/import.rs | 43 +- .../src/supply/circulating/vecs.rs | 14 +- .../src/supply/inflation/compute.rs | 17 +- .../src/supply/inflation/import.rs | 8 +- .../brk_computer/src/supply/inflation/vecs.rs | 8 +- .../src/supply/market_cap/import.rs | 33 +- .../src/supply/market_cap/vecs.rs | 15 +- crates/brk_computer/src/supply/vecs.rs | 2 
+- .../src/supply/velocity/compute.rs | 38 +- .../src/supply/velocity/import.rs | 24 +- .../brk_computer/src/supply/velocity/vecs.rs | 4 +- .../src/transactions/count/compute.rs | 8 +- .../src/transactions/count/import.rs | 18 +- .../src/transactions/count/vecs.rs | 4 +- .../src/transactions/fees/compute.rs | 50 +- .../src/transactions/fees/import.rs | 34 +- .../src/transactions/fees/vecs.rs | 12 +- .../src/transactions/size/compute.rs | 18 +- .../src/transactions/size/import.rs | 47 +- .../src/transactions/size/vecs.rs | 12 +- .../src/transactions/versions/compute.rs | 41 +- .../src/transactions/versions/import.rs | 21 +- .../src/transactions/versions/vecs.rs | 6 +- .../src/transactions/volume/compute.rs | 102 +- .../src/transactions/volume/import.rs | 33 +- .../src/transactions/volume/vecs.rs | 16 +- crates/brk_fetcher/Cargo.toml | 2 +- crates/brk_fetcher/src/binance.rs | 4 +- crates/brk_fetcher/src/brk.rs | 11 +- crates/brk_fetcher/src/kraken.rs | 4 +- crates/brk_fetcher/src/lib.rs | 2 +- crates/brk_fetcher/src/retry.rs | 2 +- crates/brk_fetcher/src/source.rs | 2 +- crates/brk_indexer/Cargo.toml | 2 +- crates/brk_indexer/examples/indexer.rs | 2 +- crates/brk_indexer/examples/indexer_bench.rs | 2 +- crates/brk_indexer/examples/indexer_bench2.rs | 2 +- crates/brk_indexer/examples/indexer_read.rs | 4 +- .../examples/indexer_read_speed.rs | 2 +- crates/brk_indexer/src/indexes.rs | 192 +- crates/brk_indexer/src/lib.rs | 26 +- crates/brk_indexer/src/processor/metadata.rs | 22 +- crates/brk_indexer/src/processor/tx.rs | 32 +- crates/brk_indexer/src/processor/txin.rs | 55 +- crates/brk_indexer/src/processor/txout.rs | 37 +- crates/brk_indexer/src/processor/types.rs | 6 +- crates/brk_indexer/src/readers.rs | 28 +- crates/brk_indexer/src/stores.rs | 78 +- crates/brk_indexer/src/vecs/address.rs | 309 - crates/brk_indexer/src/vecs/addresses.rs | 309 + crates/brk_indexer/src/vecs/blocks.rs | 62 +- crates/brk_indexer/src/vecs/inputs.rs | 75 + 
crates/brk_indexer/src/vecs/mod.rs | 138 +- crates/brk_indexer/src/vecs/output.rs | 99 - crates/brk_indexer/src/vecs/outputs.rs | 66 + crates/brk_indexer/src/vecs/scripts.rs | 99 + crates/brk_indexer/src/vecs/transactions.rs | 104 + crates/brk_indexer/src/vecs/tx.rs | 104 - crates/brk_indexer/src/vecs/txin.rs | 66 - crates/brk_indexer/src/vecs/txout.rs | 66 - crates/brk_logger/Cargo.toml | 9 +- crates/brk_logger/README.md | 4 +- crates/brk_logger/examples/log.rs | 2 +- crates/brk_logger/src/lib.rs | 307 +- crates/brk_mcp/Cargo.toml | 2 +- crates/brk_mcp/src/lib.rs | 10 +- crates/brk_mcp/src/route.rs | 2 +- crates/brk_mempool/Cargo.toml | 2 +- crates/brk_mempool/src/sync.rs | 2 +- crates/brk_query/src/impl/address.rs | 14 +- crates/brk_query/src/impl/block/info.rs | 22 +- crates/brk_query/src/impl/block/raw.rs | 13 +- crates/brk_query/src/impl/block/status.rs | 8 +- crates/brk_query/src/impl/block/timestamp.rs | 10 +- crates/brk_query/src/impl/block/txs.rs | 30 +- .../brk_query/src/impl/mining/block_fees.rs | 3 +- .../src/impl/mining/block_rewards.rs | 4 +- .../brk_query/src/impl/mining/block_sizes.rs | 4 +- .../src/impl/mining/dateindex_iter.rs | 18 +- .../brk_query/src/impl/mining/difficulty.rs | 27 +- crates/brk_query/src/impl/mining/epochs.rs | 19 +- crates/brk_query/src/impl/mining/hashrate.rs | 30 +- crates/brk_query/src/impl/mining/pools.rs | 4 +- .../brk_query/src/impl/mining/reward_stats.rs | 20 +- crates/brk_query/src/impl/transaction.rs | 87 +- crates/brk_query/src/lib.rs | 2 +- crates/brk_query/src/vecs.rs | 2 + crates/brk_reader/Cargo.toml | 2 +- crates/brk_reader/src/lib.rs | 2 +- crates/brk_rpc/Cargo.toml | 2 +- crates/brk_rpc/src/inner.rs | 2 +- crates/brk_rpc/src/lib.rs | 2 +- crates/brk_server/Cargo.toml | 3 +- crates/brk_server/examples/server.rs | 8 +- crates/brk_server/src/files/file.rs | 72 +- crates/brk_server/src/lib.rs | 33 +- crates/brk_store/src/any.rs | 1 + crates/brk_store/src/lib.rs | 11 +- crates/brk_traversable/src/lib.rs | 6 +- 
crates/brk_traversable/tests/derive_tests.rs | 115 - .../tests/traversable/common.rs | 162 + .../tests/traversable/computed_types.rs | 224 + .../tests/traversable/derived_date.rs | 172 + .../tests/traversable/group_types.rs | 239 + .../tests/traversable/lazy_aggregation.rs | 176 + .../brk_traversable/tests/traversable/main.rs | 12 + crates/brk_traversable_derive/Cargo.toml | 2 +- crates/brk_traversable_derive/src/lib.rs | 81 +- crates/brk_types/src/ohlc.rs | 12 + crates/brk_types/src/stored_u32.rs | 15 +- crates/brk_types/src/treenode.rs | 57 +- docs/README.md | 1 - modules/brk-client/index.js | 6481 ++++++++--------- modules/brk-client/package.json | 26 +- modules/lean-qr/2.6.0/index.d.ts | 656 -- modules/lean-qr/2.6.0/index.mjs | 2 - packages/brk_client/brk_client/__init__.py | 3006 ++++---- scripts/publish.sh | 1 - websites/bitview/index.html | 143 + websites/bitview/scripts/options/chain.js | 63 +- .../scripts/options/cohorts/address.js | 35 +- .../bitview/scripts/options/cohorts/utxo.js | 26 +- websites/bitview/scripts/options/cointime.js | 21 +- websites/bitview/service-worker.js | 148 +- 442 files changed, 17952 insertions(+), 20071 deletions(-) delete mode 100644 crates/brk_bundler/Cargo.toml delete mode 100644 crates/brk_bundler/README.md delete mode 100644 crates/brk_bundler/build.rs delete mode 100644 crates/brk_bundler/examples/bundle.rs delete mode 100644 crates/brk_bundler/src/lib.rs create mode 100644 crates/brk_computer/src/distribution/metrics/outputs.rs create mode 100644 crates/brk_computer/src/indexes/address.rs delete mode 100644 crates/brk_computer/src/indexes/address/import.rs delete mode 100644 crates/brk_computer/src/indexes/address/vecs.rs delete mode 100644 crates/brk_computer/src/indexes/block/compute.rs delete mode 100644 crates/brk_computer/src/indexes/block/import.rs delete mode 100644 crates/brk_computer/src/indexes/block/mod.rs delete mode 100644 crates/brk_computer/src/indexes/block/vecs.rs create mode 100644 
crates/brk_computer/src/indexes/dateindex.rs create mode 100644 crates/brk_computer/src/indexes/decadeindex.rs create mode 100644 crates/brk_computer/src/indexes/difficultyepoch.rs create mode 100644 crates/brk_computer/src/indexes/halvingepoch.rs create mode 100644 crates/brk_computer/src/indexes/height.rs create mode 100644 crates/brk_computer/src/indexes/monthindex.rs create mode 100644 crates/brk_computer/src/indexes/quarterindex.rs create mode 100644 crates/brk_computer/src/indexes/semesterindex.rs delete mode 100644 crates/brk_computer/src/indexes/time/compute.rs delete mode 100644 crates/brk_computer/src/indexes/time/import.rs delete mode 100644 crates/brk_computer/src/indexes/time/mod.rs delete mode 100644 crates/brk_computer/src/indexes/time/vecs.rs delete mode 100644 crates/brk_computer/src/indexes/transaction/compute.rs delete mode 100644 crates/brk_computer/src/indexes/transaction/import.rs delete mode 100644 crates/brk_computer/src/indexes/transaction/mod.rs delete mode 100644 crates/brk_computer/src/indexes/transaction/vecs.rs create mode 100644 crates/brk_computer/src/indexes/txindex.rs create mode 100644 crates/brk_computer/src/indexes/txinindex.rs create mode 100644 crates/brk_computer/src/indexes/txoutindex.rs create mode 100644 crates/brk_computer/src/indexes/weekindex.rs create mode 100644 crates/brk_computer/src/indexes/yearindex.rs create mode 100644 crates/brk_computer/src/internal/computed/block/height_date_bytes.rs create mode 100644 crates/brk_computer/src/internal/computed/block/height_date_first.rs create mode 100644 crates/brk_computer/src/internal/computed/block/height_date_last.rs create mode 100644 crates/brk_computer/src/internal/computed/block/height_date_max.rs create mode 100644 crates/brk_computer/src/internal/computed/block/height_date_min.rs create mode 100644 crates/brk_computer/src/internal/computed/block/lazy_sum_cum.rs delete mode 100644 crates/brk_computer/src/internal/computed/chain/first.rs delete mode 100644 
crates/brk_computer/src/internal/computed/chain/last.rs delete mode 100644 crates/brk_computer/src/internal/computed/chain/max.rs delete mode 100644 crates/brk_computer/src/internal/computed/chain/min.rs delete mode 100644 crates/brk_computer/src/internal/computed/chain/mod.rs rename crates/brk_computer/src/internal/{computed/derived_block => derived/block}/distribution.rs (89%) create mode 100644 crates/brk_computer/src/internal/derived/block/first.rs rename crates/brk_computer/src/internal/{computed/derived_block => derived/block}/full.rs (91%) rename crates/brk_computer/src/internal/{computed/derived_block => derived/block}/last.rs (88%) rename crates/brk_computer/src/internal/{computed/derived_block => derived/block}/mod.rs (84%) rename crates/brk_computer/src/internal/{computed/derived_block => derived/block}/sum.rs (88%) rename crates/brk_computer/src/internal/{computed/derived_block => derived/block}/sum_cum.rs (92%) create mode 100644 crates/brk_computer/src/internal/derived/tx/distribution.rs rename crates/brk_computer/src/internal/{computed => derived}/tx/full.rs (91%) create mode 100644 crates/brk_computer/src/internal/derived/tx/mod.rs create mode 100644 crates/brk_computer/src/internal/lazy/block/binary_last.rs delete mode 100644 crates/brk_computer/src/internal/lazy/block/binary_sum_cum_last.rs create mode 100644 crates/brk_computer/src/internal/lazy/block/computed_full.rs create mode 100644 crates/brk_computer/src/internal/lazy/block/distribution.rs create mode 100644 crates/brk_computer/src/internal/lazy/date/binary_height_date_last.rs delete mode 100644 crates/brk_computer/src/internal/lazy/date/binary_sum_cum_last.rs create mode 100644 crates/brk_computer/src/internal/lazy/derived_block/binary_last.rs delete mode 100644 crates/brk_computer/src/internal/lazy/derived_block/binary_sum_cum_last.rs create mode 100644 crates/brk_computer/src/internal/lazy/derived_tx/distribution.rs delete mode 100644 
crates/brk_computer/src/internal/lazy/transform/binary_sum_cum_last.rs create mode 100644 crates/brk_computer/src/internal/lazy/transform/percentiles.rs create mode 100644 crates/brk_computer/src/internal/lazy/transform/stats.rs create mode 100644 crates/brk_computer/src/internal/specialized/lazy_period.rs create mode 100644 crates/brk_computer/src/internal/specialized/ohlc/computed.rs create mode 100644 crates/brk_computer/src/internal/specialized/ohlc/lazy.rs create mode 100644 crates/brk_computer/src/internal/specialized/ohlc/mod.rs create mode 100644 crates/brk_computer/src/internal/specialized/ohlc/period.rs create mode 100644 crates/brk_computer/src/internal/specialized/value/block/height.rs create mode 100644 crates/brk_computer/src/internal/specialized/value/block/lazy_computed_sum_cum.rs create mode 100644 crates/brk_computer/src/internal/specialized/value/block/lazy_height.rs create mode 100644 crates/brk_computer/src/internal/specialized/value/block/lazy_last.rs create mode 100644 crates/brk_computer/src/internal/specialized/value/block/lazy_sum_cum.rs create mode 100644 crates/brk_computer/src/internal/specialized/value/date/block_date_last.rs create mode 100644 crates/brk_computer/src/internal/specialized/value/tx/derived_full.rs delete mode 100644 crates/brk_computer/src/internal/transform/percentage_btc_f64.rs create mode 100644 crates/brk_computer/src/internal/vec/first.rs delete mode 100644 crates/brk_computer/src/internal/vec/median.rs delete mode 100644 crates/brk_computer/src/internal/vec/pct10.rs delete mode 100644 crates/brk_computer/src/internal/vec/pct25.rs delete mode 100644 crates/brk_computer/src/internal/vec/pct75.rs delete mode 100644 crates/brk_computer/src/internal/vec/pct90.rs create mode 100644 crates/brk_computer/src/internal/vec/percentiles.rs create mode 100644 crates/brk_computer/src/price/cents/import.rs rename crates/brk_computer/src/{indexes/address => price/cents}/mod.rs (100%) create mode 100644 
crates/brk_computer/src/price/cents/vecs.rs delete mode 100644 crates/brk_computer/src/price/ohlc/import.rs delete mode 100644 crates/brk_computer/src/price/ohlc/mod.rs delete mode 100644 crates/brk_computer/src/price/ohlc/vecs.rs delete mode 100644 crates/brk_indexer/src/vecs/address.rs create mode 100644 crates/brk_indexer/src/vecs/addresses.rs create mode 100644 crates/brk_indexer/src/vecs/inputs.rs delete mode 100644 crates/brk_indexer/src/vecs/output.rs create mode 100644 crates/brk_indexer/src/vecs/outputs.rs create mode 100644 crates/brk_indexer/src/vecs/scripts.rs create mode 100644 crates/brk_indexer/src/vecs/transactions.rs delete mode 100644 crates/brk_indexer/src/vecs/tx.rs delete mode 100644 crates/brk_indexer/src/vecs/txin.rs delete mode 100644 crates/brk_indexer/src/vecs/txout.rs create mode 100644 crates/brk_traversable/tests/traversable/common.rs create mode 100644 crates/brk_traversable/tests/traversable/computed_types.rs create mode 100644 crates/brk_traversable/tests/traversable/derived_date.rs create mode 100644 crates/brk_traversable/tests/traversable/group_types.rs create mode 100644 crates/brk_traversable/tests/traversable/lazy_aggregation.rs create mode 100644 crates/brk_traversable/tests/traversable/main.rs delete mode 100644 modules/lean-qr/2.6.0/index.d.ts delete mode 100644 modules/lean-qr/2.6.0/index.mjs diff --git a/.gitignore b/.gitignore index 5af3b6170..27fe5784a 100644 --- a/.gitignore +++ b/.gitignore @@ -14,7 +14,9 @@ bridge/ _* !__*.py /*.md +/*.py /api.json +/*.json # Logs *.log* diff --git a/Cargo.lock b/Cargo.lock index 9017a709e..d0fdb1b1e 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -75,12 +75,6 @@ dependencies = [ "alloc-no-stdlib", ] -[[package]] -name = "allocator-api2" -version = "0.2.21" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "683d7910e743518b0e34f1186f92494becacb047c7b6bf616c96772180fef923" - [[package]] name = "android_system_properties" version = "0.1.5" @@ -140,18 +134,6 @@ 
dependencies = [ "windows-sys 0.61.2", ] -[[package]] -name = "anyhow" -version = "1.0.100" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a23eb6b1614318a8071c9b2521f36b424b2c83db5eb3a0fead4a6c0809af6e61" - -[[package]] -name = "append-only-vec" -version = "0.1.8" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2114736faba96bcd79595c700d03183f61357b9fbce14852515e59f3bee4ed4a" - [[package]] name = "arbitrary" version = "1.4.2" @@ -161,18 +143,6 @@ dependencies = [ "derive_arbitrary", ] -[[package]] -name = "arcstr" -version = "1.2.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "03918c3dbd7701a85c6b9887732e2921175f26c350b4563841d0958c21d57e6d" - -[[package]] -name = "arrayref" -version = "0.3.9" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "76a2e8124351fda1ef8aaaa3bbd7ebbcb486bbcd4225aca0aa0d84bb2db8fecb" - [[package]] name = "arrayvec" version = "0.7.6" @@ -192,28 +162,6 @@ dependencies = [ "tokio", ] -[[package]] -name = "async-scoped" -version = "0.9.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4042078ea593edffc452eef14e99fdb2b120caa4ad9618bcdeabc4a023b98740" -dependencies = [ - "futures", - "pin-project", - "tokio", -] - -[[package]] -name = "async-trait" -version = "0.1.89" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9035ad2d096bed7955a320ee7e2230574d28fd3c3a0f186cbea1ff3c7eed5dbb" -dependencies = [ - "proc-macro2", - "quote", - "syn", -] - [[package]] name = "atomic-waker" version = "1.1.2" @@ -293,12 +241,6 @@ dependencies = [ "windows-link", ] -[[package]] -name = "base-encode" -version = "0.3.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a17bd29f7c70f32e9387f4d4acfa5ea7b7749ef784fb78cf382df97069337b8c" - [[package]] name = "base58ck" version = "0.1.0" @@ -321,16 +263,6 @@ version = "0.22.1" source = 
"registry+https://github.com/rust-lang/crates.io-index" checksum = "72b3254f16251a8381aa12e40e3c4d2f0199f8c6508fbecb9d91f575e0fbb8c6" -[[package]] -name = "base64-simd" -version = "0.8.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "339abbe78e73178762e23bea9dfd08e697eb3f3301cd4be981c0f78ba5859195" -dependencies = [ - "outref", - "vsimd", -] - [[package]] name = "bech32" version = "0.11.1" @@ -343,26 +275,6 @@ version = "0.2.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "ef0a3155e943e341e557863e69a708999c94ede624e37865c8e2a91b94efa78f" -[[package]] -name = "bincode" -version = "2.0.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "36eaf5d7b090263e8150820482d5d93cd964a81e4019913c972f4edcc6edb740" -dependencies = [ - "bincode_derive", - "serde", - "unty", -] - -[[package]] -name = "bincode_derive" -version = "2.0.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "bf95709a440f45e986983918d0e8a1f30a9b1df04918fc828670606804ac3c09" -dependencies = [ - "virtue", -] - [[package]] name = "bindgen" version = "0.72.1" @@ -372,7 +284,7 @@ dependencies = [ "bitflags 2.10.0", "cexpr", "clang-sys", - "itertools 0.13.0", + "itertools", "proc-macro2", "quote", "regex", @@ -381,21 +293,6 @@ dependencies = [ "syn", ] -[[package]] -name = "bit-set" -version = "0.8.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "08807e080ed7f9d5433fa9b275196cfc35414f66a0c79d864dc51a0d825231a3" -dependencies = [ - "bit-vec", -] - -[[package]] -name = "bit-vec" -version = "0.8.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5e764a1d40d510daf35e07be9eb06e75770908c27d411ee6c92109c9840eaaf7" - [[package]] name = "bitcoin" version = "0.32.8" @@ -485,31 +382,6 @@ name = "bitflags" version = "2.10.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = 
"812e12b5285cc515a9c72a5c1d3b6d46a19dac5acfef5265968c166106e31dd3" -dependencies = [ - "serde_core", -] - -[[package]] -name = "blake3" -version = "1.8.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3888aaa89e4b2a40fca9848e400f6a658a5a3978de7be858e209cafa8be9a4a0" -dependencies = [ - "arrayref", - "arrayvec", - "cc", - "cfg-if", - "constant_time_eq", -] - -[[package]] -name = "block-buffer" -version = "0.10.4" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3078c7629b62d3f0439517fa394996acacc5cbc91c5a20d8c658e77abd503a71" -dependencies = [ - "generic-array", -] [[package]] name = "block2" @@ -526,7 +398,6 @@ version = "0.1.0-alpha.2" dependencies = [ "brk_bencher", "brk_bindgen", - "brk_bundler", "brk_client", "brk_cohort", "brk_computer", @@ -551,8 +422,8 @@ name = "brk_alloc" version = "0.1.0-alpha.2" dependencies = [ "libmimalloc-sys", - "log", "mimalloc", + "tracing", ] [[package]] @@ -586,25 +457,12 @@ dependencies = [ "vecdb", ] -[[package]] -name = "brk_bundler" -version = "0.1.0-alpha.2" -dependencies = [ - "brk_rolldown", - "env_logger", - "log", - "notify", - "sugar_path", - "tokio", -] - [[package]] name = "brk_cli" version = "0.1.0-alpha.2" dependencies = [ "brk_alloc", "brk_bindgen", - "brk_bundler", "brk_computer", "brk_error", "brk_fetcher", @@ -618,11 +476,12 @@ dependencies = [ "brk_server", "clap", "color-eyre", - "log", + "importmap", "minreq", "serde", "tokio", "toml", + "tracing", "vecdb", "zip", ] @@ -669,13 +528,13 @@ dependencies = [ "brk_types", "color-eyre", "derive_more", - "log", "pco", "rayon", "rustc-hash", "schemars", "serde", "smallvec", + "tracing", "vecdb", ] @@ -701,9 +560,9 @@ dependencies = [ "brk_error", "brk_logger", "brk_types", - "log", "minreq", "serde_json", + "tracing", ] [[package]] @@ -724,10 +583,10 @@ dependencies = [ "brk_types", "color-eyre", "fjall", - "log", "rayon", "rlimit", "rustc-hash", + "tracing", "vecdb", ] @@ -745,11 +604,12 @@ dependencies 
= [ name = "brk_logger" version = "0.1.0-alpha.2" dependencies = [ - "env_logger", "jiff", - "log", + "logroller", "owo-colors", - "parking_lot", + "tracing", + "tracing-appender", + "tracing-subscriber", ] [[package]] @@ -758,11 +618,11 @@ version = "0.1.0-alpha.2" dependencies = [ "axum", "brk_rmcp", - "log", "minreq", "schemars", "serde", "serde_json", + "tracing", ] [[package]] @@ -774,10 +634,10 @@ dependencies = [ "brk_rpc", "brk_types", "derive_more", - "log", "parking_lot", "rustc-hash", "smallvec", + "tracing", ] [[package]] @@ -813,9 +673,9 @@ dependencies = [ "brk_types", "crossbeam", "derive_more", - "log", "parking_lot", "rayon", + "tracing", ] [[package]] @@ -861,370 +721,6 @@ dependencies = [ "syn", ] -[[package]] -name = "brk_rolldown" -version = "0.7.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e4a03eb8d04b8b9d23aa81eaf2f13328396ee32501132b70c5ad299a03c098a3" -dependencies = [ - "anyhow", - "append-only-vec", - "arcstr", - "bitflags 2.10.0", - "brk_rolldown_common", - "brk_rolldown_dev_common", - "brk_rolldown_devtools", - "brk_rolldown_ecmascript", - "brk_rolldown_ecmascript_utils", - "brk_rolldown_error", - "brk_rolldown_fs", - "brk_rolldown_plugin", - "brk_rolldown_plugin_chunk_import_map", - "brk_rolldown_plugin_data_uri", - "brk_rolldown_plugin_hmr", - "brk_rolldown_plugin_lazy_compilation", - "brk_rolldown_plugin_oxc_runtime", - "brk_rolldown_resolver", - "brk_rolldown_sourcemap", - "brk_rolldown_std_utils", - "brk_rolldown_tracing", - "brk_rolldown_utils", - "brk_string_wizard", - "commondir", - "css-module-lexer", - "futures", - "indexmap", - "itertools 0.14.0", - "itoa", - "json-escape-simd", - "memchr", - "oxc", - "oxc_allocator", - "oxc_ecmascript", - "oxc_index", - "oxc_traverse", - "petgraph", - "rayon", - "rolldown-notify", - "rustc-hash", - "serde", - "serde_json", - "sugar_path", - "tokio", - "tracing", - "url", - "xxhash-rust", -] - -[[package]] -name = "brk_rolldown_common" -version = "0.7.0" 
-source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5bcba1505be9175212c59de099e29b06caff51a0391ade1fe2f778ef7fe62ac5" -dependencies = [ - "anyhow", - "arcstr", - "bitflags 2.10.0", - "brk_rolldown_ecmascript", - "brk_rolldown_error", - "brk_rolldown_sourcemap", - "brk_rolldown_std_utils", - "brk_rolldown_utils", - "brk_string_wizard", - "dashmap", - "derive_more", - "fast-glob", - "itertools 0.14.0", - "num-bigint", - "oxc", - "oxc_ecmascript", - "oxc_index", - "oxc_resolver", - "rustc-hash", - "serde", - "serde_json", - "simdutf8", - "sugar_path", - "tokio", -] - -[[package]] -name = "brk_rolldown_dev_common" -version = "0.7.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "af076c673b21965be6817828da3d0abb036740f936cbd2a4745320f41d50c7b9" -dependencies = [ - "brk_rolldown_common", - "brk_rolldown_error", - "derive_more", -] - -[[package]] -name = "brk_rolldown_devtools" -version = "0.7.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1fe858727eb5c5fa658b3278f0b77c4a346777b9ab7f80281227544ddea240a0" -dependencies = [ - "blake3", - "brk_rolldown_devtools_action", - "dashmap", - "rustc-hash", - "serde", - "serde_json", - "tracing", - "tracing-subscriber", -] - -[[package]] -name = "brk_rolldown_devtools_action" -version = "0.7.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "58ce01745ba57a89ff281495195f2a1dd1a9f1827ea97b8cdb0f456dd70aad7e" -dependencies = [ - "serde", - "ts-rs", -] - -[[package]] -name = "brk_rolldown_ecmascript" -version = "0.7.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d800edb568de11ed78992999714715967ff10546680074200e0e55cc5c3c1c96" -dependencies = [ - "arcstr", - "brk_rolldown_error", - "oxc", - "oxc_sourcemap", - "self_cell", -] - -[[package]] -name = "brk_rolldown_ecmascript_utils" -version = "0.7.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"7e9929eb305c9e89c4c7fa67609b646ce1dac106bef8174e2efec49cf5e4860c" -dependencies = [ - "brk_rolldown_common", - "brk_rolldown_utils", - "oxc", - "smallvec", -] - -[[package]] -name = "brk_rolldown_error" -version = "0.7.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "edf088c2f5c281a9414cdaca81b43d0aa3d4f58e495dde3b4bf9b32b2692962b" -dependencies = [ - "anyhow", - "arcstr", - "bitflags 2.10.0", - "derive_more", - "heck", - "oxc", - "oxc_resolver", - "rolldown-ariadne", - "ropey", - "rustc-hash", - "sugar_path", -] - -[[package]] -name = "brk_rolldown_fs" -version = "0.7.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "84028eb06b4c568820acf53a38ec655ea53d0068d3d9fc182c95a8fe6dade8de" -dependencies = [ - "oxc_resolver", - "vfs", -] - -[[package]] -name = "brk_rolldown_plugin" -version = "0.7.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "da9cca9f803f2b59cd995f95a274e4cb2444390773077bd43c09ce0975a07456" -dependencies = [ - "anyhow", - "arcstr", - "async-trait", - "bitflags 2.10.0", - "brk_rolldown_common", - "brk_rolldown_devtools", - "brk_rolldown_ecmascript", - "brk_rolldown_error", - "brk_rolldown_resolver", - "brk_rolldown_sourcemap", - "brk_rolldown_utils", - "brk_string_wizard", - "dashmap", - "derive_more", - "nodejs-built-in-modules", - "oxc_index", - "rustc-hash", - "serde", - "serde_json", - "sugar_path", - "tokio", - "tracing", - "typedmap", -] - -[[package]] -name = "brk_rolldown_plugin_chunk_import_map" -version = "0.7.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "42f2a46fe5e18e0a32117ad1bea1a965b7a612d4ff02cdddefa45974fca2d918" -dependencies = [ - "arcstr", - "brk_rolldown_common", - "brk_rolldown_plugin", - "brk_rolldown_utils", - "rustc-hash", - "serde_json", - "xxhash-rust", -] - -[[package]] -name = "brk_rolldown_plugin_data_uri" -version = "0.7.0" -source = "registry+https://github.com/rust-lang/crates.io-index" 
-checksum = "7ac7a2b36c4447385800a3ad79cc8abc490d6321850f4976a641bceda12fa91a" -dependencies = [ - "arcstr", - "base64-simd", - "brk_rolldown_common", - "brk_rolldown_plugin", - "brk_rolldown_utils", - "simdutf8", - "urlencoding", -] - -[[package]] -name = "brk_rolldown_plugin_hmr" -version = "0.7.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1f5f24a619e9a43ecf4716eafb10d7e92b5a5b36ef0a8ec844e7e4acae66700c" -dependencies = [ - "arcstr", - "brk_rolldown_common", - "brk_rolldown_plugin", - "oxc", -] - -[[package]] -name = "brk_rolldown_plugin_lazy_compilation" -version = "0.7.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "99cc47862efec15f1ff9a999059eceac70864170af745ce6709e12d20178e953" -dependencies = [ - "arcstr", - "brk_rolldown_common", - "brk_rolldown_plugin", - "brk_rolldown_utils", -] - -[[package]] -name = "brk_rolldown_plugin_oxc_runtime" -version = "0.7.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c7bc9949621e6578900c55bee1d21d02846dd5c3d9779e014726cc2e07f3a5e8" -dependencies = [ - "arcstr", - "brk_rolldown_plugin", - "brk_rolldown_utils", - "phf", -] - -[[package]] -name = "brk_rolldown_resolver" -version = "0.7.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7ca8c556deb2a8ede39b31e354983d050831d61e70fcc1b306a9523c4d5714c8" -dependencies = [ - "anyhow", - "arcstr", - "brk_rolldown_common", - "brk_rolldown_fs", - "brk_rolldown_utils", - "dashmap", - "itertools 0.14.0", - "oxc_resolver", - "sugar_path", -] - -[[package]] -name = "brk_rolldown_sourcemap" -version = "0.7.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6d31ca16e9e3b35c18912426cac514389a9f90926cf48c0a30493ca563142fda" -dependencies = [ - "brk_rolldown_utils", - "memchr", - "oxc", - "oxc_sourcemap", - "rustc-hash", -] - -[[package]] -name = "brk_rolldown_std_utils" -version = "0.7.0" -source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "a5ecd853b69e91240e4274d7fe4e8ccdcda6b873ff0587cc349a0824eff2f5c3" -dependencies = [ - "regex", -] - -[[package]] -name = "brk_rolldown_tracing" -version = "0.7.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b193c3c65b2c3d42b33b07e2bfd9beaa57e7173acd0be3160f1287b73ed40f90" -dependencies = [ - "tracing", - "tracing-chrome", - "tracing-subscriber", -] - -[[package]] -name = "brk_rolldown_utils" -version = "0.7.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "78ea0931cd583c7f4390d471e0b2d2ec2ed41281ab84d075c58d47baca3a03de" -dependencies = [ - "anyhow", - "arcstr", - "async-scoped", - "base-encode", - "base64-simd", - "brk_rolldown_error", - "brk_rolldown_std_utils", - "cow-utils", - "dashmap", - "fast-glob", - "form_urlencoded", - "futures", - "indexmap", - "infer", - "itoa", - "memchr", - "mime", - "nom 8.0.0", - "oxc", - "oxc_index", - "phf", - "rayon", - "regex", - "regress", - "rustc-hash", - "serde_json", - "simdutf8", - "sugar_path", - "tokio", - "uuid", - "xxhash-rust", -] - [[package]] name = "brk_rpc" version = "0.1.0-alpha.2" @@ -1234,8 +730,8 @@ dependencies = [ "brk_error", "brk_logger", "brk_types", - "log", "parking_lot", + "tracing", ] [[package]] @@ -1259,7 +755,6 @@ dependencies = [ "brk_types", "derive_more", "jiff", - "log", "quick_cache", "schemars", "serde", @@ -1281,19 +776,6 @@ dependencies = [ "rustc-hash", ] -[[package]] -name = "brk_string_wizard" -version = "0.7.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7b426c5c8e9dc5d9117930d8dada29790a3be0a6e1ab329a915253272a666998" -dependencies = [ - "memchr", - "oxc_index", - "oxc_sourcemap", - "rustc-hash", - "serde", -] - [[package]] name = "brk_traversable" version = "0.1.0-alpha.2" @@ -1362,9 +844,6 @@ name = "bumpalo" version = "3.19.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = 
"5dd9dc738b7a8311c7ade152424974d8115f2cdad61e8dab8dac9f2362298510" -dependencies = [ - "allocator-api2", -] [[package]] name = "bytemuck" @@ -1396,15 +875,6 @@ version = "0.10.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "dda4398f387cc6395a3e93b3867cd9abda914c97a0b344d1eefb2e5c51785fca" -[[package]] -name = "castaway" -version = "0.2.4" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "dec551ab6e7578819132c713a93c022a05d60159dc86e7a7050223577484c55a" -dependencies = [ - "rustversion", -] - [[package]] name = "cc" version = "1.2.51" @@ -1423,18 +893,7 @@ version = "0.6.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "6fac387a98bb7c37292057cffc56d62ecb629900026402633ae9160df93a8766" dependencies = [ - "nom 7.1.3", -] - -[[package]] -name = "cfb" -version = "0.7.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d38f2da7a0a2c4ccf0065be06397cc26a81f4e528be095826eee9d4adbb8c60f" -dependencies = [ - "byteorder", - "fnv", - "uuid", + "nom", ] [[package]] @@ -1553,30 +1012,6 @@ version = "1.0.4" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "b05b61dc5112cbb17e4b6cd61790d9845d13888356391624cbe7e41efeac1e75" -[[package]] -name = "commondir" -version = "1.0.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ab552acb7c0a751c75c3dd4f9b95d31ed85c985ce5c70232a2952ffbe7ecfda5" -dependencies = [ - "thiserror 1.0.69", -] - -[[package]] -name = "compact_str" -version = "0.9.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3fdb1325a1cece981e8a296ab8f0f9b63ae357bd0784a9faaf548cc7b480707a" -dependencies = [ - "castaway", - "cfg-if", - "itoa", - "rustversion", - "ryu", - "serde", - "static_assertions", -] - [[package]] name = "compare" version = "0.0.6" @@ -1603,21 +1038,6 @@ version = "0.4.31" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = 
"75984efb6ed102a0d42db99afb6c1948f0380d1d91808d5529916e6c08b49d8d" -[[package]] -name = "concurrent_lru" -version = "0.2.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7feb5cb312f774e8a24540e27206db4e890f7d488563671d24a16389cf4c2e4e" -dependencies = [ - "once_cell", -] - -[[package]] -name = "constant_time_eq" -version = "0.3.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7c74b8349d32d297c9134b8c88677813a227df8f779daa29bfc29c183fe3dca6" - [[package]] name = "convert_case" version = "0.10.0" @@ -1679,21 +1099,6 @@ dependencies = [ "libc", ] -[[package]] -name = "cow-utils" -version = "0.1.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "417bef24afe1460300965a25ff4a24b8b45ad011948302ec221e8a0a81eb2c79" - -[[package]] -name = "cpufeatures" -version = "0.2.17" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "59ed5838eebb26a2bb2e58f6d5b5316989ae9d08bab10e0e6d103e656d1b0280" -dependencies = [ - "libc", -] - [[package]] name = "crc32fast" version = "1.5.0" @@ -1775,25 +1180,6 @@ version = "0.2.4" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "460fbee9c2c2f33933d720630a6a0bac33ba7053db5344fac858d4b8952d77d5" -[[package]] -name = "crypto-common" -version = "0.1.7" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "78c8292055d1c1df0cce5d180393dc8cce0abec0a7102adb6c7b1eef6016d60a" -dependencies = [ - "generic-array", - "typenum", -] - -[[package]] -name = "css-module-lexer" -version = "0.0.15" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0b51940c54c6ca015d3add383571ec5610114466eb67aa0a27096e1dcf3c9e29" -dependencies = [ - "smallvec", -] - [[package]] name = "ctrlc" version = "3.5.1" @@ -1903,16 +1289,6 @@ dependencies = [ "unicode-xid", ] -[[package]] -name = "digest" -version = "0.10.7" -source = "registry+https://github.com/rust-lang/crates.io-index" 
-checksum = "9ed9a281f7bc9b7576e61468ba615a66a5c8cfdff42420a70aa82701a3b1e292" -dependencies = [ - "block-buffer", - "crypto-common", -] - [[package]] name = "dirs" version = "6.0.0" @@ -1966,12 +1342,6 @@ dependencies = [ "libloading", ] -[[package]] -name = "dragonbox_ecma" -version = "0.0.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d742b56656e8b14d63e7ea9806597b1849ae25412584c8adf78c0f67bd985e66" - [[package]] name = "dtype_dispatch" version = "0.1.1" @@ -2002,12 +1372,6 @@ version = "1.15.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "48c757948c5ede0e46177b7add2e67155f70e33c07fea8284df6576da70b3719" -[[package]] -name = "endian-type" -version = "0.2.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "869b0adbda23651a9c5c0c3d270aac9fcb52e8622a8f2b17e57802d7791962f2" - [[package]] name = "enum_dispatch" version = "0.3.13" @@ -2020,29 +1384,6 @@ dependencies = [ "syn", ] -[[package]] -name = "env_filter" -version = "0.1.4" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1bf3c259d255ca70051b30e2e95b5446cdb8949ac4cd22c0d7fd634d89f568e2" -dependencies = [ - "log", - "regex", -] - -[[package]] -name = "env_logger" -version = "0.11.8" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "13c863f0904021b108aa8b2f55046443e6b1ebde8fd4a15c399893aae4fa069f" -dependencies = [ - "anstream", - "anstyle", - "env_filter", - "jiff", - "log", -] - [[package]] name = "equivalent" version = "1.0.2" @@ -2069,26 +1410,6 @@ dependencies = [ "once_cell", ] -[[package]] -name = "fancy-regex" -version = "0.17.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "72cf461f865c862bb7dc573f643dd6a2b6842f7c30b07882b56bd148cc2761b8" -dependencies = [ - "bit-set", - "regex-automata", - "regex-syntax", -] - -[[package]] -name = "fast-glob" -version = "1.0.0" -source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "3d26eec0ae9682c457cb0f85de67ad417b716ae852736a5d94c2ad6e92a997c9" -dependencies = [ - "arrayvec", -] - [[package]] name = "fastrand" version = "2.3.0" @@ -2104,35 +1425,17 @@ dependencies = [ "simd-adler32", ] -[[package]] -name = "filetime" -version = "0.2.26" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "bc0505cd1b6fa6580283f6bdf70a73fcf4aba1184038c90902b92b3dd0df63ed" -dependencies = [ - "cfg-if", - "libc", - "libredox", - "windows-sys 0.60.2", -] - [[package]] name = "find-msvc-tools" version = "0.1.6" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "645cbb3a84e60b7531617d5ae4e57f7e27308f6445f5abf653209ea76dec8dff" -[[package]] -name = "fixedbitset" -version = "0.5.7" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1d674e81391d1e1ab681a28d99df07927c6d4aa5b027d7da16ba32d1d21ecd99" - [[package]] name = "fjall" -version = "3.0.0" +version = "3.0.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4986f550347ed1666561f36e8bf1be3c97df72850ecef0140129da6e2d0aa911" +checksum = "4f69637c02d38ad1b0f003101d0195a60368130aa17d9ef78b1557d265a22093" dependencies = [ "byteorder-lite", "byteview", @@ -2156,15 +1459,6 @@ dependencies = [ "miniz_oxide", ] -[[package]] -name = "float-cmp" -version = "0.10.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b09cf3155332e944990140d967ff5eceb70df778b34f77d8075db46e4704e6d8" -dependencies = [ - "num-traits", -] - [[package]] name = "float-ord" version = "0.3.2" @@ -2186,18 +1480,6 @@ version = "1.0.7" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "3f9eec918d3f24069decb9af1554cad7c880e2da24a9afd88aca000531ab82c1" -[[package]] -name = "foldhash" -version = "0.1.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"d9c4f5dac5e15c24eb999c26181a6ca40b39fe946cbe4c263c7209467bc83af2" - -[[package]] -name = "foldhash" -version = "0.2.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "77ce24cb58228fbb8aa041425bb1050850ac19177686ea6e0f41a70416f56fdb" - [[package]] name = "font-kit" version = "0.14.3" @@ -2270,15 +1552,6 @@ dependencies = [ "pkg-config", ] -[[package]] -name = "fsevent-sys" -version = "4.1.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "76ee7a02da4d231650c7cea31349b889be2f45ddb3ef3032d2ec8185f6313fd2" -dependencies = [ - "libc", -] - [[package]] name = "futures" version = "0.3.31" @@ -2368,16 +1641,6 @@ dependencies = [ "slab", ] -[[package]] -name = "generic-array" -version = "0.14.7" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "85649ca51fd72272d7821adaf274ad91c288277713d9c18820d8499a7ff69e9a" -dependencies = [ - "typenum", - "version_check", -] - [[package]] name = "getrandom" version = "0.2.16" @@ -2434,40 +1697,17 @@ dependencies = [ "zerocopy", ] -[[package]] -name = "halfbrown" -version = "0.4.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0c7ed2f2edad8a14c8186b847909a41fbb9c3eafa44f88bd891114ed5019da09" -dependencies = [ - "hashbrown 0.16.1", -] - [[package]] name = "hashbrown" version = "0.14.5" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "e5274423e17b7c9fc20b6e7e208532f9b19825d82dfd615708b70edd83df41f1" -[[package]] -name = "hashbrown" -version = "0.15.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9229cfe53dfd69f0609a49f65461bd93001ea1ef889cd5529dd176593f5338a1" -dependencies = [ - "foldhash 0.1.5", -] - [[package]] name = "hashbrown" version = "0.16.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "841d1cc9bed7f9236f321df977030373f4a4163ae1a7dbfe1a51a2c1a51d9100" -dependencies = [ - "allocator-api2", - "equivalent", - "foldhash 
0.2.0", -] [[package]] name = "heck" @@ -2718,6 +1958,18 @@ dependencies = [ "png", ] +[[package]] +name = "importmap" +version = "0.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9f6515f68008bdbc79963205f7e6dab4bb4564a1ca79ae22238d0ae8edb2528f" +dependencies = [ + "rapidhash", + "serde", + "serde_json", + "walkdir", +] + [[package]] name = "indenter" version = "0.3.4" @@ -2726,9 +1978,9 @@ checksum = "964de6e86d545b246d84badc0fef527924ace5134f30641c203ef52ba83f58d5" [[package]] name = "indexmap" -version = "2.12.1" +version = "2.13.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0ad4bb2b565bca0645f4d68c5c9af97fba094e9791da685bf83cb5f3ce74acf2" +checksum = "7714e70437a7dc3ac8eb7e6f8df75fd8eb422675fc7678aff7364301092b1017" dependencies = [ "equivalent", "hashbrown 0.16.1", @@ -2736,35 +1988,6 @@ dependencies = [ "serde_core", ] -[[package]] -name = "infer" -version = "0.19.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a588916bfdfd92e71cacef98a63d9b1f0d74d6599980d11894290e7ddefffcf7" -dependencies = [ - "cfb", -] - -[[package]] -name = "inotify" -version = "0.11.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f37dccff2791ab604f9babef0ba14fbe0be30bd368dc541e2b08d07c8aa908f3" -dependencies = [ - "bitflags 2.10.0", - "inotify-sys", - "libc", -] - -[[package]] -name = "inotify-sys" -version = "0.1.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e05c02b5e89bff3b946cedeca278abc628fe811e604f027c45a8aa3cf793d0eb" -dependencies = [ - "libc", -] - [[package]] name = "interval-heap" version = "0.0.5" @@ -2789,15 +2012,6 @@ dependencies = [ "either", ] -[[package]] -name = "itertools" -version = "0.14.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2b192c782037fadd9cfa75548310488aabdbf3d2da73885b31bd0abd03351285" -dependencies = [ - "either", -] - [[package]] name = "itoa" 
version = "1.0.17" @@ -2806,9 +2020,9 @@ checksum = "92ecc6618181def0457392ccd0ee51198e065e016d1d527a7ac1b6dc7c1f09d2" [[package]] name = "jiff" -version = "0.2.17" +version = "0.2.18" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a87d9b8105c23642f50cbbae03d1f75d8422c5cb98ce7ee9271f7ff7505be6b8" +checksum = "e67e8da4c49d6d9909fe03361f9b620f58898859f5c7aded68351e85e71ecf50" dependencies = [ "jiff-static", "jiff-tzdb-platform", @@ -2821,9 +2035,9 @@ dependencies = [ [[package]] name = "jiff-static" -version = "0.2.17" +version = "0.2.18" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b787bebb543f8969132630c51fd0afab173a86c6abae56ff3b9e5e3e3f9f6e58" +checksum = "e0c84ee7f197eca9a86c6fd6cb771e55eb991632f15f2bc3ca6ec838929e6e78" dependencies = [ "proc-macro2", "quote", @@ -2871,21 +2085,6 @@ dependencies = [ "wasm-bindgen", ] -[[package]] -name = "json-escape-simd" -version = "3.0.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a3c2a6c0b4b5637c41719973ef40c6a1cf564f9db6958350de6193fbee9c23f5" - -[[package]] -name = "json-strip-comments" -version = "3.1.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "25376d12b2f6ae53f986f86e2a808a56af03d72284ae24fc35a2e290d09ee3c3" -dependencies = [ - "memchr", -] - [[package]] name = "jsonrpc" version = "0.18.0" @@ -2898,26 +2097,6 @@ dependencies = [ "serde_json", ] -[[package]] -name = "kqueue" -version = "1.1.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "eac30106d7dce88daf4a3fcb4879ea939476d5074a9b7ddd0fb97fa4bed5596a" -dependencies = [ - "kqueue-sys", - "libc", -] - -[[package]] -name = "kqueue-sys" -version = "1.0.4" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ed9625ffda8729b85e45cf04090035ac368927b8cebc34898e7c120f52e4838b" -dependencies = [ - "bitflags 1.3.2", - "libc", -] - [[package]] name = "lazy_static" version = "1.5.0" @@ 
-2926,9 +2105,9 @@ checksum = "bbd2bcb4c963f2ddae06a2efc7e9f3591312473c50c6685e1f298068316e66fe" [[package]] name = "libc" -version = "0.2.179" +version = "0.2.180" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c5a2d376baa530d1238d133232d15e239abad80d05838b4b59354e5268af431f" +checksum = "bcc35a38544a891a5f7c865aca548a982ccb3b8650a5b06d0fd33a10283c56fc" [[package]] name = "libloading" @@ -2970,7 +2149,6 @@ checksum = "3d0b95e02c851351f877147b7deea7b1afb1df71b63aa5f8270716e0c5720616" dependencies = [ "bitflags 2.10.0", "libc", - "redox_syscall 0.7.0", ] [[package]] @@ -3010,10 +2188,22 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "5e5032e24019045c762d3c0f28f5b6b8bbf38563a65908389bf7978758920897" [[package]] -name = "lsm-tree" -version = "3.0.0" +name = "logroller" +version = "0.1.10" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3a206e87e8bc38114045060ec1fc6bc4e4559748a37e9622b910d80e48863e87" +checksum = "83db12bbf439ebe64c0b0e4402f435b6f866db498fc1ae17e1b5d1a01625e2be" +dependencies = [ + "chrono", + "flate2", + "regex", + "thiserror 1.0.69", +] + +[[package]] +name = "lsm-tree" +version = "3.0.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b875f1dfe14f557f805b167fb9b0fc54c5560c7a4bd6ae02535b2846f276a8cb" dependencies = [ "byteorder-lite", "byteview", @@ -3046,6 +2236,15 @@ version = "0.12.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "ab6473172471198271ff72e9379150e9dfd70d8e533e0752a27e515b48dd375e" +[[package]] +name = "matchers" +version = "0.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d1525a2a28c7f4fa0fc98bb91ae755d1e2d1505079e05539e35bc876b5d65ae9" +dependencies = [ + "regex-automata", +] + [[package]] name = "matchit" version = "0.8.4" @@ -3118,20 +2317,10 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = 
"a69bcab0ad47271a0234d9422b131806bf3968021e5dc9328caf2d4cd58557fc" dependencies = [ "libc", - "log", "wasi", "windows-sys 0.61.2", ] -[[package]] -name = "nibble_vec" -version = "0.1.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "77a5d83df9f36fe23f0c3648c6bbb8b0298bb5f1939c8f2704431371f4b84d43" -dependencies = [ - "smallvec", -] - [[package]] name = "nix" version = "0.30.1" @@ -3144,12 +2333,6 @@ dependencies = [ "libc", ] -[[package]] -name = "nodejs-built-in-modules" -version = "1.0.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a5eb86a92577833b75522336f210c49d9ebd7dd55a44d80a92e68c668a75f27c" - [[package]] name = "nom" version = "7.1.3" @@ -3160,79 +2343,12 @@ dependencies = [ "minimal-lexical", ] -[[package]] -name = "nom" -version = "8.0.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "df9761775871bdef83bee530e60050f7e54b1105350d6884eb0fb4f46c2f9405" -dependencies = [ - "memchr", -] - -[[package]] -name = "nonmax" -version = "0.5.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "610a5acd306ec67f907abe5567859a3c693fb9886eb1f012ab8f2a47bef3db51" - -[[package]] -name = "notify" -version = "8.2.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4d3d07927151ff8575b7087f245456e549fea62edf0ec4e565a5ee50c8402bc3" -dependencies = [ - "bitflags 2.10.0", - "fsevent-sys", - "inotify", - "kqueue", - "libc", - "log", - "mio", - "notify-types", - "walkdir", - "windows-sys 0.60.2", -] - -[[package]] -name = "notify-types" -version = "2.0.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5e0826a989adedc2a244799e823aece04662b66609d96af8dff7ac6df9a8925d" - -[[package]] -name = "nu-ansi-term" -version = "0.50.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7957b9740744892f114936ab4a57b3f487491bbeafaf8083688b16841a4240e5" -dependencies = [ - 
"windows-sys 0.61.2", -] - -[[package]] -name = "num-bigint" -version = "0.4.6" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a5e44f723f1133c9deac646763579fdb3ac745e418f2a7af9cd0c431da1f20b9" -dependencies = [ - "num-integer", - "num-traits", -] - [[package]] name = "num-conv" version = "0.1.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "51d515d32fb182ee37cda2ccdcb92950d6a3c2893aa280e540671c2cd0f3b1d9" -[[package]] -name = "num-integer" -version = "0.1.46" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7969661fd2958a5cb096e56c8e1ad0444ac2bbcd0061bd28660485a44879858f" -dependencies = [ - "num-traits", -] - [[package]] name = "num-traits" version = "0.2.19" @@ -3290,25 +2406,6 @@ dependencies = [ "objc2-encode", ] -[[package]] -name = "objc2-core-foundation" -version = "0.3.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2a180dd8642fa45cdb7dd721cd4c11b1cadd4929ce112ebd8b9f5803cc79d536" -dependencies = [ - "bitflags 2.10.0", -] - -[[package]] -name = "objc2-core-services" -version = "0.3.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "583300ad934cba24ff5292aee751ecc070f7ca6b39a574cc21b7b5e588e06a0b" -dependencies = [ - "libc", - "objc2-core-foundation", -] - [[package]] name = "objc2-encode" version = "4.1.0" @@ -3342,520 +2439,12 @@ version = "0.2.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "04744f49eae99ab78e0d5c0b603ab218f515ea8cfe5a456d7629ad883a3b6e7d" -[[package]] -name = "outref" -version = "0.5.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1a80800c0488c3a21695ea981a54918fbb37abf04f4d0720c453632255e2ff0e" - [[package]] name = "owo-colors" version = "4.2.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "9c6901729fa79e91a0913333229e9ca5dc725089d1c363b2f4b4760709dc4a52" -[[package]] -name = "oxc" 
-version = "0.106.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "bad18615591b88dfe678605a9654701a43a69548e0488ed14aac03588e8892a3" -dependencies = [ - "oxc_allocator", - "oxc_ast", - "oxc_ast_visit", - "oxc_cfg", - "oxc_codegen", - "oxc_diagnostics", - "oxc_isolated_declarations", - "oxc_mangler", - "oxc_minifier", - "oxc_parser", - "oxc_regular_expression", - "oxc_semantic", - "oxc_span", - "oxc_syntax", - "oxc_transformer", - "oxc_transformer_plugins", -] - -[[package]] -name = "oxc-browserslist" -version = "2.1.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6b48a7bf4591453d69792e735a8025b2c2c33ab75e02754023284ad17cfbbe04" -dependencies = [ - "bincode", - "flate2", - "nom 8.0.0", - "rustc-hash", - "serde", - "serde_json", - "thiserror 2.0.17", - "time", -] - -[[package]] -name = "oxc-miette" -version = "2.6.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f02105a875f3751a0b44b4c822b01177728dd9049ae6fb419e9b04887d730ed1" -dependencies = [ - "cfg-if", - "owo-colors", - "oxc-miette-derive", - "textwrap", - "thiserror 2.0.17", - "unicode-segmentation", - "unicode-width", -] - -[[package]] -name = "oxc-miette-derive" -version = "2.6.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "003b4612827f6501183873fb0735da92157e3c7daa71c40921c7d2758fec2229" -dependencies = [ - "proc-macro2", - "quote", - "syn", -] - -[[package]] -name = "oxc_allocator" -version = "0.106.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "07f4ba3148223230c546c1064c2795ece78b647ca75c9e98d42418dd4d5e4cd7" -dependencies = [ - "allocator-api2", - "bumpalo", - "hashbrown 0.16.1", - "oxc_data_structures", - "oxc_estree", - "rustc-hash", - "serde", -] - -[[package]] -name = "oxc_ast" -version = "0.106.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"33a08c611e6a481bc573c4050708f418da9ae8f09c4fac5c295c86cca6bbd1ed" -dependencies = [ - "bitflags 2.10.0", - "oxc_allocator", - "oxc_ast_macros", - "oxc_data_structures", - "oxc_diagnostics", - "oxc_estree", - "oxc_regular_expression", - "oxc_span", - "oxc_syntax", -] - -[[package]] -name = "oxc_ast_macros" -version = "0.106.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3788ddf2f5da12f0eca87c849bc33016b4bf11eea2b92980bb751e0b6a83b51a" -dependencies = [ - "phf", - "proc-macro2", - "quote", - "syn", -] - -[[package]] -name = "oxc_ast_visit" -version = "0.106.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1c58fd9b2c7697eb1dea5d30d4ae575de810c27a414396542321e292feac0c22" -dependencies = [ - "oxc_allocator", - "oxc_ast", - "oxc_span", - "oxc_syntax", -] - -[[package]] -name = "oxc_cfg" -version = "0.106.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9bf08aa784189e4bc09bcda52c70132c830e029dd2287d932b4e4189fcfb26b6" -dependencies = [ - "bitflags 2.10.0", - "itertools 0.14.0", - "oxc_index", - "oxc_syntax", - "petgraph", - "rustc-hash", -] - -[[package]] -name = "oxc_codegen" -version = "0.106.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "31ce1592b043fe06b69d934902fb156ddb719716f4f79c505adbaf078700d4f2" -dependencies = [ - "bitflags 2.10.0", - "cow-utils", - "dragonbox_ecma", - "itoa", - "oxc_allocator", - "oxc_ast", - "oxc_data_structures", - "oxc_index", - "oxc_semantic", - "oxc_sourcemap", - "oxc_span", - "oxc_syntax", - "rustc-hash", -] - -[[package]] -name = "oxc_compat" -version = "0.106.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1bd0ffbed285a4f1d8dab0fdac33e2fc0cddb9e68e2221838c9d6fd57a45182c" -dependencies = [ - "cow-utils", - "oxc-browserslist", - "oxc_syntax", - "rustc-hash", - "serde", -] - -[[package]] -name = "oxc_data_structures" -version = "0.106.0" -source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "8c6253c51f3836c35d932153398aa7582706f8e39876eae0d7163311f419afc1" -dependencies = [ - "ropey", -] - -[[package]] -name = "oxc_diagnostics" -version = "0.106.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "fbdefb78ab7e05e0ed1301f914905292542633fb6129da67ce82a9d3c87921c4" -dependencies = [ - "cow-utils", - "oxc-miette", - "percent-encoding", -] - -[[package]] -name = "oxc_ecmascript" -version = "0.106.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f845e02047887b1e4af5da1201b6d10f097f722e00cb5f7082bc847aa40f15ec" -dependencies = [ - "cow-utils", - "num-bigint", - "num-traits", - "oxc_allocator", - "oxc_ast", - "oxc_span", - "oxc_syntax", -] - -[[package]] -name = "oxc_estree" -version = "0.106.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "bd69fedb2ea8754a153e979e90fe31efed28789ead73d6d6fd69eb9025c729af" -dependencies = [ - "dragonbox_ecma", - "itoa", - "oxc_data_structures", -] - -[[package]] -name = "oxc_index" -version = "4.1.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "eb3e6120999627ec9703025eab7c9f410ebb7e95557632a8902ca48210416c2b" -dependencies = [ - "nonmax", - "rayon", - "serde", -] - -[[package]] -name = "oxc_isolated_declarations" -version = "0.106.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5b949e0360acb7bcd07bfa47eb661f117ad008fdf7e1dfe6a0b8de249050c768" -dependencies = [ - "bitflags 2.10.0", - "oxc_allocator", - "oxc_ast", - "oxc_ast_visit", - "oxc_diagnostics", - "oxc_ecmascript", - "oxc_span", - "oxc_syntax", - "rustc-hash", -] - -[[package]] -name = "oxc_mangler" -version = "0.106.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ebab202547eb10b469babe7c764e023721a06cde95c1280b2798a91b9664c244" -dependencies = [ - "itertools 0.14.0", - "oxc_allocator", - "oxc_ast", - 
"oxc_data_structures", - "oxc_index", - "oxc_semantic", - "oxc_span", - "oxc_syntax", - "rustc-hash", -] - -[[package]] -name = "oxc_minifier" -version = "0.106.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "01b0811db451b9196ee128a20e8cbc34494b5b6c190598696bee496a0b1031a6" -dependencies = [ - "cow-utils", - "oxc_allocator", - "oxc_ast", - "oxc_ast_visit", - "oxc_codegen", - "oxc_compat", - "oxc_data_structures", - "oxc_ecmascript", - "oxc_index", - "oxc_mangler", - "oxc_parser", - "oxc_regular_expression", - "oxc_semantic", - "oxc_span", - "oxc_syntax", - "oxc_traverse", - "rustc-hash", -] - -[[package]] -name = "oxc_parser" -version = "0.106.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8ec8d0fd27fffd5742181d1ca76450e25ae51a69dffbbe2076231173b262ab31" -dependencies = [ - "bitflags 2.10.0", - "cow-utils", - "memchr", - "num-bigint", - "num-traits", - "oxc_allocator", - "oxc_ast", - "oxc_data_structures", - "oxc_diagnostics", - "oxc_ecmascript", - "oxc_regular_expression", - "oxc_span", - "oxc_syntax", - "rustc-hash", - "seq-macro", -] - -[[package]] -name = "oxc_regular_expression" -version = "0.106.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b4af1f85405275d20352a69e7caaa44a28d03ba91ecc951ec79a1bf3016062d7" -dependencies = [ - "bitflags 2.10.0", - "oxc_allocator", - "oxc_ast_macros", - "oxc_diagnostics", - "oxc_span", - "phf", - "rustc-hash", - "unicode-id-start", -] - -[[package]] -name = "oxc_resolver" -version = "11.16.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0fbba32382c25ae7d741aaf3c32475b7a1697540aa8e10b0b5e1cd3bfa1ef257" -dependencies = [ - "cfg-if", - "fast-glob", - "indexmap", - "json-strip-comments", - "nodejs-built-in-modules", - "once_cell", - "papaya", - "parking_lot", - "pnp", - "rustc-hash", - "rustix", - "self_cell", - "serde", - "serde_json", - "simd-json", - "simdutf8", - "thiserror 2.0.17", - 
"tracing", - "url", - "windows", -] - -[[package]] -name = "oxc_semantic" -version = "0.106.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ac84d63f0e43359f38af2478a7d20cc0aecb780de46cada3f14d8ead6c89bf8c" -dependencies = [ - "itertools 0.14.0", - "oxc_allocator", - "oxc_ast", - "oxc_ast_visit", - "oxc_cfg", - "oxc_data_structures", - "oxc_diagnostics", - "oxc_ecmascript", - "oxc_index", - "oxc_span", - "oxc_syntax", - "phf", - "rustc-hash", - "self_cell", -] - -[[package]] -name = "oxc_sourcemap" -version = "6.0.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "36801dbbd025f2fa133367494e38eef75a53d334ae6746ba0c889fc4e76fa3a3" -dependencies = [ - "base64-simd", - "json-escape-simd", - "rustc-hash", - "serde", - "serde_json", -] - -[[package]] -name = "oxc_span" -version = "0.106.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "32556c52175b0c616e44efa6c37f532c6a4c3a213761a10ed1b9beb3d3136a78" -dependencies = [ - "compact_str", - "oxc-miette", - "oxc_allocator", - "oxc_ast_macros", - "oxc_estree", - "serde", -] - -[[package]] -name = "oxc_syntax" -version = "0.106.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6c5b2154749e8d2bf8ac77bfdf9f7e9912c4cb9b63ddfcc63841838a3be48d1a" -dependencies = [ - "bitflags 2.10.0", - "cow-utils", - "dragonbox_ecma", - "nonmax", - "oxc_allocator", - "oxc_ast_macros", - "oxc_data_structures", - "oxc_estree", - "oxc_index", - "oxc_span", - "phf", - "serde", - "unicode-id-start", -] - -[[package]] -name = "oxc_transformer" -version = "0.106.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "aba5c1dca33ef51651f955fd90b1c005d8ea8ea570b220f7616ccdab50179b53" -dependencies = [ - "base64 0.22.1", - "compact_str", - "indexmap", - "itoa", - "memchr", - "oxc_allocator", - "oxc_ast", - "oxc_ast_visit", - "oxc_compat", - "oxc_data_structures", - "oxc_diagnostics", - "oxc_ecmascript", - 
"oxc_regular_expression", - "oxc_semantic", - "oxc_span", - "oxc_syntax", - "oxc_traverse", - "rustc-hash", - "serde", - "serde_json", - "sha1", -] - -[[package]] -name = "oxc_transformer_plugins" -version = "0.106.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b1f768070e71bad660f4344a949ed88a43cc87c1131a08da0f445f9b640f268e" -dependencies = [ - "cow-utils", - "itoa", - "oxc_allocator", - "oxc_ast", - "oxc_ast_visit", - "oxc_diagnostics", - "oxc_ecmascript", - "oxc_parser", - "oxc_semantic", - "oxc_span", - "oxc_syntax", - "oxc_transformer", - "oxc_traverse", - "rustc-hash", -] - -[[package]] -name = "oxc_traverse" -version = "0.106.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "936417f4b572d4ca1ce4ac1aaf66f5f6d76d7302d486ed5c92af14d18f9e9155" -dependencies = [ - "itoa", - "oxc_allocator", - "oxc_ast", - "oxc_ast_visit", - "oxc_data_structures", - "oxc_ecmascript", - "oxc_semantic", - "oxc_span", - "oxc_syntax", - "rustc-hash", -] - -[[package]] -name = "papaya" -version = "0.2.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f92dd0b07c53a0a0c764db2ace8c541dc47320dad97c2200c2a637ab9dd2328f" -dependencies = [ - "equivalent", - "seize", -] - [[package]] name = "parking_lot" version = "0.12.5" @@ -3874,7 +2463,7 @@ checksum = "2621685985a2ebf1c516881c026032ac7deafcda1a2c9b7850dc81e3dfcb64c1" dependencies = [ "cfg-if", "libc", - "redox_syscall 0.5.18", + "redox_syscall", "smallvec", "windows-link", ] @@ -3885,12 +2474,6 @@ version = "1.0.15" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "57c0d7b74b563b49d38dae00a0c37d4d6de9b432382b2892f0574ddcae73fd0a" -[[package]] -name = "pathdiff" -version = "0.2.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "df94ce210e5bc13cb6651479fa48d14f601d9858cfe0467f43ae157023b938d3" - [[package]] name = "pathfinder_geometry" version = "0.5.1" @@ -3928,81 +2511,6 @@ version = 
"2.3.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "9b4f627cb1b25917193a259e49bdad08f671f8d9708acfd5fe0a8c1455d87220" -[[package]] -name = "petgraph" -version = "0.8.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8701b58ea97060d5e5b155d383a69952a60943f0e6dfe30b04c287beb0b27455" -dependencies = [ - "fixedbitset", - "hashbrown 0.15.5", - "indexmap", - "serde", -] - -[[package]] -name = "phf" -version = "0.13.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c1562dc717473dbaa4c1f85a36410e03c047b2e7df7f45ee938fbef64ae7fadf" -dependencies = [ - "phf_macros", - "phf_shared", - "serde", -] - -[[package]] -name = "phf_generator" -version = "0.13.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "135ace3a761e564ec88c03a77317a7c6b80bb7f7135ef2544dbe054243b89737" -dependencies = [ - "fastrand", - "phf_shared", -] - -[[package]] -name = "phf_macros" -version = "0.13.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "812f032b54b1e759ccd5f8b6677695d5268c588701effba24601f6932f8269ef" -dependencies = [ - "phf_generator", - "phf_shared", - "proc-macro2", - "quote", - "syn", -] - -[[package]] -name = "phf_shared" -version = "0.13.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e57fef6bc5981e38c2ce2d63bfa546861309f875b8a75f092d1d54ae2d64f266" -dependencies = [ - "siphasher", -] - -[[package]] -name = "pin-project" -version = "1.1.10" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "677f1add503faace112b9f1373e43e9e054bfdd22ff1a63c1bc485eaec6a6a8a" -dependencies = [ - "pin-project-internal", -] - -[[package]] -name = "pin-project-internal" -version = "1.1.10" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6e918e4ff8c4549eb882f14b3a4bc8c8bc93de829416eacf579f1207a8fbf861" -dependencies = [ - "proc-macro2", - "quote", - "syn", -] - 
[[package]] name = "pin-project-lite" version = "0.2.16" @@ -4080,25 +2588,6 @@ dependencies = [ "miniz_oxide", ] -[[package]] -name = "pnp" -version = "0.12.7" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a6e38320d5a8e386647f622067588bdb338c9e6e43eb32cf6f8991dd0e8f0046" -dependencies = [ - "byteorder", - "concurrent_lru", - "fancy-regex", - "flate2", - "nodejs-built-in-modules", - "pathdiff", - "radix_trie", - "rustc-hash", - "serde", - "serde_json", - "thiserror 2.0.17", -] - [[package]] name = "portable-atomic" version = "1.13.0" @@ -4149,9 +2638,9 @@ dependencies = [ [[package]] name = "proc-macro2" -version = "1.0.104" +version = "1.0.105" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9695f8df41bb4f3d222c95a67532365f569318332d03d5f3f67f37b20e6ebdf0" +checksum = "535d180e0ecab6268a3e718bb9fd44db66bbbc256257165fc699dadf70d16fe7" dependencies = [ "unicode-ident", ] @@ -4179,9 +2668,9 @@ dependencies = [ [[package]] name = "quote" -version = "1.0.42" +version = "1.0.43" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a338cc41d27e6cc6dce6cefc13a0729dfbb81c262b1f519331575dd80ef3067f" +checksum = "dc74d9a594b72ae6656596548f56f667211f8a97b3d4c3d467150794690dc40a" dependencies = [ "proc-macro2", ] @@ -4192,16 +2681,6 @@ version = "5.3.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "69cdb34c158ceb288df11e18b4bd39de994f6657d83847bdffdbd7f346754b0f" -[[package]] -name = "radix_trie" -version = "0.3.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3b4431027dcd37fc2a73ef740b5f233aa805897935b8bce0195e41bbf9a3289a" -dependencies = [ - "endian-type", - "nibble_vec", -] - [[package]] name = "rand" version = "0.8.5" @@ -4281,7 +2760,7 @@ dependencies = [ [[package]] name = "rawdb" -version = "0.5.4" +version = "0.5.6" dependencies = [ "libc", "log", @@ -4321,15 +2800,6 @@ dependencies = [ "bitflags 2.10.0", ] 
-[[package]] -name = "redox_syscall" -version = "0.7.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "49f3fe0889e69e2ae9e41f4d6c4c0181701d00e4697b356fb1f74173a5e0ee27" -dependencies = [ - "bitflags 2.10.0", -] - [[package]] name = "redox_users" version = "0.5.2" @@ -4390,16 +2860,6 @@ version = "0.8.8" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "7a2d987857b319362043e95f5353c0535c1f58eec5336fdfcf626430af7def58" -[[package]] -name = "regress" -version = "0.10.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2057b2325e68a893284d1538021ab90279adac1139957ca2a74426c6f118fb48" -dependencies = [ - "hashbrown 0.16.1", - "memchr", -] - [[package]] name = "ring" version = "0.17.14" @@ -4423,51 +2883,6 @@ dependencies = [ "libc", ] -[[package]] -name = "rolldown-ariadne" -version = "0.5.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "77dff57c9de498bb1eb5b1ce682c2e3a0ae956b266fa0933c3e151b87b078967" -dependencies = [ - "unicode-width", - "yansi", -] - -[[package]] -name = "rolldown-notify" -version = "10.1.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e8bf250d410b79487a6d054e6bd16ec08dddd2998f5f5e6291867a35066cfc37" -dependencies = [ - "bitflags 2.10.0", - "inotify", - "kqueue", - "libc", - "mio", - "objc2-core-foundation", - "objc2-core-services", - "rolldown-notify-types", - "tracing", - "walkdir", - "windows-sys 0.61.2", -] - -[[package]] -name = "rolldown-notify-types" -version = "2.0.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1931923a28e14c01a27ca56669669eb3e3de4068859c34e17b96c93ba3a61afe" - -[[package]] -name = "ropey" -version = "1.6.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "93411e420bcd1a75ddd1dc3caf18c23155eda2c090631a85af21ba19e97093b5" -dependencies = [ - "smallvec", - "str_indices", -] - [[package]] name = 
"rustc-demangle" version = "0.1.26" @@ -4609,16 +3024,6 @@ dependencies = [ "cc", ] -[[package]] -name = "seize" -version = "0.5.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5b55fb86dfd3a2f5f76ea78310a88f96c4ea21a3031f8d212443d56123fd0521" -dependencies = [ - "libc", - "windows-sys 0.61.2", -] - [[package]] name = "self_cell" version = "1.2.2" @@ -4631,12 +3036,6 @@ version = "1.0.27" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "d767eb0aabc880b29956c35734170f26ed551a859dbd361d140cdbeca61ab1e2" -[[package]] -name = "seq-macro" -version = "0.3.6" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1bc711410fbe7399f390ca1c3b60ad0f53f80e95c5eb935e52268a0e2cd49acc" - [[package]] name = "serde" version = "1.0.228" @@ -4690,11 +3089,10 @@ dependencies = [ [[package]] name = "serde_json" -version = "1.0.148" +version = "1.0.149" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3084b546a1dd6289475996f182a22aba973866ea8e8b02c51d9f46b1336a22da" +checksum = "83fc039473c5595ace860d8c4fafa220ff474b3fc6bfdb4293327f1a37e94d86" dependencies = [ - "indexmap", "itoa", "memchr", "serde", @@ -4758,17 +3156,6 @@ dependencies = [ "xxhash-rust", ] -[[package]] -name = "sha1" -version = "0.10.6" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e3bf829a2d51ab4a5ddf1352d8470c140cadc8301b2ae1789db023f01cedd6ba" -dependencies = [ - "cfg-if", - "cpufeatures", - "digest", -] - [[package]] name = "sharded-slab" version = "0.1.7" @@ -4790,30 +3177,6 @@ version = "0.3.8" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "e320a6c5ad31d271ad523dcf3ad13e2767ad8b1cb8f047f75a8aeaf8da139da2" -[[package]] -name = "simd-json" -version = "0.17.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4255126f310d2ba20048db6321c81ab376f6a6735608bf11f0785c41f01f64e3" -dependencies = [ - "halfbrown", - 
"ref-cast", - "simdutf8", - "value-trait", -] - -[[package]] -name = "simdutf8" -version = "0.1.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e3a9fe34e3e7a50316060351f37187a3f546bce95496156754b601a5fa71b76e" - -[[package]] -name = "siphasher" -version = "1.0.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "56199f7ddabf13fe5074ce809e7d3f42b42ae711800501b5b16ea82ad029c39d" - [[package]] name = "slab" version = "0.4.11" @@ -4826,12 +3189,6 @@ version = "1.15.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "67b1b7a3b5fe4f1376887184045fcf45c69e92af734b7aaddc05fb777b6fbd03" -[[package]] -name = "smawk" -version = "0.3.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b7c388c1b5e93756d0c740965c41e8822f866621d41acbdf6336a6a168f8840c" - [[package]] name = "socket2" version = "0.6.1" @@ -4870,18 +3227,6 @@ version = "1.2.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "6ce2be8dc25455e1f91df71bfa12ad37d7af1092ae736f3a6cd0e37bc7810596" -[[package]] -name = "static_assertions" -version = "1.1.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a2eb9349b6444b326872e140eb1cf5e7c522154d69e7a0ffb0fb81c06b37543f" - -[[package]] -name = "str_indices" -version = "0.4.4" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d08889ec5408683408db66ad89e0e1f93dff55c73a4ccc71c427d5b277ee47e6" - [[package]] name = "strsim" version = "0.11.1" @@ -4909,20 +3254,11 @@ dependencies = [ "syn", ] -[[package]] -name = "sugar_path" -version = "1.2.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "48abcb2199ce37819c20dc7a72dc09e3263a00e598ff5089fe5fda92e0f63c37" -dependencies = [ - "smallvec", -] - [[package]] name = "syn" -version = "2.0.113" +version = "2.0.114" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"678faa00651c9eb72dd2020cbdf275d92eccb2400d568e419efdd64838145cb4" +checksum = "d4d107df263a3013ef9b1879b0df87d706ff80f65a86ea879bd9c31f9b307c2a" dependencies = [ "proc-macro2", "quote", @@ -4959,26 +3295,6 @@ dependencies = [ "windows-sys 0.61.2", ] -[[package]] -name = "termcolor" -version = "1.4.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "06794f8f6c5c898b3275aebefa6b8a1cb24cd2c6c79397ab15774837a0bc5755" -dependencies = [ - "winapi-util", -] - -[[package]] -name = "textwrap" -version = "0.16.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c13547615a44dc9c452a8a534638acdf07120d4b6847c8178705da06306a3057" -dependencies = [ - "smawk", - "unicode-linebreak", - "unicode-width", -] - [[package]] name = "thiserror" version = "1.0.69" @@ -5035,10 +3351,12 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "91e7d9e3bb61134e77bde20dd4825b97c010155709965fedf0f49bb138e52a9d" dependencies = [ "deranged", + "itoa", "num-conv", "powerfmt", "serde", "time-core", + "time-macros", ] [[package]] @@ -5047,6 +3365,16 @@ version = "0.1.6" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "40868e7c1d2f0b8d73e4a8c7f0ff63af4f6d19be117e90bd73eb1d62cf831c6b" +[[package]] +name = "time-macros" +version = "0.2.24" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "30cfb0125f12d9c277f35663a0a33f8c30190f4e4574868a330595412d34ebf3" +dependencies = [ + "num-conv", + "time-core", +] + [[package]] name = "tinystr" version = "0.8.2" @@ -5085,9 +3413,9 @@ dependencies = [ [[package]] name = "tokio-stream" -version = "0.1.17" +version = "0.1.18" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "eca58d7bba4a75707817a2c44174253f9236b2d5fbd055602e9d5c07c139a047" +checksum = "32da49809aab5c3bc678af03902d4ccddea2a87d028d86392a4b1560c6906c70" dependencies = [ "futures-core", "pin-project-lite", @@ -5096,9 +3424,9 @@ 
dependencies = [ [[package]] name = "tokio-util" -version = "0.7.17" +version = "0.7.18" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2efa149fe76073d6e8fd97ef4f4eca7b67f599660115591483572e406e165594" +checksum = "9ae9cec805b01e8fc3fd2fe289f89149a9b66dd16786abd8b19cfa7b48cb0098" dependencies = [ "bytes", "futures-core", @@ -5218,6 +3546,18 @@ dependencies = [ "tracing-core", ] +[[package]] +name = "tracing-appender" +version = "0.2.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "786d480bce6247ab75f005b14ae1624ad978d3029d9113f0a22fa1ac773faeaf" +dependencies = [ + "crossbeam-channel", + "thiserror 2.0.17", + "time", + "tracing-subscriber", +] + [[package]] name = "tracing-attributes" version = "0.1.31" @@ -5229,17 +3569,6 @@ dependencies = [ "syn", ] -[[package]] -name = "tracing-chrome" -version = "0.7.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "bf0a738ed5d6450a9fb96e86a23ad808de2b727fd1394585da5cdd6788ffe724" -dependencies = [ - "serde_json", - "tracing-core", - "tracing-subscriber", -] - [[package]] name = "tracing-core" version = "0.1.36" @@ -5260,64 +3589,19 @@ dependencies = [ "tracing-subscriber", ] -[[package]] -name = "tracing-log" -version = "0.2.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ee855f1f400bd0e5c02d150ae5de3840039a3f54b025156404e34c23c03f47c3" -dependencies = [ - "log", - "once_cell", - "tracing-core", -] - -[[package]] -name = "tracing-serde" -version = "0.2.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "704b1aeb7be0d0a84fc9828cae51dab5970fee5088f83d1dd7ee6f6246fc6ff1" -dependencies = [ - "serde", - "tracing-core", -] - [[package]] name = "tracing-subscriber" version = "0.3.22" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "2f30143827ddab0d256fd843b7a66d164e9f271cfa0dde49142c5ca0ca291f1e" dependencies = [ - "nu-ansi-term", - "serde", - 
"serde_json", + "matchers", + "once_cell", + "regex-automata", "sharded-slab", - "smallvec", "thread_local", + "tracing", "tracing-core", - "tracing-log", - "tracing-serde", -] - -[[package]] -name = "ts-rs" -version = "11.1.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4994acea2522cd2b3b85c1d9529a55991e3ad5e25cdcd3de9d505972c4379424" -dependencies = [ - "thiserror 2.0.17", - "ts-rs-macros", -] - -[[package]] -name = "ts-rs-macros" -version = "11.1.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ee6ff59666c9cbaec3533964505d39154dc4e0a56151fdea30a09ed0301f62e2" -dependencies = [ - "proc-macro2", - "quote", - "syn", - "termcolor", ] [[package]] @@ -5332,51 +3616,18 @@ version = "2.1.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "9ea3136b675547379c4bd395ca6b938e5ad3c3d20fad76e7fe85f9e0d011419c" -[[package]] -name = "typedmap" -version = "0.6.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "63278e72ed4f207eb3216c944cbafb35bdb656d2eab97ef73c0c165a1cd3e319" -dependencies = [ - "dashmap", -] - -[[package]] -name = "typenum" -version = "1.19.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "562d481066bde0658276a35467c4af00bdc6ee726305698a55b86e61d7ad82bb" - -[[package]] -name = "unicode-id-start" -version = "1.4.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "81b79ad29b5e19de4260020f8919b443b2ef0277d242ce532ec7b7a2cc8b6007" - [[package]] name = "unicode-ident" version = "1.0.22" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "9312f7c4f6ff9069b165498234ce8be658059c6728633667c526e27dc2cf1df5" -[[package]] -name = "unicode-linebreak" -version = "0.1.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3b09c83c3c29d37506a3e260c08c03743a6bb66a9cd432c6934ab501a190571f" - [[package]] name = "unicode-segmentation" version = 
"1.12.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "f6ccf251212114b54433ec949fd6a7841275f9ada20dddd2f29e9ceea4501493" -[[package]] -name = "unicode-width" -version = "0.2.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b4ac048d71ede7ee76d585517add45da530660ef4390e49b098733c6e897f254" - [[package]] name = "unicode-xid" version = "0.2.6" @@ -5389,30 +3640,19 @@ version = "0.9.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "8ecb6da28b8a351d773b68d5825ac39017e680750f980f3a1a85cd8dd28a47c1" -[[package]] -name = "unty" -version = "0.0.4" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6d49784317cd0d1ee7ec5c716dd598ec5b4483ea832a2dced265471cc0f690ae" - [[package]] name = "url" -version = "2.5.7" +version = "2.5.8" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "08bc136a29a3d1758e07a9cca267be308aeebf5cfd5a10f3f67ab2097683ef5b" +checksum = "ff67a8a4397373c3ef660812acab3268222035010ab8680ec4215f38ba3d0eed" dependencies = [ "form_urlencoded", "idna", "percent-encoding", "serde", + "serde_derive", ] -[[package]] -name = "urlencoding" -version = "2.1.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "daf8dba3b7eb870caf1ddeed7bc9d2a049f3cfdfae7cb521b087cc33ae4c49da" - [[package]] name = "utf8_iter" version = "1.0.4" @@ -5442,18 +3682,6 @@ version = "0.1.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "ba73ea9cf16a25df0c8caa16c51acb937d5712a8429db78a3ee29d5dcacd3a65" -[[package]] -name = "value-trait" -version = "0.12.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8e80f0c733af0720a501b3905d22e2f97662d8eacfe082a75ed7ffb5ab08cb59" -dependencies = [ - "float-cmp", - "halfbrown", - "itoa", - "ryu", -] - [[package]] name = "varint-rs" version = "2.2.0" @@ -5462,7 +3690,7 @@ checksum = 
"8f54a172d0620933a27a4360d3db3e2ae0dd6cceae9730751a036bbf182c4b23" [[package]] name = "vecdb" -version = "0.5.4" +version = "0.5.6" dependencies = [ "ctrlc", "log", @@ -5481,7 +3709,7 @@ dependencies = [ [[package]] name = "vecdb_derive" -version = "0.5.4" +version = "0.5.6" dependencies = [ "quote", "syn", @@ -5493,27 +3721,6 @@ version = "0.9.5" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "0b928f33d975fc6ad9f86c8f283853ad26bdd5b10b7f1542aa2fa15e2289105a" -[[package]] -name = "vfs" -version = "0.12.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9e723b9e1c02a3cf9f9d0de6a4ddb8cdc1df859078902fe0ae0589d615711ae6" -dependencies = [ - "filetime", -] - -[[package]] -name = "virtue" -version = "0.0.18" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "051eb1abcf10076295e815102942cc58f9d5e3b4560e46e53c21e8ff6f3af7b1" - -[[package]] -name = "vsimd" -version = "0.8.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5c3082ca00d5a5ef149bb8b555a72ae84c9c59f7250f013ac822ac2e49b19c64" - [[package]] name = "walkdir" version = "2.5.0" @@ -5637,27 +3844,6 @@ version = "0.4.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "712e227841d057c1ee1cd2fb22fa7e5a5461ae8e48fa2ca79ec42cfc1931183f" -[[package]] -name = "windows" -version = "0.62.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "527fadee13e0c05939a6a05d5bd6eec6cd2e3dbd648b9f8e447c6518133d8580" -dependencies = [ - "windows-collections", - "windows-core", - "windows-future", - "windows-numerics", -] - -[[package]] -name = "windows-collections" -version = "0.3.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "23b2d95af1a8a14a3c7367e1ed4fc9c20e0a26e79551b1454d72583c97cc6610" -dependencies = [ - "windows-core", -] - [[package]] name = "windows-core" version = "0.62.2" @@ -5671,17 +3857,6 @@ dependencies = [ 
"windows-strings", ] -[[package]] -name = "windows-future" -version = "0.3.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e1d6f90251fe18a279739e78025bd6ddc52a7e22f921070ccdc67dde84c605cb" -dependencies = [ - "windows-core", - "windows-link", - "windows-threading", -] - [[package]] name = "windows-implement" version = "0.60.2" @@ -5710,16 +3885,6 @@ version = "0.2.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "f0805222e57f7521d6a62e36fa9163bc891acd422f971defe97d64e70d0a4fe5" -[[package]] -name = "windows-numerics" -version = "0.3.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6e2e40844ac143cdb44aead537bbf727de9b044e107a0f1220392177d15b0f26" -dependencies = [ - "windows-core", - "windows-link", -] - [[package]] name = "windows-result" version = "0.4.1" @@ -5798,15 +3963,6 @@ dependencies = [ "windows_x86_64_msvc 0.53.1", ] -[[package]] -name = "windows-threading" -version = "0.2.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3949bd5b99cafdf1c7ca86b43ca564028dfe27d66958f2470940f73d86d75b37" -dependencies = [ - "windows-link", -] - [[package]] name = "windows_aarch64_gnullvm" version = "0.52.6" @@ -5939,12 +4095,6 @@ version = "0.8.15" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "fdd20c5420375476fbd4394763288da7eb0cc0b8c11deed431a91562af7335d3" -[[package]] -name = "yansi" -version = "1.0.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "cfe53a6657fd280eaa890a3bc59152892ffa3e30101319d168b781ed6529b049" - [[package]] name = "yeslogic-fontconfig-sys" version = "6.0.0" @@ -5981,18 +4131,18 @@ dependencies = [ [[package]] name = "zerocopy" -version = "0.8.31" +version = "0.8.33" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "fd74ec98b9250adb3ca554bdde269adf631549f51d8a8f8f0a10b50f1cb298c3" +checksum = 
"668f5168d10b9ee831de31933dc111a459c97ec93225beb307aed970d1372dfd" dependencies = [ "zerocopy-derive", ] [[package]] name = "zerocopy-derive" -version = "0.8.31" +version = "0.8.33" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d8a8d209fdf45cf5138cbb5a506f6b52522a25afccc534d1475dad8e31105c6a" +checksum = "2c7962b26b0a8685668b671ee4b54d007a67d4eaf05fda79ac0ecf41e32270f1" dependencies = [ "proc-macro2", "quote", @@ -6075,9 +4225,9 @@ checksum = "40990edd51aae2c2b6907af74ffb635029d5788228222c4bb811e9351c0caad3" [[package]] name = "zmij" -version = "1.0.9" +version = "1.0.12" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4ee2a72b10d087f75fb2e1c2c7343e308fe6970527c22a41caf8372e165ff5c1" +checksum = "2fc5a66a20078bf1251bde995aa2fdcc4b800c70b5d92dd2c62abc5c60f679f8" [[package]] name = "zopfli" diff --git a/Cargo.toml b/Cargo.toml index 1f4b99934..c8ee4613a 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -43,7 +43,6 @@ bitcoincore-rpc = "0.19.0" brk_alloc = { version = "0.1.0-alpha.2", path = "crates/brk_alloc" } brk_bencher = { version = "0.1.0-alpha.2", path = "crates/brk_bencher" } brk_bindgen = { version = "0.1.0-alpha.2", path = "crates/brk_bindgen" } -brk_bundler = { version = "0.1.0-alpha.2", path = "crates/brk_bundler" } brk_cli = { version = "0.1.0-alpha.2", path = "crates/brk_cli" } brk_client = { version = "0.1.0-alpha.2", path = "crates/brk_client" } brk_cohort = { version = "0.1.0-alpha.2", path = "crates/brk_cohort" } @@ -51,25 +50,23 @@ brk_computer = { version = "0.1.0-alpha.2", path = "crates/brk_computer" } brk_error = { version = "0.1.0-alpha.2", path = "crates/brk_error" } brk_fetcher = { version = "0.1.0-alpha.2", path = "crates/brk_fetcher" } brk_indexer = { version = "0.1.0-alpha.2", path = "crates/brk_indexer" } -brk_query = { version = "0.1.0-alpha.2", path = "crates/brk_query", features = ["tokio"] } brk_iterator = { version = "0.1.0-alpha.2", path = "crates/brk_iterator" } brk_logger = { 
version = "0.1.0-alpha.2", path = "crates/brk_logger" } brk_mcp = { version = "0.1.0-alpha.2", path = "crates/brk_mcp" } brk_mempool = { version = "0.1.0-alpha.2", path = "crates/brk_mempool" } +brk_query = { version = "0.1.0-alpha.2", path = "crates/brk_query", features = ["tokio"] } brk_reader = { version = "0.1.0-alpha.2", path = "crates/brk_reader" } brk_rpc = { version = "0.1.0-alpha.2", path = "crates/brk_rpc" } brk_server = { version = "0.1.0-alpha.2", path = "crates/brk_server" } brk_store = { version = "0.1.0-alpha.2", path = "crates/brk_store" } -brk_types = { version = "0.1.0-alpha.2", path = "crates/brk_types" } brk_traversable = { version = "0.1.0-alpha.2", path = "crates/brk_traversable", features = ["pco", "derive"] } brk_traversable_derive = { version = "0.1.0-alpha.2", path = "crates/brk_traversable_derive" } +brk_types = { version = "0.1.0-alpha.2", path = "crates/brk_types" } byteview = "0.10.0" color-eyre = "0.6.5" derive_more = { version = "2.1.1", features = ["deref", "deref_mut"] } -env_logger = "0.11.8" -fjall = "3.0.0" -jiff = "0.2.17" -log = "0.4.29" +fjall = "3.0.1" +jiff = "0.2.18" minreq = { version = "2.14.1", features = ["https", "serde_json"] } parking_lot = "0.12.5" rayon = "1.11.0" @@ -78,10 +75,11 @@ schemars = "1.2.0" serde = "1.0.228" serde_bytes = "0.11.19" serde_derive = "1.0.228" -serde_json = { version = "1.0.148", features = ["float_roundtrip"] } +serde_json = { version = "1.0.149", features = ["float_roundtrip"] } smallvec = "1.15.1" tokio = { version = "1.49.0", features = ["rt-multi-thread"] } -# vecdb = { version = "0.5.4", features = ["derive", "serde_json", "pco", "schemars"] } +tracing = { version = "0.1", default-features = false, features = ["std"] } +# vecdb = { version = "0.5.6", features = ["derive", "serde_json", "pco", "schemars"] } vecdb = { path = "../anydb/crates/vecdb", features = ["derive", "serde_json", "pco", "schemars"] } # vecdb = { git = "https://github.com/anydb-rs/anydb", features = ["derive", 
"serde_json", "pco"] } diff --git a/crates/brk/Cargo.toml b/crates/brk/Cargo.toml index a109425e9..1fab87bdd 100644 --- a/crates/brk/Cargo.toml +++ b/crates/brk/Cargo.toml @@ -12,7 +12,6 @@ build = "build.rs" full = [ "bencher", "binder", - "bundler", "client", "computer", "error", @@ -33,7 +32,6 @@ full = [ ] bencher = ["brk_bencher"] binder = ["brk_bindgen"] -bundler = ["brk_bundler"] client = ["brk_client"] computer = ["brk_computer"] error = ["brk_error"] @@ -55,7 +53,6 @@ types = ["brk_types"] [dependencies] brk_bencher = { workspace = true, optional = true } brk_bindgen = { workspace = true, optional = true } -brk_bundler = { workspace = true, optional = true } brk_client = { workspace = true, optional = true } brk_computer = { workspace = true, optional = true } brk_error = { workspace = true, optional = true } diff --git a/crates/brk/README.md b/crates/brk/README.md index 44d784722..3019541e9 100644 --- a/crates/brk/README.md +++ b/crates/brk/README.md @@ -24,7 +24,6 @@ use brk::types::Height; |---------|-------|-------------| | `bencher` | `brk_bencher` | Benchmarking utilities | | `binder` | `brk_binder` | Client code generation | -| `bundler` | `brk_bundler` | JS bundling | | `client` | `brk_client` | Generated Rust API client | | `computer` | `brk_computer` | Metric computation | | `error` | `brk_error` | Error types | diff --git a/crates/brk/src/lib.rs b/crates/brk/src/lib.rs index 5fde17bff..5a37a7ae2 100644 --- a/crates/brk/src/lib.rs +++ b/crates/brk/src/lib.rs @@ -8,10 +8,6 @@ pub use brk_bencher as bencher; #[doc(inline)] pub use brk_bindgen as binder; -#[cfg(feature = "bundler")] -#[doc(inline)] -pub use brk_bundler as bundler; - #[cfg(feature = "client")] #[doc(inline)] pub use brk_client as client; diff --git a/crates/brk_alloc/Cargo.toml b/crates/brk_alloc/Cargo.toml index d22da7eae..c68b6995a 100644 --- a/crates/brk_alloc/Cargo.toml +++ b/crates/brk_alloc/Cargo.toml @@ -9,5 +9,5 @@ repository.workspace = true [dependencies] libmimalloc-sys = 
{ version = "0.1.44", features = ["extended"] } -log = { workspace = true } +tracing = { workspace = true } mimalloc = { version = "0.1.48", features = ["v3"] } diff --git a/crates/brk_bindgen/src/analysis/patterns.rs b/crates/brk_bindgen/src/analysis/patterns.rs index 8d0aa7734..96cfd8678 100644 --- a/crates/brk_bindgen/src/analysis/patterns.rs +++ b/crates/brk_bindgen/src/analysis/patterns.rs @@ -199,7 +199,7 @@ fn resolve_branch_patterns( for (child_name, child_node) in children { let (rust_type, json_type, indexes, child_fields) = match child_node { TreeNode::Leaf(leaf) => ( - leaf.value_type().to_string(), + leaf.kind().to_string(), schema_to_json_type(&leaf.schema), leaf.indexes().clone(), Vec::new(), diff --git a/crates/brk_bindgen/src/analysis/tree.rs b/crates/brk_bindgen/src/analysis/tree.rs index 42a87ab98..f6889e98c 100644 --- a/crates/brk_bindgen/src/analysis/tree.rs +++ b/crates/brk_bindgen/src/analysis/tree.rs @@ -35,7 +35,7 @@ pub fn get_node_fields( .map(|(name, node)| { let (rust_type, json_type, indexes) = match node { TreeNode::Leaf(leaf) => ( - leaf.value_type().to_string(), + leaf.kind().to_string(), schema_to_json_type(&leaf.schema), leaf.indexes().clone(), ), @@ -228,7 +228,7 @@ pub fn get_fields_with_child_info( .map(|(name, node)| { let (rust_type, json_type, indexes, child_fields) = match node { TreeNode::Leaf(leaf) => ( - leaf.value_type().to_string(), + leaf.kind().to_string(), schema_to_json_type(&leaf.schema), leaf.indexes().clone(), None, diff --git a/crates/brk_bindgen/src/generators/javascript/tree.rs b/crates/brk_bindgen/src/generators/javascript/tree.rs index 58b531bfe..dc40fdaa4 100644 --- a/crates/brk_bindgen/src/generators/javascript/tree.rs +++ b/crates/brk_bindgen/src/generators/javascript/tree.rs @@ -107,7 +107,11 @@ pub fn generate_main_client( let pattern_lookup = metadata.pattern_lookup(); writeln!(output, "/**").unwrap(); - writeln!(output, " * Main BRK client with catalog tree and API methods").unwrap(); + writeln!( + 
output, + " * Main BRK client with catalog tree and API methods" + ) + .unwrap(); writeln!(output, " * @extends BrkClientBase").unwrap(); writeln!(output, " */").unwrap(); writeln!(output, "class BrkClient extends BrkClientBase {{").unwrap(); @@ -136,53 +140,6 @@ pub fn generate_main_client( generate_api_methods(output, endpoints); - // Instance method: mergeMetricPatterns - writeln!(output, r#" - /** - * Merge multiple MetricPatterns into a single pattern. - * Throws if any two patterns have overlapping indexes. - * @template T - * @param {{...MetricPattern}} patterns - The patterns to merge - * @returns {{MetricPattern}} A new merged pattern - */ - mergeMetricPatterns(...patterns) {{ - if (patterns.length === 0) {{ - throw new BrkError('mergeMetricPatterns requires at least one pattern'); - }} - if (patterns.length === 1) {{ - return patterns[0]; - }} - - const seenIndexes = /** @type {{Map}} */ (new Map()); - const mergedBy = /** @type {{Partial>>}} */ ({{}}); - - for (const pattern of patterns) {{ - for (const index of pattern.indexes()) {{ - const existing = seenIndexes.get(index); - if (existing !== undefined) {{ - throw new BrkError(`Index '${{index}}' exists in both '${{existing}}' and '${{pattern.name}}'`); - }} - seenIndexes.set(index, pattern.name); - Object.defineProperty(mergedBy, index, {{ - get() {{ return pattern.get(index); }}, - enumerable: true, - configurable: true, - }}); - }} - }} - - const allIndexes = /** @type {{Index[]}} */ ([...seenIndexes.keys()]); - const firstName = patterns[0].name; - - return {{ - name: firstName, - by: mergedBy, - indexes() {{ return allIndexes; }}, - get(index) {{ return mergedBy[index]; }}, - }}; - }} -"#).unwrap(); - writeln!(output, "}}\n").unwrap(); writeln!(output, "export {{ BrkClient, BrkError }};").unwrap(); @@ -216,7 +173,11 @@ fn generate_tree_initializer( writeln!( output, "{}{}: create{}(this, '{}'){}", - indent_str, field_name, accessor.name, leaf.name(), comma + indent_str, + field_name, + 
accessor.name, + leaf.name(), + comma ) .unwrap(); } diff --git a/crates/brk_bindgen/src/generators/python/api.rs b/crates/brk_bindgen/src/generators/python/api.rs index 5e7ac89ad..5d3254626 100644 --- a/crates/brk_bindgen/src/generators/python/api.rs +++ b/crates/brk_bindgen/src/generators/python/api.rs @@ -123,17 +123,24 @@ fn endpoint_to_method_name(endpoint: &Endpoint) -> String { fn build_method_params(endpoint: &Endpoint) -> String { let mut params = Vec::new(); + // Path params are always required for param in &endpoint.path_params { let safe_name = escape_python_keyword(¶m.name); let py_type = js_type_to_python(¶m.param_type); params.push(format!(", {}: {}", safe_name, py_type)); } + // Required query params must come before optional ones (Python syntax requirement) for param in &endpoint.query_params { - let safe_name = escape_python_keyword(¶m.name); - let py_type = js_type_to_python(¶m.param_type); if param.required { + let safe_name = escape_python_keyword(¶m.name); + let py_type = js_type_to_python(¶m.param_type); params.push(format!(", {}: {}", safe_name, py_type)); - } else { + } + } + for param in &endpoint.query_params { + if !param.required { + let safe_name = escape_python_keyword(¶m.name); + let py_type = js_type_to_python(¶m.param_type); params.push(format!(", {}: Optional[{}] = None", safe_name, py_type)); } } diff --git a/crates/brk_bindgen/src/lib.rs b/crates/brk_bindgen/src/lib.rs index f65204736..52eba190a 100644 --- a/crates/brk_bindgen/src/lib.rs +++ b/crates/brk_bindgen/src/lib.rs @@ -142,7 +142,7 @@ fn collect_leaf_type_schemas(node: &TreeNode, schemas: &mut TypeSchemas) { collect_schema_definitions(&leaf.schema, schemas); // Get the type name for this leaf - let type_name = extract_inner_type(leaf.value_type()); + let type_name = extract_inner_type(leaf.kind()); if let Entry::Vacant(e) = schemas.entry(type_name) { // Unwrap single-element allOf diff --git a/crates/brk_bundler/Cargo.toml b/crates/brk_bundler/Cargo.toml deleted file 
mode 100644 index 7b2f8e18f..000000000 --- a/crates/brk_bundler/Cargo.toml +++ /dev/null @@ -1,20 +0,0 @@ -[package] -name = "brk_bundler" -description = "A thin wrapper around rolldown" -version.workspace = true -edition.workspace = true -license.workspace = true -homepage.workspace = true -repository.workspace = true -build = "build.rs" - -[dependencies] -log = { workspace = true } -notify = "8.2.0" -# rolldown = { path = "../../../rolldown/crates/rolldown", package = "brk_rolldown" } -rolldown = { version = "0.7.0", package = "brk_rolldown" } -sugar_path = "1.2.1" -tokio = { workspace = true } - -[dev-dependencies] -env_logger = { workspace = true } diff --git a/crates/brk_bundler/README.md b/crates/brk_bundler/README.md deleted file mode 100644 index 23f41bb0c..000000000 --- a/crates/brk_bundler/README.md +++ /dev/null @@ -1,32 +0,0 @@ -# brk_bundler - -JavaScript bundling with watch mode for BRK web interfaces. - -## What It Enables - -Bundle and minify JavaScript modules using Rolldown, with file watching for development. Handles module copying, source map generation, and cache-busting via hashed filenames. - -## Key Features - -- **Rolldown integration**: Fast Rust-based bundler with tree-shaking and minification -- **Watch mode**: Rebuilds on file changes with live module syncing -- **Source maps**: Full debugging support in production builds -- **Cache busting**: Hashes main bundle filename, updates HTML references automatically -- **Service worker versioning**: Injects package version into service worker files - -## Core API - -```rust,ignore -// One-shot build -let dist = bundle(modules_path, websites_path, "src", false).await?; - -// Watch mode for development -bundle(modules_path, websites_path, "src", true).await?; -``` - -## Build Pipeline - -1. Copy shared modules to source scripts directory -2. Bundle with Rolldown (minified, with source maps) -3. Update `index.html` with hashed script references -4. 
Inject version into service worker diff --git a/crates/brk_bundler/build.rs b/crates/brk_bundler/build.rs deleted file mode 100644 index a4055a31e..000000000 --- a/crates/brk_bundler/build.rs +++ /dev/null @@ -1,8 +0,0 @@ -fn main() { - let profile = std::env::var("PROFILE").unwrap_or_default(); - - if profile == "release" { - println!("cargo:rustc-flag=-C"); - println!("cargo:rustc-flag=target-cpu=native"); - } -} diff --git a/crates/brk_bundler/examples/bundle.rs b/crates/brk_bundler/examples/bundle.rs deleted file mode 100644 index 46dfeba4b..000000000 --- a/crates/brk_bundler/examples/bundle.rs +++ /dev/null @@ -1,37 +0,0 @@ -use std::{io, path::PathBuf, thread, time::Duration}; - -use brk_bundler::bundle; - -fn find_dev_dirs() -> Option<(PathBuf, PathBuf)> { - let mut dir = std::env::current_dir().ok()?; - loop { - let websites = dir.join("websites"); - let modules = dir.join("modules"); - if websites.exists() && modules.exists() { - return Some((websites, modules)); - } - // Stop at workspace root (crates/ indicates we're there) - if dir.join("crates").exists() { - return None; - } - dir = dir.parent()?.to_path_buf(); - } -} - -#[tokio::main] -async fn main() -> io::Result<()> { - env_logger::Builder::from_env(env_logger::Env::default().default_filter_or("debug")).init(); - - let (websites_path, modules_path) = - find_dev_dirs().expect("Run from within the brk workspace"); - let source_folder = "bitview"; - - let dist_path = bundle(&modules_path, &websites_path, source_folder, true).await?; - - println!("Bundle created at: {}", dist_path.display()); - println!("Watching for changes... 
(Ctrl+C to stop)"); - - loop { - thread::sleep(Duration::from_secs(60)); - } -} diff --git a/crates/brk_bundler/src/lib.rs b/crates/brk_bundler/src/lib.rs deleted file mode 100644 index 11a76151a..000000000 --- a/crates/brk_bundler/src/lib.rs +++ /dev/null @@ -1,246 +0,0 @@ -#![doc = include_str!("../README.md")] - -use std::{ - fs, io, - path::{Path, PathBuf}, - time::Duration, -}; - -use log::{debug, error, info}; -use notify::{EventKind, PollWatcher, RecursiveMode, Watcher}; -use rolldown::{ - Bundler, BundlerConfig, BundlerOptions, InlineConstConfig, InlineConstMode, InlineConstOption, - OptimizationOption, RawMinifyOptions, SourceMapType, -}; -use sugar_path::SugarPath; - -const VERSION: &str = env!("CARGO_PKG_VERSION"); - -pub async fn bundle( - modules_path: &Path, - websites_path: &Path, - source_folder: &str, - watch: bool, -) -> io::Result { - let relative_modules_path = modules_path; - let relative_source_path = websites_path.join(source_folder); - let relative_dist_path = websites_path.join("dist"); - - let absolute_modules_path = relative_modules_path.absolutize(); - let absolute_modules_path_clone = absolute_modules_path.clone(); - let absolute_websites_path = websites_path.absolutize(); - let absolute_websites_path_clone = absolute_websites_path.clone(); - - let absolute_source_path = relative_source_path.absolutize(); - let absolute_source_index_path = absolute_source_path.join("index.html"); - let absolute_source_index_path_clone = absolute_source_index_path.clone(); - let absolute_source_scripts_path = absolute_source_path.join("scripts"); - let absolute_source_scripts_modules_path = absolute_source_scripts_path.join("modules"); - let absolute_source_sw_path = absolute_source_path.join("service-worker.js"); - let absolute_source_sw_path_clone = absolute_source_sw_path.clone(); - - let absolute_dist_path = relative_dist_path.absolutize(); - let absolute_dist_scripts_path = absolute_dist_path.join("scripts"); - let absolute_dist_scripts_entry_path = 
absolute_dist_scripts_path.join("entry.js"); - let absolute_dist_scripts_entry_path_clone = absolute_dist_scripts_entry_path.clone(); - let absolute_dist_index_path = absolute_dist_path.join("index.html"); - let absolute_dist_sw_path = absolute_dist_path.join("service-worker.js"); - - info!("Bundling {source_folder}..."); - info!(" modules: {absolute_modules_path:?}"); - info!(" source: {absolute_source_path:?}"); - info!(" dist: {absolute_dist_path:?}"); - - let _ = fs::remove_dir_all(&absolute_dist_path); - let _ = fs::remove_dir_all(&absolute_source_scripts_modules_path); - copy_dir_all( - &absolute_modules_path, - &absolute_source_scripts_modules_path, - )?; - copy_dir_all(&absolute_source_path, &absolute_dist_path)?; - fs::remove_dir_all(&absolute_dist_scripts_path)?; - fs::create_dir(&absolute_dist_scripts_path)?; - - // dbg!(BundlerOptions::default()); - - let bundler_options = BundlerOptions { - input: Some(vec![format!("./{source_folder}/scripts/entry.js").into()]), - dir: Some("./dist/scripts".to_string()), - cwd: Some(absolute_websites_path), - minify: Some(RawMinifyOptions::Bool(true)), - sourcemap: Some(SourceMapType::File), - // advanced_chunks: Some(AdvancedChunksOptions { - // // min_size: Some(1000.0), - // min_share_count: Some(20), - // // min_module_size: S - // // include_dependencies_recursively: Some(true), - // ..Default::default() - // }), - // - // inline_dynamic_imports - // experimental: Some(ExperimentalOptions { - // strict_execution_order: Some(true), - // ..Default::default() - // }), - optimization: Some(OptimizationOption { - inline_const: Some(InlineConstOption::Config(InlineConstConfig { - mode: Some(InlineConstMode::All), - ..Default::default() - })), - // Needs benchmarks - // pife_for_module_wrappers: Some(true), - ..Default::default() - }), - ..Default::default() - }; - - let mut bundler = Bundler::new(bundler_options.clone()).unwrap(); - - if let Err(error) = bundler.write().await { - error!("{error:?}"); - } - - let 
update_dist_index = move || { - let mut contents = fs::read_to_string(&absolute_source_index_path).unwrap(); - - if let Ok(entry) = fs::read_to_string(&absolute_dist_scripts_entry_path_clone) - && let Some(start) = entry.find("main") - && let Some(end) = entry.find(".js") - { - let main_hashed = &entry[start..end]; - contents = contents.replace("/scripts/main.js", &format!("/scripts/{main_hashed}.js")); - } - - let _ = fs::write(&absolute_dist_index_path, contents); - }; - - let update_source_sw = move || { - let contents = fs::read_to_string(&absolute_source_sw_path) - .unwrap() - .replace("__VERSION__", &format!("v{VERSION}")); - let _ = fs::write(&absolute_dist_sw_path, contents); - }; - - update_dist_index(); - update_source_sw(); - - if !watch { - return Ok(relative_dist_path); - } - - // Clone paths for the second watcher - let absolute_websites_path_clone2 = absolute_websites_path_clone.clone(); - let absolute_modules_path_clone2 = absolute_modules_path_clone.clone(); - - tokio::spawn(async move { - let handle_event = { - let absolute_dist_scripts_entry_path = absolute_dist_scripts_entry_path.clone(); - let absolute_source_index_path_clone = absolute_source_index_path_clone.clone(); - let absolute_source_sw_path_clone = absolute_source_sw_path_clone.clone(); - let absolute_modules_path = absolute_modules_path.clone(); - let absolute_source_scripts_modules_path = absolute_source_scripts_modules_path.clone(); - let absolute_source_path = absolute_source_path.clone(); - let absolute_source_scripts_path = absolute_source_scripts_path.clone(); - let absolute_dist_path = absolute_dist_path.clone(); - let update_dist_index = update_dist_index.clone(); - let update_source_sw = update_source_sw.clone(); - - move |path: PathBuf| { - let path = path.absolutize(); - - if path == absolute_dist_scripts_entry_path - || path == absolute_source_index_path_clone - { - update_dist_index(); - } else if path == absolute_source_sw_path_clone { - update_source_sw(); - } else if 
let Ok(suffix) = path.strip_prefix(&absolute_modules_path) { - let dest = absolute_source_scripts_modules_path.join(suffix); - if path.is_file() { - debug!("Copying module: {path:?} -> {dest:?}"); - let _ = fs::create_dir_all(dest.parent().unwrap()); - if let Err(e) = fs::copy(&path, &dest) { - error!("Copy failed: {e}"); - } - } - } else if let Ok(suffix) = path.strip_prefix(&absolute_source_path) - // scripts are handled by rolldown - && !path.starts_with(&absolute_source_scripts_path) - { - let dist_path = absolute_dist_path.join(suffix); - if path.is_file() { - let _ = fs::create_dir_all(path.parent().unwrap()); - let _ = fs::copy(&path, &dist_path); - } - } - } - }; - - // FSEvents watcher for instant response to manual saves - let handle_event_clone = handle_event.clone(); - let mut fs_watcher = notify::recommended_watcher( - move |res: Result| match res { - Ok(event) => match event.kind { - EventKind::Create(_) | EventKind::Modify(_) => { - event.paths.into_iter().for_each(&handle_event_clone); - } - _ => {} - }, - Err(e) => error!("fs watch error: {e:?}"), - }, - ) - .unwrap(); - - fs_watcher - .watch(&absolute_websites_path_clone, RecursiveMode::Recursive) - .unwrap(); - fs_watcher - .watch(&absolute_modules_path_clone, RecursiveMode::Recursive) - .unwrap(); - - // Poll watcher to catch programmatic edits (e.g., Claude Code's atomic writes) - let poll_config = notify::Config::default() - .with_poll_interval(Duration::from_secs(1)); - let mut poll_watcher = PollWatcher::new( - move |res: Result| match res { - Ok(event) => match event.kind { - EventKind::Create(_) | EventKind::Modify(_) => { - event.paths.into_iter().for_each(&handle_event); - } - _ => {} - }, - Err(e) => error!("poll watch error: {e:?}"), - }, - poll_config, - ) - .unwrap(); - - poll_watcher - .watch(&absolute_websites_path_clone2, RecursiveMode::Recursive) - .unwrap(); - poll_watcher - .watch(&absolute_modules_path_clone2, RecursiveMode::Recursive) - .unwrap(); - - let config = 
BundlerConfig::new(bundler_options, vec![]); - let watcher = rolldown::Watcher::new(config, None).unwrap(); - - watcher.start().await; - }); - - Ok(relative_dist_path) -} - -fn copy_dir_all(src: impl AsRef, dst: impl AsRef) -> io::Result<()> { - fs::create_dir_all(&dst)?; - for entry in fs::read_dir(src)? { - let entry = entry?; - let ty = entry.file_type()?; - if ty.is_dir() { - copy_dir_all(entry.path(), dst.as_ref().join(entry.file_name()))?; - } else { - fs::copy(entry.path(), dst.as_ref().join(entry.file_name()))?; - } - } - Ok(()) -} diff --git a/crates/brk_cli/Cargo.toml b/crates/brk_cli/Cargo.toml index ed2dab225..ba9ca70ed 100644 --- a/crates/brk_cli/Cargo.toml +++ b/crates/brk_cli/Cargo.toml @@ -9,8 +9,8 @@ repository.workspace = true build = "build.rs" [dependencies] +brk_alloc = { workspace = true } brk_bindgen = { workspace = true } -brk_bundler = { workspace = true } brk_computer = { workspace = true } brk_error = { workspace = true } brk_fetcher = { workspace = true } @@ -24,8 +24,9 @@ brk_rpc = { workspace = true } brk_server = { workspace = true } clap = { version = "4.5.54", features = ["derive", "string"] } color-eyre = { workspace = true } -brk_alloc = { workspace = true } -log = { workspace = true } +importmap = "0.1.1" +# importmap = { path = "../../../importmap" } +tracing = { workspace = true } minreq = { workspace = true } serde = { workspace = true } tokio = { workspace = true } diff --git a/crates/brk_cli/README.md b/crates/brk_cli/README.md index 0625daacf..fe24c3081 100644 --- a/crates/brk_cli/README.md +++ b/crates/brk_cli/README.md @@ -57,4 +57,3 @@ Full benchmark data: [`https://github.com/bitcoinresearchkit/benches/tree/main/b - `brk_computer` for metric computation - `brk_mempool` for mempool monitoring - `brk_server` for HTTP API -- `brk_bundler` for web interface bundling diff --git a/crates/brk_cli/src/main.rs b/crates/brk_cli/src/main.rs index c41084ab6..a73eab8ca 100644 --- a/crates/brk_cli/src/main.rs +++ 
b/crates/brk_cli/src/main.rs @@ -9,7 +9,6 @@ use std::{ }; use brk_alloc::Mimalloc; -use brk_bundler::bundle; use brk_computer::Computer; use brk_error::Result; use brk_indexer::Indexer; @@ -18,7 +17,7 @@ use brk_mempool::Mempool; use brk_query::AsyncQuery; use brk_reader::Reader; use brk_server::{Server, VERSION}; -use log::info; +use tracing::info; use vecdb::Exit; mod config; @@ -100,17 +99,11 @@ pub fn run() -> color_eyre::Result<()> { } }; - let websites_path; - let modules_path; - - if let Some((websites, modules)) = find_dev_dirs() { - websites_path = websites; - modules_path = modules; + let websites_path = if let Some((websites, _modules)) = find_dev_dirs() { + websites } else { let downloaded_brk_path = downloads_path.join(format!("brk-{VERSION}")); - let downloaded_websites_path = downloaded_brk_path.join("websites"); - let downloaded_modules_path = downloaded_brk_path.join("modules"); if !fs::exists(&downloaded_websites_path)? { info!("Downloading source from Github..."); @@ -128,15 +121,30 @@ pub fn run() -> color_eyre::Result<()> { zip.extract(downloads_path).unwrap(); } - websites_path = downloaded_websites_path; - modules_path = downloaded_modules_path; - } + downloaded_websites_path + }; Some(websites_path.join(website.to_folder_name())) } else { None }; + // Generate import map for cache busting + if let Some(ref path) = bundle_path { + match importmap::ImportMap::scan(path, "") { + Ok(map) => { + let html_path = path.join("index.html"); + if let Ok(html) = fs::read_to_string(&html_path) + && let Some(updated) = map.update_html(&html) + { + let _ = fs::write(&html_path, updated); + info!("Updated importmap in index.html"); + } + } + Err(e) => tracing::error!("Failed to generate importmap: {e}"), + } + } + let server = Server::new(&query, bundle_path); tokio::spawn(async move { diff --git a/crates/brk_client/src/lib.rs b/crates/brk_client/src/lib.rs index 6fac43329..5feb29f7d 100644 --- a/crates/brk_client/src/lib.rs +++ 
b/crates/brk_client/src/lib.rs @@ -461,15 +461,12 @@ pub struct MetricPattern4By { } impl MetricPattern4By { + pub fn by_dateindex(&self) -> Endpoint { + Endpoint::new(self.client.clone(), self.name.clone(), Index::DateIndex) + } pub fn by_decadeindex(&self) -> Endpoint { Endpoint::new(self.client.clone(), self.name.clone(), Index::DecadeIndex) } - pub fn by_difficultyepoch(&self) -> Endpoint { - Endpoint::new(self.client.clone(), self.name.clone(), Index::DifficultyEpoch) - } - pub fn by_height(&self) -> Endpoint { - Endpoint::new(self.client.clone(), self.name.clone(), Index::Height) - } pub fn by_monthindex(&self) -> Endpoint { Endpoint::new(self.client.clone(), self.name.clone(), Index::MonthIndex) } @@ -487,7 +484,7 @@ impl MetricPattern4By { } } -/// Index accessor for metrics with 8 indexes. +/// Index accessor for metrics with 7 indexes. pub struct MetricPattern4 { client: Arc, name: Arc, @@ -521,9 +518,8 @@ impl AnyMetricPattern for MetricPattern4 { fn indexes(&self) -> &'static [Index] { &[ + Index::DateIndex, Index::DecadeIndex, - Index::DifficultyEpoch, - Index::Height, Index::MonthIndex, Index::QuarterIndex, Index::SemesterIndex, @@ -536,9 +532,8 @@ impl AnyMetricPattern for MetricPattern4 { impl MetricPattern for MetricPattern4 { fn get(&self, index: Index) -> Option> { match index { + Index::DateIndex => Some(self.by.by_dateindex()), Index::DecadeIndex => Some(self.by.by_decadeindex()), - Index::DifficultyEpoch => Some(self.by.by_difficultyepoch()), - Index::Height => Some(self.by.by_height()), Index::MonthIndex => Some(self.by.by_monthindex()), Index::QuarterIndex => Some(self.by.by_quarterindex()), Index::SemesterIndex => Some(self.by.by_semesterindex()), @@ -557,9 +552,6 @@ pub struct MetricPattern5By { } impl MetricPattern5By { - pub fn by_dateindex(&self) -> Endpoint { - Endpoint::new(self.client.clone(), self.name.clone(), Index::DateIndex) - } pub fn by_decadeindex(&self) -> Endpoint { Endpoint::new(self.client.clone(), self.name.clone(), 
Index::DecadeIndex) } @@ -580,7 +572,7 @@ impl MetricPattern5By { } } -/// Index accessor for metrics with 7 indexes. +/// Index accessor for metrics with 6 indexes. pub struct MetricPattern5 { client: Arc, name: Arc, @@ -614,7 +606,6 @@ impl AnyMetricPattern for MetricPattern5 { fn indexes(&self) -> &'static [Index] { &[ - Index::DateIndex, Index::DecadeIndex, Index::MonthIndex, Index::QuarterIndex, @@ -628,7 +619,6 @@ impl AnyMetricPattern for MetricPattern5 { impl MetricPattern for MetricPattern5 { fn get(&self, index: Index) -> Option> { match index { - Index::DateIndex => Some(self.by.by_dateindex()), Index::DecadeIndex => Some(self.by.by_decadeindex()), Index::MonthIndex => Some(self.by.by_monthindex()), Index::QuarterIndex => Some(self.by.by_quarterindex()), @@ -648,30 +638,18 @@ pub struct MetricPattern6By { } impl MetricPattern6By { - pub fn by_decadeindex(&self) -> Endpoint { - Endpoint::new(self.client.clone(), self.name.clone(), Index::DecadeIndex) - } - pub fn by_difficultyepoch(&self) -> Endpoint { - Endpoint::new(self.client.clone(), self.name.clone(), Index::DifficultyEpoch) - } - pub fn by_monthindex(&self) -> Endpoint { - Endpoint::new(self.client.clone(), self.name.clone(), Index::MonthIndex) - } pub fn by_quarterindex(&self) -> Endpoint { Endpoint::new(self.client.clone(), self.name.clone(), Index::QuarterIndex) } pub fn by_semesterindex(&self) -> Endpoint { Endpoint::new(self.client.clone(), self.name.clone(), Index::SemesterIndex) } - pub fn by_weekindex(&self) -> Endpoint { - Endpoint::new(self.client.clone(), self.name.clone(), Index::WeekIndex) - } pub fn by_yearindex(&self) -> Endpoint { Endpoint::new(self.client.clone(), self.name.clone(), Index::YearIndex) } } -/// Index accessor for metrics with 7 indexes. +/// Index accessor for metrics with 3 indexes. 
pub struct MetricPattern6 { client: Arc, name: Arc, @@ -705,12 +683,8 @@ impl AnyMetricPattern for MetricPattern6 { fn indexes(&self) -> &'static [Index] { &[ - Index::DecadeIndex, - Index::DifficultyEpoch, - Index::MonthIndex, Index::QuarterIndex, Index::SemesterIndex, - Index::WeekIndex, Index::YearIndex, ] } @@ -719,12 +693,8 @@ impl AnyMetricPattern for MetricPattern6 { impl MetricPattern for MetricPattern6 { fn get(&self, index: Index) -> Option> { match index { - Index::DecadeIndex => Some(self.by.by_decadeindex()), - Index::DifficultyEpoch => Some(self.by.by_difficultyepoch()), - Index::MonthIndex => Some(self.by.by_monthindex()), Index::QuarterIndex => Some(self.by.by_quarterindex()), Index::SemesterIndex => Some(self.by.by_semesterindex()), - Index::WeekIndex => Some(self.by.by_weekindex()), Index::YearIndex => Some(self.by.by_yearindex()), _ => None, } @@ -739,27 +709,15 @@ pub struct MetricPattern7By { } impl MetricPattern7By { - pub fn by_decadeindex(&self) -> Endpoint { - Endpoint::new(self.client.clone(), self.name.clone(), Index::DecadeIndex) + pub fn by_dateindex(&self) -> Endpoint { + Endpoint::new(self.client.clone(), self.name.clone(), Index::DateIndex) } - pub fn by_monthindex(&self) -> Endpoint { - Endpoint::new(self.client.clone(), self.name.clone(), Index::MonthIndex) - } - pub fn by_quarterindex(&self) -> Endpoint { - Endpoint::new(self.client.clone(), self.name.clone(), Index::QuarterIndex) - } - pub fn by_semesterindex(&self) -> Endpoint { - Endpoint::new(self.client.clone(), self.name.clone(), Index::SemesterIndex) - } - pub fn by_weekindex(&self) -> Endpoint { - Endpoint::new(self.client.clone(), self.name.clone(), Index::WeekIndex) - } - pub fn by_yearindex(&self) -> Endpoint { - Endpoint::new(self.client.clone(), self.name.clone(), Index::YearIndex) + pub fn by_height(&self) -> Endpoint { + Endpoint::new(self.client.clone(), self.name.clone(), Index::Height) } } -/// Index accessor for metrics with 6 indexes. 
+/// Index accessor for metrics with 2 indexes. pub struct MetricPattern7 { client: Arc, name: Arc, @@ -793,12 +751,8 @@ impl AnyMetricPattern for MetricPattern7 { fn indexes(&self) -> &'static [Index] { &[ - Index::DecadeIndex, - Index::MonthIndex, - Index::QuarterIndex, - Index::SemesterIndex, - Index::WeekIndex, - Index::YearIndex, + Index::DateIndex, + Index::Height, ] } } @@ -806,12 +760,8 @@ impl AnyMetricPattern for MetricPattern7 { impl MetricPattern for MetricPattern7 { fn get(&self, index: Index) -> Option> { match index { - Index::DecadeIndex => Some(self.by.by_decadeindex()), - Index::MonthIndex => Some(self.by.by_monthindex()), - Index::QuarterIndex => Some(self.by.by_quarterindex()), - Index::SemesterIndex => Some(self.by.by_semesterindex()), - Index::WeekIndex => Some(self.by.by_weekindex()), - Index::YearIndex => Some(self.by.by_yearindex()), + Index::DateIndex => Some(self.by.by_dateindex()), + Index::Height => Some(self.by.by_height()), _ => None, } } @@ -825,21 +775,15 @@ pub struct MetricPattern8By { } impl MetricPattern8By { - pub fn by_emptyoutputindex(&self) -> Endpoint { - Endpoint::new(self.client.clone(), self.name.clone(), Index::EmptyOutputIndex) + pub fn by_dateindex(&self) -> Endpoint { + Endpoint::new(self.client.clone(), self.name.clone(), Index::DateIndex) } - pub fn by_opreturnindex(&self) -> Endpoint { - Endpoint::new(self.client.clone(), self.name.clone(), Index::OpReturnIndex) - } - pub fn by_p2msoutputindex(&self) -> Endpoint { - Endpoint::new(self.client.clone(), self.name.clone(), Index::P2MSOutputIndex) - } - pub fn by_unknownoutputindex(&self) -> Endpoint { - Endpoint::new(self.client.clone(), self.name.clone(), Index::UnknownOutputIndex) + pub fn by_monthindex(&self) -> Endpoint { + Endpoint::new(self.client.clone(), self.name.clone(), Index::MonthIndex) } } -/// Index accessor for metrics with 4 indexes. +/// Index accessor for metrics with 2 indexes. 
pub struct MetricPattern8 { client: Arc, name: Arc, @@ -873,10 +817,8 @@ impl AnyMetricPattern for MetricPattern8 { fn indexes(&self) -> &'static [Index] { &[ - Index::EmptyOutputIndex, - Index::OpReturnIndex, - Index::P2MSOutputIndex, - Index::UnknownOutputIndex, + Index::DateIndex, + Index::MonthIndex, ] } } @@ -884,10 +826,8 @@ impl AnyMetricPattern for MetricPattern8 { impl MetricPattern for MetricPattern8 { fn get(&self, index: Index) -> Option> { match index { - Index::EmptyOutputIndex => Some(self.by.by_emptyoutputindex()), - Index::OpReturnIndex => Some(self.by.by_opreturnindex()), - Index::P2MSOutputIndex => Some(self.by.by_p2msoutputindex()), - Index::UnknownOutputIndex => Some(self.by.by_unknownoutputindex()), + Index::DateIndex => Some(self.by.by_dateindex()), + Index::MonthIndex => Some(self.by.by_monthindex()), _ => None, } } @@ -901,18 +841,15 @@ pub struct MetricPattern9By { } impl MetricPattern9By { - pub fn by_quarterindex(&self) -> Endpoint { - Endpoint::new(self.client.clone(), self.name.clone(), Index::QuarterIndex) + pub fn by_dateindex(&self) -> Endpoint { + Endpoint::new(self.client.clone(), self.name.clone(), Index::DateIndex) } - pub fn by_semesterindex(&self) -> Endpoint { - Endpoint::new(self.client.clone(), self.name.clone(), Index::SemesterIndex) - } - pub fn by_yearindex(&self) -> Endpoint { - Endpoint::new(self.client.clone(), self.name.clone(), Index::YearIndex) + pub fn by_weekindex(&self) -> Endpoint { + Endpoint::new(self.client.clone(), self.name.clone(), Index::WeekIndex) } } -/// Index accessor for metrics with 3 indexes. +/// Index accessor for metrics with 2 indexes. 
pub struct MetricPattern9 { client: Arc, name: Arc, @@ -946,9 +883,8 @@ impl AnyMetricPattern for MetricPattern9 { fn indexes(&self) -> &'static [Index] { &[ - Index::QuarterIndex, - Index::SemesterIndex, - Index::YearIndex, + Index::DateIndex, + Index::WeekIndex, ] } } @@ -956,9 +892,8 @@ impl AnyMetricPattern for MetricPattern9 { impl MetricPattern for MetricPattern9 { fn get(&self, index: Index) -> Option> { match index { - Index::QuarterIndex => Some(self.by.by_quarterindex()), - Index::SemesterIndex => Some(self.by.by_semesterindex()), - Index::YearIndex => Some(self.by.by_yearindex()), + Index::DateIndex => Some(self.by.by_dateindex()), + Index::WeekIndex => Some(self.by.by_weekindex()), _ => None, } } @@ -972,11 +907,11 @@ pub struct MetricPattern10By { } impl MetricPattern10By { - pub fn by_dateindex(&self) -> Endpoint { - Endpoint::new(self.client.clone(), self.name.clone(), Index::DateIndex) + pub fn by_decadeindex(&self) -> Endpoint { + Endpoint::new(self.client.clone(), self.name.clone(), Index::DecadeIndex) } - pub fn by_height(&self) -> Endpoint { - Endpoint::new(self.client.clone(), self.name.clone(), Index::Height) + pub fn by_yearindex(&self) -> Endpoint { + Endpoint::new(self.client.clone(), self.name.clone(), Index::YearIndex) } } @@ -1014,8 +949,8 @@ impl AnyMetricPattern for MetricPattern10 { fn indexes(&self) -> &'static [Index] { &[ - Index::DateIndex, - Index::Height, + Index::DecadeIndex, + Index::YearIndex, ] } } @@ -1023,8 +958,8 @@ impl AnyMetricPattern for MetricPattern10 { impl MetricPattern for MetricPattern10 { fn get(&self, index: Index) -> Option> { match index { - Index::DateIndex => Some(self.by.by_dateindex()), - Index::Height => Some(self.by.by_height()), + Index::DecadeIndex => Some(self.by.by_decadeindex()), + Index::YearIndex => Some(self.by.by_yearindex()), _ => None, } } @@ -1038,11 +973,11 @@ pub struct MetricPattern11By { } impl MetricPattern11By { - pub fn by_dateindex(&self) -> Endpoint { - 
Endpoint::new(self.client.clone(), self.name.clone(), Index::DateIndex) + pub fn by_difficultyepoch(&self) -> Endpoint { + Endpoint::new(self.client.clone(), self.name.clone(), Index::DifficultyEpoch) } - pub fn by_monthindex(&self) -> Endpoint { - Endpoint::new(self.client.clone(), self.name.clone(), Index::MonthIndex) + pub fn by_halvingepoch(&self) -> Endpoint { + Endpoint::new(self.client.clone(), self.name.clone(), Index::HalvingEpoch) } } @@ -1080,8 +1015,8 @@ impl AnyMetricPattern for MetricPattern11 { fn indexes(&self) -> &'static [Index] { &[ - Index::DateIndex, - Index::MonthIndex, + Index::DifficultyEpoch, + Index::HalvingEpoch, ] } } @@ -1089,8 +1024,8 @@ impl AnyMetricPattern for MetricPattern11 { impl MetricPattern for MetricPattern11 { fn get(&self, index: Index) -> Option> { match index { - Index::DateIndex => Some(self.by.by_dateindex()), - Index::MonthIndex => Some(self.by.by_monthindex()), + Index::DifficultyEpoch => Some(self.by.by_difficultyepoch()), + Index::HalvingEpoch => Some(self.by.by_halvingepoch()), _ => None, } } @@ -1104,11 +1039,11 @@ pub struct MetricPattern12By { } impl MetricPattern12By { - pub fn by_dateindex(&self) -> Endpoint { - Endpoint::new(self.client.clone(), self.name.clone(), Index::DateIndex) + pub fn by_difficultyepoch(&self) -> Endpoint { + Endpoint::new(self.client.clone(), self.name.clone(), Index::DifficultyEpoch) } - pub fn by_weekindex(&self) -> Endpoint { - Endpoint::new(self.client.clone(), self.name.clone(), Index::WeekIndex) + pub fn by_height(&self) -> Endpoint { + Endpoint::new(self.client.clone(), self.name.clone(), Index::Height) } } @@ -1146,8 +1081,8 @@ impl AnyMetricPattern for MetricPattern12 { fn indexes(&self) -> &'static [Index] { &[ - Index::DateIndex, - Index::WeekIndex, + Index::DifficultyEpoch, + Index::Height, ] } } @@ -1155,8 +1090,8 @@ impl AnyMetricPattern for MetricPattern12 { impl MetricPattern for MetricPattern12 { fn get(&self, index: Index) -> Option> { match index { - Index::DateIndex 
=> Some(self.by.by_dateindex()), - Index::WeekIndex => Some(self.by.by_weekindex()), + Index::DifficultyEpoch => Some(self.by.by_difficultyepoch()), + Index::Height => Some(self.by.by_height()), _ => None, } } @@ -1170,11 +1105,11 @@ pub struct MetricPattern13By { } impl MetricPattern13By { - pub fn by_decadeindex(&self) -> Endpoint { - Endpoint::new(self.client.clone(), self.name.clone(), Index::DecadeIndex) + pub fn by_halvingepoch(&self) -> Endpoint { + Endpoint::new(self.client.clone(), self.name.clone(), Index::HalvingEpoch) } - pub fn by_yearindex(&self) -> Endpoint { - Endpoint::new(self.client.clone(), self.name.clone(), Index::YearIndex) + pub fn by_height(&self) -> Endpoint { + Endpoint::new(self.client.clone(), self.name.clone(), Index::Height) } } @@ -1212,8 +1147,8 @@ impl AnyMetricPattern for MetricPattern13 { fn indexes(&self) -> &'static [Index] { &[ - Index::DecadeIndex, - Index::YearIndex, + Index::HalvingEpoch, + Index::Height, ] } } @@ -1221,8 +1156,8 @@ impl AnyMetricPattern for MetricPattern13 { impl MetricPattern for MetricPattern13 { fn get(&self, index: Index) -> Option> { match index { - Index::DecadeIndex => Some(self.by.by_decadeindex()), - Index::YearIndex => Some(self.by.by_yearindex()), + Index::HalvingEpoch => Some(self.by.by_halvingepoch()), + Index::Height => Some(self.by.by_height()), _ => None, } } @@ -1236,11 +1171,11 @@ pub struct MetricPattern14By { } impl MetricPattern14By { - pub fn by_difficultyepoch(&self) -> Endpoint { - Endpoint::new(self.client.clone(), self.name.clone(), Index::DifficultyEpoch) + pub fn by_height(&self) -> Endpoint { + Endpoint::new(self.client.clone(), self.name.clone(), Index::Height) } - pub fn by_halvingepoch(&self) -> Endpoint { - Endpoint::new(self.client.clone(), self.name.clone(), Index::HalvingEpoch) + pub fn by_txindex(&self) -> Endpoint { + Endpoint::new(self.client.clone(), self.name.clone(), Index::TxIndex) } } @@ -1278,8 +1213,8 @@ impl AnyMetricPattern for MetricPattern14 { fn 
indexes(&self) -> &'static [Index] { &[ - Index::DifficultyEpoch, - Index::HalvingEpoch, + Index::Height, + Index::TxIndex, ] } } @@ -1287,8 +1222,8 @@ impl AnyMetricPattern for MetricPattern14 { impl MetricPattern for MetricPattern14 { fn get(&self, index: Index) -> Option> { match index { - Index::DifficultyEpoch => Some(self.by.by_difficultyepoch()), - Index::HalvingEpoch => Some(self.by.by_halvingepoch()), + Index::Height => Some(self.by.by_height()), + Index::TxIndex => Some(self.by.by_txindex()), _ => None, } } @@ -1302,11 +1237,11 @@ pub struct MetricPattern15By { } impl MetricPattern15By { - pub fn by_difficultyepoch(&self) -> Endpoint { - Endpoint::new(self.client.clone(), self.name.clone(), Index::DifficultyEpoch) + pub fn by_monthindex(&self) -> Endpoint { + Endpoint::new(self.client.clone(), self.name.clone(), Index::MonthIndex) } - pub fn by_height(&self) -> Endpoint { - Endpoint::new(self.client.clone(), self.name.clone(), Index::Height) + pub fn by_quarterindex(&self) -> Endpoint { + Endpoint::new(self.client.clone(), self.name.clone(), Index::QuarterIndex) } } @@ -1344,8 +1279,8 @@ impl AnyMetricPattern for MetricPattern15 { fn indexes(&self) -> &'static [Index] { &[ - Index::DifficultyEpoch, - Index::Height, + Index::MonthIndex, + Index::QuarterIndex, ] } } @@ -1353,8 +1288,8 @@ impl AnyMetricPattern for MetricPattern15 { impl MetricPattern for MetricPattern15 { fn get(&self, index: Index) -> Option> { match index { - Index::DifficultyEpoch => Some(self.by.by_difficultyepoch()), - Index::Height => Some(self.by.by_height()), + Index::MonthIndex => Some(self.by.by_monthindex()), + Index::QuarterIndex => Some(self.by.by_quarterindex()), _ => None, } } @@ -1368,11 +1303,11 @@ pub struct MetricPattern16By { } impl MetricPattern16By { - pub fn by_halvingepoch(&self) -> Endpoint { - Endpoint::new(self.client.clone(), self.name.clone(), Index::HalvingEpoch) + pub fn by_monthindex(&self) -> Endpoint { + Endpoint::new(self.client.clone(), self.name.clone(), 
Index::MonthIndex) } - pub fn by_height(&self) -> Endpoint { - Endpoint::new(self.client.clone(), self.name.clone(), Index::Height) + pub fn by_semesterindex(&self) -> Endpoint { + Endpoint::new(self.client.clone(), self.name.clone(), Index::SemesterIndex) } } @@ -1410,8 +1345,8 @@ impl AnyMetricPattern for MetricPattern16 { fn indexes(&self) -> &'static [Index] { &[ - Index::HalvingEpoch, - Index::Height, + Index::MonthIndex, + Index::SemesterIndex, ] } } @@ -1419,8 +1354,8 @@ impl AnyMetricPattern for MetricPattern16 { impl MetricPattern for MetricPattern16 { fn get(&self, index: Index) -> Option> { match index { - Index::HalvingEpoch => Some(self.by.by_halvingepoch()), - Index::Height => Some(self.by.by_height()), + Index::MonthIndex => Some(self.by.by_monthindex()), + Index::SemesterIndex => Some(self.by.by_semesterindex()), _ => None, } } @@ -1434,11 +1369,11 @@ pub struct MetricPattern17By { } impl MetricPattern17By { - pub fn by_height(&self) -> Endpoint { - Endpoint::new(self.client.clone(), self.name.clone(), Index::Height) + pub fn by_monthindex(&self) -> Endpoint { + Endpoint::new(self.client.clone(), self.name.clone(), Index::MonthIndex) } - pub fn by_txindex(&self) -> Endpoint { - Endpoint::new(self.client.clone(), self.name.clone(), Index::TxIndex) + pub fn by_weekindex(&self) -> Endpoint { + Endpoint::new(self.client.clone(), self.name.clone(), Index::WeekIndex) } } @@ -1476,8 +1411,8 @@ impl AnyMetricPattern for MetricPattern17 { fn indexes(&self) -> &'static [Index] { &[ - Index::Height, - Index::TxIndex, + Index::MonthIndex, + Index::WeekIndex, ] } } @@ -1485,8 +1420,8 @@ impl AnyMetricPattern for MetricPattern17 { impl MetricPattern for MetricPattern17 { fn get(&self, index: Index) -> Option> { match index { - Index::Height => Some(self.by.by_height()), - Index::TxIndex => Some(self.by.by_txindex()), + Index::MonthIndex => Some(self.by.by_monthindex()), + Index::WeekIndex => Some(self.by.by_weekindex()), _ => None, } } @@ -1503,8 +1438,8 @@ impl 
MetricPattern18By { pub fn by_monthindex(&self) -> Endpoint { Endpoint::new(self.client.clone(), self.name.clone(), Index::MonthIndex) } - pub fn by_quarterindex(&self) -> Endpoint { - Endpoint::new(self.client.clone(), self.name.clone(), Index::QuarterIndex) + pub fn by_yearindex(&self) -> Endpoint { + Endpoint::new(self.client.clone(), self.name.clone(), Index::YearIndex) } } @@ -1543,7 +1478,7 @@ impl AnyMetricPattern for MetricPattern18 { fn indexes(&self) -> &'static [Index] { &[ Index::MonthIndex, - Index::QuarterIndex, + Index::YearIndex, ] } } @@ -1552,7 +1487,7 @@ impl MetricPattern for MetricPattern18 { fn get(&self, index: Index) -> Option> { match index { Index::MonthIndex => Some(self.by.by_monthindex()), - Index::QuarterIndex => Some(self.by.by_quarterindex()), + Index::YearIndex => Some(self.by.by_yearindex()), _ => None, } } @@ -1566,15 +1501,12 @@ pub struct MetricPattern19By { } impl MetricPattern19By { - pub fn by_monthindex(&self) -> Endpoint { - Endpoint::new(self.client.clone(), self.name.clone(), Index::MonthIndex) - } - pub fn by_semesterindex(&self) -> Endpoint { - Endpoint::new(self.client.clone(), self.name.clone(), Index::SemesterIndex) + pub fn by_dateindex(&self) -> Endpoint { + Endpoint::new(self.client.clone(), self.name.clone(), Index::DateIndex) } } -/// Index accessor for metrics with 2 indexes. +/// Index accessor for metrics with 1 indexes. 
pub struct MetricPattern19 { client: Arc, name: Arc, @@ -1608,8 +1540,7 @@ impl AnyMetricPattern for MetricPattern19 { fn indexes(&self) -> &'static [Index] { &[ - Index::MonthIndex, - Index::SemesterIndex, + Index::DateIndex, ] } } @@ -1617,8 +1548,7 @@ impl AnyMetricPattern for MetricPattern19 { impl MetricPattern for MetricPattern19 { fn get(&self, index: Index) -> Option> { match index { - Index::MonthIndex => Some(self.by.by_monthindex()), - Index::SemesterIndex => Some(self.by.by_semesterindex()), + Index::DateIndex => Some(self.by.by_dateindex()), _ => None, } } @@ -1632,15 +1562,12 @@ pub struct MetricPattern20By { } impl MetricPattern20By { - pub fn by_monthindex(&self) -> Endpoint { - Endpoint::new(self.client.clone(), self.name.clone(), Index::MonthIndex) - } - pub fn by_weekindex(&self) -> Endpoint { - Endpoint::new(self.client.clone(), self.name.clone(), Index::WeekIndex) + pub fn by_decadeindex(&self) -> Endpoint { + Endpoint::new(self.client.clone(), self.name.clone(), Index::DecadeIndex) } } -/// Index accessor for metrics with 2 indexes. +/// Index accessor for metrics with 1 indexes. 
pub struct MetricPattern20 { client: Arc, name: Arc, @@ -1674,8 +1601,7 @@ impl AnyMetricPattern for MetricPattern20 { fn indexes(&self) -> &'static [Index] { &[ - Index::MonthIndex, - Index::WeekIndex, + Index::DecadeIndex, ] } } @@ -1683,8 +1609,7 @@ impl AnyMetricPattern for MetricPattern20 { impl MetricPattern for MetricPattern20 { fn get(&self, index: Index) -> Option> { match index { - Index::MonthIndex => Some(self.by.by_monthindex()), - Index::WeekIndex => Some(self.by.by_weekindex()), + Index::DecadeIndex => Some(self.by.by_decadeindex()), _ => None, } } @@ -1698,15 +1623,12 @@ pub struct MetricPattern21By { } impl MetricPattern21By { - pub fn by_monthindex(&self) -> Endpoint { - Endpoint::new(self.client.clone(), self.name.clone(), Index::MonthIndex) - } - pub fn by_yearindex(&self) -> Endpoint { - Endpoint::new(self.client.clone(), self.name.clone(), Index::YearIndex) + pub fn by_difficultyepoch(&self) -> Endpoint { + Endpoint::new(self.client.clone(), self.name.clone(), Index::DifficultyEpoch) } } -/// Index accessor for metrics with 2 indexes. +/// Index accessor for metrics with 1 indexes. 
pub struct MetricPattern21 { client: Arc, name: Arc, @@ -1740,8 +1662,7 @@ impl AnyMetricPattern for MetricPattern21 { fn indexes(&self) -> &'static [Index] { &[ - Index::MonthIndex, - Index::YearIndex, + Index::DifficultyEpoch, ] } } @@ -1749,8 +1670,7 @@ impl AnyMetricPattern for MetricPattern21 { impl MetricPattern for MetricPattern21 { fn get(&self, index: Index) -> Option> { match index { - Index::MonthIndex => Some(self.by.by_monthindex()), - Index::YearIndex => Some(self.by.by_yearindex()), + Index::DifficultyEpoch => Some(self.by.by_difficultyepoch()), _ => None, } } @@ -1764,8 +1684,8 @@ pub struct MetricPattern22By { } impl MetricPattern22By { - pub fn by_dateindex(&self) -> Endpoint { - Endpoint::new(self.client.clone(), self.name.clone(), Index::DateIndex) + pub fn by_emptyoutputindex(&self) -> Endpoint { + Endpoint::new(self.client.clone(), self.name.clone(), Index::EmptyOutputIndex) } } @@ -1803,7 +1723,7 @@ impl AnyMetricPattern for MetricPattern22 { fn indexes(&self) -> &'static [Index] { &[ - Index::DateIndex, + Index::EmptyOutputIndex, ] } } @@ -1811,7 +1731,7 @@ impl AnyMetricPattern for MetricPattern22 { impl MetricPattern for MetricPattern22 { fn get(&self, index: Index) -> Option> { match index { - Index::DateIndex => Some(self.by.by_dateindex()), + Index::EmptyOutputIndex => Some(self.by.by_emptyoutputindex()), _ => None, } } @@ -1825,8 +1745,8 @@ pub struct MetricPattern23By { } impl MetricPattern23By { - pub fn by_decadeindex(&self) -> Endpoint { - Endpoint::new(self.client.clone(), self.name.clone(), Index::DecadeIndex) + pub fn by_height(&self) -> Endpoint { + Endpoint::new(self.client.clone(), self.name.clone(), Index::Height) } } @@ -1864,7 +1784,7 @@ impl AnyMetricPattern for MetricPattern23 { fn indexes(&self) -> &'static [Index] { &[ - Index::DecadeIndex, + Index::Height, ] } } @@ -1872,7 +1792,7 @@ impl AnyMetricPattern for MetricPattern23 { impl MetricPattern for MetricPattern23 { fn get(&self, index: Index) -> Option> { match 
index { - Index::DecadeIndex => Some(self.by.by_decadeindex()), + Index::Height => Some(self.by.by_height()), _ => None, } } @@ -1886,8 +1806,8 @@ pub struct MetricPattern24By { } impl MetricPattern24By { - pub fn by_difficultyepoch(&self) -> Endpoint { - Endpoint::new(self.client.clone(), self.name.clone(), Index::DifficultyEpoch) + pub fn by_txinindex(&self) -> Endpoint { + Endpoint::new(self.client.clone(), self.name.clone(), Index::TxInIndex) } } @@ -1925,7 +1845,7 @@ impl AnyMetricPattern for MetricPattern24 { fn indexes(&self) -> &'static [Index] { &[ - Index::DifficultyEpoch, + Index::TxInIndex, ] } } @@ -1933,7 +1853,7 @@ impl AnyMetricPattern for MetricPattern24 { impl MetricPattern for MetricPattern24 { fn get(&self, index: Index) -> Option> { match index { - Index::DifficultyEpoch => Some(self.by.by_difficultyepoch()), + Index::TxInIndex => Some(self.by.by_txinindex()), _ => None, } } @@ -1947,8 +1867,8 @@ pub struct MetricPattern25By { } impl MetricPattern25By { - pub fn by_emptyoutputindex(&self) -> Endpoint { - Endpoint::new(self.client.clone(), self.name.clone(), Index::EmptyOutputIndex) + pub fn by_opreturnindex(&self) -> Endpoint { + Endpoint::new(self.client.clone(), self.name.clone(), Index::OpReturnIndex) } } @@ -1986,7 +1906,7 @@ impl AnyMetricPattern for MetricPattern25 { fn indexes(&self) -> &'static [Index] { &[ - Index::EmptyOutputIndex, + Index::OpReturnIndex, ] } } @@ -1994,7 +1914,7 @@ impl AnyMetricPattern for MetricPattern25 { impl MetricPattern for MetricPattern25 { fn get(&self, index: Index) -> Option> { match index { - Index::EmptyOutputIndex => Some(self.by.by_emptyoutputindex()), + Index::OpReturnIndex => Some(self.by.by_opreturnindex()), _ => None, } } @@ -2008,8 +1928,8 @@ pub struct MetricPattern26By { } impl MetricPattern26By { - pub fn by_height(&self) -> Endpoint { - Endpoint::new(self.client.clone(), self.name.clone(), Index::Height) + pub fn by_txoutindex(&self) -> Endpoint { + Endpoint::new(self.client.clone(), 
self.name.clone(), Index::TxOutIndex) } } @@ -2047,7 +1967,7 @@ impl AnyMetricPattern for MetricPattern26 { fn indexes(&self) -> &'static [Index] { &[ - Index::Height, + Index::TxOutIndex, ] } } @@ -2055,7 +1975,7 @@ impl AnyMetricPattern for MetricPattern26 { impl MetricPattern for MetricPattern26 { fn get(&self, index: Index) -> Option> { match index { - Index::Height => Some(self.by.by_height()), + Index::TxOutIndex => Some(self.by.by_txoutindex()), _ => None, } } @@ -2069,8 +1989,8 @@ pub struct MetricPattern27By { } impl MetricPattern27By { - pub fn by_txinindex(&self) -> Endpoint { - Endpoint::new(self.client.clone(), self.name.clone(), Index::TxInIndex) + pub fn by_p2aaddressindex(&self) -> Endpoint { + Endpoint::new(self.client.clone(), self.name.clone(), Index::P2AAddressIndex) } } @@ -2108,7 +2028,7 @@ impl AnyMetricPattern for MetricPattern27 { fn indexes(&self) -> &'static [Index] { &[ - Index::TxInIndex, + Index::P2AAddressIndex, ] } } @@ -2116,7 +2036,7 @@ impl AnyMetricPattern for MetricPattern27 { impl MetricPattern for MetricPattern27 { fn get(&self, index: Index) -> Option> { match index { - Index::TxInIndex => Some(self.by.by_txinindex()), + Index::P2AAddressIndex => Some(self.by.by_p2aaddressindex()), _ => None, } } @@ -2130,8 +2050,8 @@ pub struct MetricPattern28By { } impl MetricPattern28By { - pub fn by_opreturnindex(&self) -> Endpoint { - Endpoint::new(self.client.clone(), self.name.clone(), Index::OpReturnIndex) + pub fn by_p2msoutputindex(&self) -> Endpoint { + Endpoint::new(self.client.clone(), self.name.clone(), Index::P2MSOutputIndex) } } @@ -2169,7 +2089,7 @@ impl AnyMetricPattern for MetricPattern28 { fn indexes(&self) -> &'static [Index] { &[ - Index::OpReturnIndex, + Index::P2MSOutputIndex, ] } } @@ -2177,7 +2097,7 @@ impl AnyMetricPattern for MetricPattern28 { impl MetricPattern for MetricPattern28 { fn get(&self, index: Index) -> Option> { match index { - Index::OpReturnIndex => Some(self.by.by_opreturnindex()), + 
Index::P2MSOutputIndex => Some(self.by.by_p2msoutputindex()), _ => None, } } @@ -2191,8 +2111,8 @@ pub struct MetricPattern29By { } impl MetricPattern29By { - pub fn by_txoutindex(&self) -> Endpoint { - Endpoint::new(self.client.clone(), self.name.clone(), Index::TxOutIndex) + pub fn by_p2pk33addressindex(&self) -> Endpoint { + Endpoint::new(self.client.clone(), self.name.clone(), Index::P2PK33AddressIndex) } } @@ -2230,7 +2150,7 @@ impl AnyMetricPattern for MetricPattern29 { fn indexes(&self) -> &'static [Index] { &[ - Index::TxOutIndex, + Index::P2PK33AddressIndex, ] } } @@ -2238,7 +2158,7 @@ impl AnyMetricPattern for MetricPattern29 { impl MetricPattern for MetricPattern29 { fn get(&self, index: Index) -> Option> { match index { - Index::TxOutIndex => Some(self.by.by_txoutindex()), + Index::P2PK33AddressIndex => Some(self.by.by_p2pk33addressindex()), _ => None, } } @@ -2252,8 +2172,8 @@ pub struct MetricPattern30By { } impl MetricPattern30By { - pub fn by_p2aaddressindex(&self) -> Endpoint { - Endpoint::new(self.client.clone(), self.name.clone(), Index::P2AAddressIndex) + pub fn by_p2pk65addressindex(&self) -> Endpoint { + Endpoint::new(self.client.clone(), self.name.clone(), Index::P2PK65AddressIndex) } } @@ -2291,7 +2211,7 @@ impl AnyMetricPattern for MetricPattern30 { fn indexes(&self) -> &'static [Index] { &[ - Index::P2AAddressIndex, + Index::P2PK65AddressIndex, ] } } @@ -2299,7 +2219,7 @@ impl AnyMetricPattern for MetricPattern30 { impl MetricPattern for MetricPattern30 { fn get(&self, index: Index) -> Option> { match index { - Index::P2AAddressIndex => Some(self.by.by_p2aaddressindex()), + Index::P2PK65AddressIndex => Some(self.by.by_p2pk65addressindex()), _ => None, } } @@ -2313,8 +2233,8 @@ pub struct MetricPattern31By { } impl MetricPattern31By { - pub fn by_p2msoutputindex(&self) -> Endpoint { - Endpoint::new(self.client.clone(), self.name.clone(), Index::P2MSOutputIndex) + pub fn by_p2pkhaddressindex(&self) -> Endpoint { + 
Endpoint::new(self.client.clone(), self.name.clone(), Index::P2PKHAddressIndex) } } @@ -2352,7 +2272,7 @@ impl AnyMetricPattern for MetricPattern31 { fn indexes(&self) -> &'static [Index] { &[ - Index::P2MSOutputIndex, + Index::P2PKHAddressIndex, ] } } @@ -2360,7 +2280,7 @@ impl AnyMetricPattern for MetricPattern31 { impl MetricPattern for MetricPattern31 { fn get(&self, index: Index) -> Option> { match index { - Index::P2MSOutputIndex => Some(self.by.by_p2msoutputindex()), + Index::P2PKHAddressIndex => Some(self.by.by_p2pkhaddressindex()), _ => None, } } @@ -2374,8 +2294,8 @@ pub struct MetricPattern32By { } impl MetricPattern32By { - pub fn by_p2pk33addressindex(&self) -> Endpoint { - Endpoint::new(self.client.clone(), self.name.clone(), Index::P2PK33AddressIndex) + pub fn by_p2shaddressindex(&self) -> Endpoint { + Endpoint::new(self.client.clone(), self.name.clone(), Index::P2SHAddressIndex) } } @@ -2413,7 +2333,7 @@ impl AnyMetricPattern for MetricPattern32 { fn indexes(&self) -> &'static [Index] { &[ - Index::P2PK33AddressIndex, + Index::P2SHAddressIndex, ] } } @@ -2421,7 +2341,7 @@ impl AnyMetricPattern for MetricPattern32 { impl MetricPattern for MetricPattern32 { fn get(&self, index: Index) -> Option> { match index { - Index::P2PK33AddressIndex => Some(self.by.by_p2pk33addressindex()), + Index::P2SHAddressIndex => Some(self.by.by_p2shaddressindex()), _ => None, } } @@ -2435,8 +2355,8 @@ pub struct MetricPattern33By { } impl MetricPattern33By { - pub fn by_p2pk65addressindex(&self) -> Endpoint { - Endpoint::new(self.client.clone(), self.name.clone(), Index::P2PK65AddressIndex) + pub fn by_p2traddressindex(&self) -> Endpoint { + Endpoint::new(self.client.clone(), self.name.clone(), Index::P2TRAddressIndex) } } @@ -2474,7 +2394,7 @@ impl AnyMetricPattern for MetricPattern33 { fn indexes(&self) -> &'static [Index] { &[ - Index::P2PK65AddressIndex, + Index::P2TRAddressIndex, ] } } @@ -2482,7 +2402,7 @@ impl AnyMetricPattern for MetricPattern33 { impl 
MetricPattern for MetricPattern33 { fn get(&self, index: Index) -> Option> { match index { - Index::P2PK65AddressIndex => Some(self.by.by_p2pk65addressindex()), + Index::P2TRAddressIndex => Some(self.by.by_p2traddressindex()), _ => None, } } @@ -2496,8 +2416,8 @@ pub struct MetricPattern34By { } impl MetricPattern34By { - pub fn by_p2pkhaddressindex(&self) -> Endpoint { - Endpoint::new(self.client.clone(), self.name.clone(), Index::P2PKHAddressIndex) + pub fn by_p2wpkhaddressindex(&self) -> Endpoint { + Endpoint::new(self.client.clone(), self.name.clone(), Index::P2WPKHAddressIndex) } } @@ -2535,7 +2455,7 @@ impl AnyMetricPattern for MetricPattern34 { fn indexes(&self) -> &'static [Index] { &[ - Index::P2PKHAddressIndex, + Index::P2WPKHAddressIndex, ] } } @@ -2543,7 +2463,7 @@ impl AnyMetricPattern for MetricPattern34 { impl MetricPattern for MetricPattern34 { fn get(&self, index: Index) -> Option> { match index { - Index::P2PKHAddressIndex => Some(self.by.by_p2pkhaddressindex()), + Index::P2WPKHAddressIndex => Some(self.by.by_p2wpkhaddressindex()), _ => None, } } @@ -2557,8 +2477,8 @@ pub struct MetricPattern35By { } impl MetricPattern35By { - pub fn by_p2shaddressindex(&self) -> Endpoint { - Endpoint::new(self.client.clone(), self.name.clone(), Index::P2SHAddressIndex) + pub fn by_p2wshaddressindex(&self) -> Endpoint { + Endpoint::new(self.client.clone(), self.name.clone(), Index::P2WSHAddressIndex) } } @@ -2596,7 +2516,7 @@ impl AnyMetricPattern for MetricPattern35 { fn indexes(&self) -> &'static [Index] { &[ - Index::P2SHAddressIndex, + Index::P2WSHAddressIndex, ] } } @@ -2604,7 +2524,7 @@ impl AnyMetricPattern for MetricPattern35 { impl MetricPattern for MetricPattern35 { fn get(&self, index: Index) -> Option> { match index { - Index::P2SHAddressIndex => Some(self.by.by_p2shaddressindex()), + Index::P2WSHAddressIndex => Some(self.by.by_p2wshaddressindex()), _ => None, } } @@ -2618,8 +2538,8 @@ pub struct MetricPattern36By { } impl MetricPattern36By { - pub fn 
by_p2traddressindex(&self) -> Endpoint { - Endpoint::new(self.client.clone(), self.name.clone(), Index::P2TRAddressIndex) + pub fn by_txindex(&self) -> Endpoint { + Endpoint::new(self.client.clone(), self.name.clone(), Index::TxIndex) } } @@ -2657,7 +2577,7 @@ impl AnyMetricPattern for MetricPattern36 { fn indexes(&self) -> &'static [Index] { &[ - Index::P2TRAddressIndex, + Index::TxIndex, ] } } @@ -2665,7 +2585,7 @@ impl AnyMetricPattern for MetricPattern36 { impl MetricPattern for MetricPattern36 { fn get(&self, index: Index) -> Option> { match index { - Index::P2TRAddressIndex => Some(self.by.by_p2traddressindex()), + Index::TxIndex => Some(self.by.by_txindex()), _ => None, } } @@ -2679,8 +2599,8 @@ pub struct MetricPattern37By { } impl MetricPattern37By { - pub fn by_p2wpkhaddressindex(&self) -> Endpoint { - Endpoint::new(self.client.clone(), self.name.clone(), Index::P2WPKHAddressIndex) + pub fn by_unknownoutputindex(&self) -> Endpoint { + Endpoint::new(self.client.clone(), self.name.clone(), Index::UnknownOutputIndex) } } @@ -2718,7 +2638,7 @@ impl AnyMetricPattern for MetricPattern37 { fn indexes(&self) -> &'static [Index] { &[ - Index::P2WPKHAddressIndex, + Index::UnknownOutputIndex, ] } } @@ -2726,7 +2646,7 @@ impl AnyMetricPattern for MetricPattern37 { impl MetricPattern for MetricPattern37 { fn get(&self, index: Index) -> Option> { match index { - Index::P2WPKHAddressIndex => Some(self.by.by_p2wpkhaddressindex()), + Index::UnknownOutputIndex => Some(self.by.by_unknownoutputindex()), _ => None, } } @@ -2740,8 +2660,8 @@ pub struct MetricPattern38By { } impl MetricPattern38By { - pub fn by_p2wshaddressindex(&self) -> Endpoint { - Endpoint::new(self.client.clone(), self.name.clone(), Index::P2WSHAddressIndex) + pub fn by_loadedaddressindex(&self) -> Endpoint { + Endpoint::new(self.client.clone(), self.name.clone(), Index::LoadedAddressIndex) } } @@ -2779,7 +2699,7 @@ impl AnyMetricPattern for MetricPattern38 { fn indexes(&self) -> &'static [Index] { &[ - 
Index::P2WSHAddressIndex, + Index::LoadedAddressIndex, ] } } @@ -2787,7 +2707,7 @@ impl AnyMetricPattern for MetricPattern38 { impl MetricPattern for MetricPattern38 { fn get(&self, index: Index) -> Option> { match index { - Index::P2WSHAddressIndex => Some(self.by.by_p2wshaddressindex()), + Index::LoadedAddressIndex => Some(self.by.by_loadedaddressindex()), _ => None, } } @@ -2801,8 +2721,8 @@ pub struct MetricPattern39By { } impl MetricPattern39By { - pub fn by_txindex(&self) -> Endpoint { - Endpoint::new(self.client.clone(), self.name.clone(), Index::TxIndex) + pub fn by_emptyaddressindex(&self) -> Endpoint { + Endpoint::new(self.client.clone(), self.name.clone(), Index::EmptyAddressIndex) } } @@ -2838,189 +2758,6 @@ impl AnyMetricPattern for MetricPattern39 { &self.name } - fn indexes(&self) -> &'static [Index] { - &[ - Index::TxIndex, - ] - } -} - -impl MetricPattern for MetricPattern39 { - fn get(&self, index: Index) -> Option> { - match index { - Index::TxIndex => Some(self.by.by_txindex()), - _ => None, - } - } -} - -/// Container for index endpoint methods. -pub struct MetricPattern40By { - client: Arc, - name: Arc, - _marker: std::marker::PhantomData, -} - -impl MetricPattern40By { - pub fn by_unknownoutputindex(&self) -> Endpoint { - Endpoint::new(self.client.clone(), self.name.clone(), Index::UnknownOutputIndex) - } -} - -/// Index accessor for metrics with 1 indexes. -pub struct MetricPattern40 { - client: Arc, - name: Arc, - pub by: MetricPattern40By, -} - -impl MetricPattern40 { - pub fn new(client: Arc, name: String) -> Self { - let name: Arc = name.into(); - Self { - client: client.clone(), - name: name.clone(), - by: MetricPattern40By { - client, - name, - _marker: std::marker::PhantomData, - } - } - } - - /// Get the metric name. 
- pub fn name(&self) -> &str { - &self.name - } -} - -impl AnyMetricPattern for MetricPattern40 { - fn name(&self) -> &str { - &self.name - } - - fn indexes(&self) -> &'static [Index] { - &[ - Index::UnknownOutputIndex, - ] - } -} - -impl MetricPattern for MetricPattern40 { - fn get(&self, index: Index) -> Option> { - match index { - Index::UnknownOutputIndex => Some(self.by.by_unknownoutputindex()), - _ => None, - } - } -} - -/// Container for index endpoint methods. -pub struct MetricPattern41By { - client: Arc, - name: Arc, - _marker: std::marker::PhantomData, -} - -impl MetricPattern41By { - pub fn by_loadedaddressindex(&self) -> Endpoint { - Endpoint::new(self.client.clone(), self.name.clone(), Index::LoadedAddressIndex) - } -} - -/// Index accessor for metrics with 1 indexes. -pub struct MetricPattern41 { - client: Arc, - name: Arc, - pub by: MetricPattern41By, -} - -impl MetricPattern41 { - pub fn new(client: Arc, name: String) -> Self { - let name: Arc = name.into(); - Self { - client: client.clone(), - name: name.clone(), - by: MetricPattern41By { - client, - name, - _marker: std::marker::PhantomData, - } - } - } - - /// Get the metric name. - pub fn name(&self) -> &str { - &self.name - } -} - -impl AnyMetricPattern for MetricPattern41 { - fn name(&self) -> &str { - &self.name - } - - fn indexes(&self) -> &'static [Index] { - &[ - Index::LoadedAddressIndex, - ] - } -} - -impl MetricPattern for MetricPattern41 { - fn get(&self, index: Index) -> Option> { - match index { - Index::LoadedAddressIndex => Some(self.by.by_loadedaddressindex()), - _ => None, - } - } -} - -/// Container for index endpoint methods. -pub struct MetricPattern42By { - client: Arc, - name: Arc, - _marker: std::marker::PhantomData, -} - -impl MetricPattern42By { - pub fn by_emptyaddressindex(&self) -> Endpoint { - Endpoint::new(self.client.clone(), self.name.clone(), Index::EmptyAddressIndex) - } -} - -/// Index accessor for metrics with 1 indexes. 
-pub struct MetricPattern42 { - client: Arc, - name: Arc, - pub by: MetricPattern42By, -} - -impl MetricPattern42 { - pub fn new(client: Arc, name: String) -> Self { - let name: Arc = name.into(); - Self { - client: client.clone(), - name: name.clone(), - by: MetricPattern42By { - client, - name, - _marker: std::marker::PhantomData, - } - } - } - - /// Get the metric name. - pub fn name(&self) -> &str { - &self.name - } -} - -impl AnyMetricPattern for MetricPattern42 { - fn name(&self) -> &str { - &self.name - } - fn indexes(&self) -> &'static [Index] { &[ Index::EmptyAddressIndex, @@ -3028,7 +2765,7 @@ impl AnyMetricPattern for MetricPattern42 { } } -impl MetricPattern for MetricPattern42 { +impl MetricPattern for MetricPattern39 { fn get(&self, index: Index) -> Option> { match index { Index::EmptyAddressIndex => Some(self.by.by_emptyaddressindex()), @@ -3041,20 +2778,20 @@ impl MetricPattern for MetricPattern42 { /// Pattern struct for repeated tree structure. pub struct RealizedPattern3 { - pub adjusted_sopr: MetricPattern22, - pub adjusted_sopr_30d_ema: MetricPattern22, - pub adjusted_sopr_7d_ema: MetricPattern22, + pub adjusted_sopr: MetricPattern19, + pub adjusted_sopr_30d_ema: MetricPattern19, + pub adjusted_sopr_7d_ema: MetricPattern19, pub adjusted_value_created: MetricPattern1, pub adjusted_value_destroyed: MetricPattern1, - pub mvrv: MetricPattern5, + pub mvrv: MetricPattern4, pub neg_realized_loss: BlockCountPattern, pub net_realized_pnl: BlockCountPattern, - pub net_realized_pnl_cumulative_30d_delta: MetricPattern5, - pub net_realized_pnl_cumulative_30d_delta_rel_to_market_cap: MetricPattern5, - pub net_realized_pnl_cumulative_30d_delta_rel_to_realized_cap: MetricPattern5, + pub net_realized_pnl_cumulative_30d_delta: MetricPattern4, + pub net_realized_pnl_cumulative_30d_delta_rel_to_market_cap: MetricPattern4, + pub net_realized_pnl_cumulative_30d_delta_rel_to_realized_cap: MetricPattern4, pub net_realized_pnl_rel_to_realized_cap: BlockCountPattern, 
pub realized_cap: MetricPattern1, - pub realized_cap_30d_delta: MetricPattern5, + pub realized_cap_30d_delta: MetricPattern4, pub realized_cap_rel_to_own_market_cap: MetricPattern1, pub realized_loss: BlockCountPattern, pub realized_loss_rel_to_realized_cap: BlockCountPattern, @@ -3062,39 +2799,37 @@ pub struct RealizedPattern3 { pub realized_price_extra: ActivePriceRatioPattern, pub realized_profit: BlockCountPattern, pub realized_profit_rel_to_realized_cap: BlockCountPattern, - pub realized_profit_to_loss_ratio: MetricPattern22, - pub realized_value: DifficultyAdjustmentPattern, - pub sell_side_risk_ratio: MetricPattern22, - pub sell_side_risk_ratio_30d_ema: MetricPattern22, - pub sell_side_risk_ratio_7d_ema: MetricPattern22, - pub sopr: MetricPattern22, - pub sopr_30d_ema: MetricPattern22, - pub sopr_7d_ema: MetricPattern22, + pub realized_profit_to_loss_ratio: MetricPattern19, + pub realized_value: MetricPattern1, + pub sell_side_risk_ratio: MetricPattern19, + pub sell_side_risk_ratio_30d_ema: MetricPattern19, + pub sell_side_risk_ratio_7d_ema: MetricPattern19, + pub sopr: MetricPattern19, + pub sopr_30d_ema: MetricPattern19, + pub sopr_7d_ema: MetricPattern19, pub total_realized_pnl: MetricPattern1, - pub value_created: MetricPattern26, - pub value_created_sum: MetricPattern2, - pub value_destroyed: MetricPattern26, - pub value_destroyed_sum: MetricPattern2, + pub value_created: MetricPattern1, + pub value_destroyed: MetricPattern1, } impl RealizedPattern3 { /// Create a new pattern node with accumulated metric name. 
pub fn new(client: Arc, acc: String) -> Self { Self { - adjusted_sopr: MetricPattern22::new(client.clone(), _m(&acc, "adjusted_sopr")), - adjusted_sopr_30d_ema: MetricPattern22::new(client.clone(), _m(&acc, "adjusted_sopr_30d_ema")), - adjusted_sopr_7d_ema: MetricPattern22::new(client.clone(), _m(&acc, "adjusted_sopr_7d_ema")), + adjusted_sopr: MetricPattern19::new(client.clone(), _m(&acc, "adjusted_sopr")), + adjusted_sopr_30d_ema: MetricPattern19::new(client.clone(), _m(&acc, "adjusted_sopr_30d_ema")), + adjusted_sopr_7d_ema: MetricPattern19::new(client.clone(), _m(&acc, "adjusted_sopr_7d_ema")), adjusted_value_created: MetricPattern1::new(client.clone(), _m(&acc, "adjusted_value_created")), adjusted_value_destroyed: MetricPattern1::new(client.clone(), _m(&acc, "adjusted_value_destroyed")), - mvrv: MetricPattern5::new(client.clone(), _m(&acc, "mvrv")), + mvrv: MetricPattern4::new(client.clone(), _m(&acc, "mvrv")), neg_realized_loss: BlockCountPattern::new(client.clone(), _m(&acc, "neg_realized_loss")), net_realized_pnl: BlockCountPattern::new(client.clone(), _m(&acc, "net_realized_pnl")), - net_realized_pnl_cumulative_30d_delta: MetricPattern5::new(client.clone(), _m(&acc, "net_realized_pnl_cumulative_30d_delta")), - net_realized_pnl_cumulative_30d_delta_rel_to_market_cap: MetricPattern5::new(client.clone(), _m(&acc, "net_realized_pnl_cumulative_30d_delta_rel_to_market_cap")), - net_realized_pnl_cumulative_30d_delta_rel_to_realized_cap: MetricPattern5::new(client.clone(), _m(&acc, "net_realized_pnl_cumulative_30d_delta_rel_to_realized_cap")), + net_realized_pnl_cumulative_30d_delta: MetricPattern4::new(client.clone(), _m(&acc, "net_realized_pnl_cumulative_30d_delta")), + net_realized_pnl_cumulative_30d_delta_rel_to_market_cap: MetricPattern4::new(client.clone(), _m(&acc, "net_realized_pnl_cumulative_30d_delta_rel_to_market_cap")), + net_realized_pnl_cumulative_30d_delta_rel_to_realized_cap: MetricPattern4::new(client.clone(), _m(&acc, 
"net_realized_pnl_cumulative_30d_delta_rel_to_realized_cap")), net_realized_pnl_rel_to_realized_cap: BlockCountPattern::new(client.clone(), _m(&acc, "net_realized_pnl_rel_to_realized_cap")), realized_cap: MetricPattern1::new(client.clone(), _m(&acc, "realized_cap")), - realized_cap_30d_delta: MetricPattern5::new(client.clone(), _m(&acc, "realized_cap_30d_delta")), + realized_cap_30d_delta: MetricPattern4::new(client.clone(), _m(&acc, "realized_cap_30d_delta")), realized_cap_rel_to_own_market_cap: MetricPattern1::new(client.clone(), _m(&acc, "realized_cap_rel_to_own_market_cap")), realized_loss: BlockCountPattern::new(client.clone(), _m(&acc, "realized_loss")), realized_loss_rel_to_realized_cap: BlockCountPattern::new(client.clone(), _m(&acc, "realized_loss_rel_to_realized_cap")), @@ -3102,110 +2837,172 @@ impl RealizedPattern3 { realized_price_extra: ActivePriceRatioPattern::new(client.clone(), _m(&acc, "realized_price_ratio")), realized_profit: BlockCountPattern::new(client.clone(), _m(&acc, "realized_profit")), realized_profit_rel_to_realized_cap: BlockCountPattern::new(client.clone(), _m(&acc, "realized_profit_rel_to_realized_cap")), - realized_profit_to_loss_ratio: MetricPattern22::new(client.clone(), _m(&acc, "realized_profit_to_loss_ratio")), - realized_value: DifficultyAdjustmentPattern::new(client.clone(), _m(&acc, "realized_value")), - sell_side_risk_ratio: MetricPattern22::new(client.clone(), _m(&acc, "sell_side_risk_ratio")), - sell_side_risk_ratio_30d_ema: MetricPattern22::new(client.clone(), _m(&acc, "sell_side_risk_ratio_30d_ema")), - sell_side_risk_ratio_7d_ema: MetricPattern22::new(client.clone(), _m(&acc, "sell_side_risk_ratio_7d_ema")), - sopr: MetricPattern22::new(client.clone(), _m(&acc, "sopr")), - sopr_30d_ema: MetricPattern22::new(client.clone(), _m(&acc, "sopr_30d_ema")), - sopr_7d_ema: MetricPattern22::new(client.clone(), _m(&acc, "sopr_7d_ema")), + realized_profit_to_loss_ratio: MetricPattern19::new(client.clone(), _m(&acc, 
"realized_profit_to_loss_ratio")), + realized_value: MetricPattern1::new(client.clone(), _m(&acc, "realized_value")), + sell_side_risk_ratio: MetricPattern19::new(client.clone(), _m(&acc, "sell_side_risk_ratio")), + sell_side_risk_ratio_30d_ema: MetricPattern19::new(client.clone(), _m(&acc, "sell_side_risk_ratio_30d_ema")), + sell_side_risk_ratio_7d_ema: MetricPattern19::new(client.clone(), _m(&acc, "sell_side_risk_ratio_7d_ema")), + sopr: MetricPattern19::new(client.clone(), _m(&acc, "sopr")), + sopr_30d_ema: MetricPattern19::new(client.clone(), _m(&acc, "sopr_30d_ema")), + sopr_7d_ema: MetricPattern19::new(client.clone(), _m(&acc, "sopr_7d_ema")), total_realized_pnl: MetricPattern1::new(client.clone(), _m(&acc, "total_realized_pnl")), - value_created: MetricPattern26::new(client.clone(), _m(&acc, "value_created")), - value_created_sum: MetricPattern2::new(client.clone(), _m(&acc, "value_created_sum")), - value_destroyed: MetricPattern26::new(client.clone(), _m(&acc, "value_destroyed")), - value_destroyed_sum: MetricPattern2::new(client.clone(), _m(&acc, "value_destroyed_sum")), + value_created: MetricPattern1::new(client.clone(), _m(&acc, "value_created")), + value_destroyed: MetricPattern1::new(client.clone(), _m(&acc, "value_destroyed")), } } } /// Pattern struct for repeated tree structure. 
pub struct RealizedPattern4 { - pub adjusted_sopr: MetricPattern22, - pub adjusted_sopr_30d_ema: MetricPattern22, - pub adjusted_sopr_7d_ema: MetricPattern22, + pub adjusted_sopr: MetricPattern19, + pub adjusted_sopr_30d_ema: MetricPattern19, + pub adjusted_sopr_7d_ema: MetricPattern19, pub adjusted_value_created: MetricPattern1, pub adjusted_value_destroyed: MetricPattern1, - pub mvrv: MetricPattern5, + pub mvrv: MetricPattern4, pub neg_realized_loss: BlockCountPattern, pub net_realized_pnl: BlockCountPattern, - pub net_realized_pnl_cumulative_30d_delta: MetricPattern5, - pub net_realized_pnl_cumulative_30d_delta_rel_to_market_cap: MetricPattern5, - pub net_realized_pnl_cumulative_30d_delta_rel_to_realized_cap: MetricPattern5, + pub net_realized_pnl_cumulative_30d_delta: MetricPattern4, + pub net_realized_pnl_cumulative_30d_delta_rel_to_market_cap: MetricPattern4, + pub net_realized_pnl_cumulative_30d_delta_rel_to_realized_cap: MetricPattern4, pub net_realized_pnl_rel_to_realized_cap: BlockCountPattern, pub realized_cap: MetricPattern1, - pub realized_cap_30d_delta: MetricPattern5, + pub realized_cap_30d_delta: MetricPattern4, pub realized_loss: BlockCountPattern, pub realized_loss_rel_to_realized_cap: BlockCountPattern, pub realized_price: MetricPattern1, pub realized_price_extra: RealizedPriceExtraPattern, pub realized_profit: BlockCountPattern, pub realized_profit_rel_to_realized_cap: BlockCountPattern, - pub realized_value: DifficultyAdjustmentPattern, - pub sell_side_risk_ratio: MetricPattern22, - pub sell_side_risk_ratio_30d_ema: MetricPattern22, - pub sell_side_risk_ratio_7d_ema: MetricPattern22, - pub sopr: MetricPattern22, - pub sopr_30d_ema: MetricPattern22, - pub sopr_7d_ema: MetricPattern22, + pub realized_value: MetricPattern1, + pub sell_side_risk_ratio: MetricPattern19, + pub sell_side_risk_ratio_30d_ema: MetricPattern19, + pub sell_side_risk_ratio_7d_ema: MetricPattern19, + pub sopr: MetricPattern19, + pub sopr_30d_ema: MetricPattern19, + pub 
sopr_7d_ema: MetricPattern19, pub total_realized_pnl: MetricPattern1, - pub value_created: MetricPattern26, - pub value_created_sum: MetricPattern2, - pub value_destroyed: MetricPattern26, - pub value_destroyed_sum: MetricPattern2, + pub value_created: MetricPattern1, + pub value_destroyed: MetricPattern1, } impl RealizedPattern4 { /// Create a new pattern node with accumulated metric name. pub fn new(client: Arc, acc: String) -> Self { Self { - adjusted_sopr: MetricPattern22::new(client.clone(), _m(&acc, "adjusted_sopr")), - adjusted_sopr_30d_ema: MetricPattern22::new(client.clone(), _m(&acc, "adjusted_sopr_30d_ema")), - adjusted_sopr_7d_ema: MetricPattern22::new(client.clone(), _m(&acc, "adjusted_sopr_7d_ema")), + adjusted_sopr: MetricPattern19::new(client.clone(), _m(&acc, "adjusted_sopr")), + adjusted_sopr_30d_ema: MetricPattern19::new(client.clone(), _m(&acc, "adjusted_sopr_30d_ema")), + adjusted_sopr_7d_ema: MetricPattern19::new(client.clone(), _m(&acc, "adjusted_sopr_7d_ema")), adjusted_value_created: MetricPattern1::new(client.clone(), _m(&acc, "adjusted_value_created")), adjusted_value_destroyed: MetricPattern1::new(client.clone(), _m(&acc, "adjusted_value_destroyed")), - mvrv: MetricPattern5::new(client.clone(), _m(&acc, "mvrv")), + mvrv: MetricPattern4::new(client.clone(), _m(&acc, "mvrv")), neg_realized_loss: BlockCountPattern::new(client.clone(), _m(&acc, "neg_realized_loss")), net_realized_pnl: BlockCountPattern::new(client.clone(), _m(&acc, "net_realized_pnl")), - net_realized_pnl_cumulative_30d_delta: MetricPattern5::new(client.clone(), _m(&acc, "net_realized_pnl_cumulative_30d_delta")), - net_realized_pnl_cumulative_30d_delta_rel_to_market_cap: MetricPattern5::new(client.clone(), _m(&acc, "net_realized_pnl_cumulative_30d_delta_rel_to_market_cap")), - net_realized_pnl_cumulative_30d_delta_rel_to_realized_cap: MetricPattern5::new(client.clone(), _m(&acc, "net_realized_pnl_cumulative_30d_delta_rel_to_realized_cap")), + 
net_realized_pnl_cumulative_30d_delta: MetricPattern4::new(client.clone(), _m(&acc, "net_realized_pnl_cumulative_30d_delta")), + net_realized_pnl_cumulative_30d_delta_rel_to_market_cap: MetricPattern4::new(client.clone(), _m(&acc, "net_realized_pnl_cumulative_30d_delta_rel_to_market_cap")), + net_realized_pnl_cumulative_30d_delta_rel_to_realized_cap: MetricPattern4::new(client.clone(), _m(&acc, "net_realized_pnl_cumulative_30d_delta_rel_to_realized_cap")), net_realized_pnl_rel_to_realized_cap: BlockCountPattern::new(client.clone(), _m(&acc, "net_realized_pnl_rel_to_realized_cap")), realized_cap: MetricPattern1::new(client.clone(), _m(&acc, "realized_cap")), - realized_cap_30d_delta: MetricPattern5::new(client.clone(), _m(&acc, "realized_cap_30d_delta")), + realized_cap_30d_delta: MetricPattern4::new(client.clone(), _m(&acc, "realized_cap_30d_delta")), realized_loss: BlockCountPattern::new(client.clone(), _m(&acc, "realized_loss")), realized_loss_rel_to_realized_cap: BlockCountPattern::new(client.clone(), _m(&acc, "realized_loss_rel_to_realized_cap")), realized_price: MetricPattern1::new(client.clone(), _m(&acc, "realized_price")), realized_price_extra: RealizedPriceExtraPattern::new(client.clone(), _m(&acc, "realized_price")), realized_profit: BlockCountPattern::new(client.clone(), _m(&acc, "realized_profit")), realized_profit_rel_to_realized_cap: BlockCountPattern::new(client.clone(), _m(&acc, "realized_profit_rel_to_realized_cap")), - realized_value: DifficultyAdjustmentPattern::new(client.clone(), _m(&acc, "realized_value")), - sell_side_risk_ratio: MetricPattern22::new(client.clone(), _m(&acc, "sell_side_risk_ratio")), - sell_side_risk_ratio_30d_ema: MetricPattern22::new(client.clone(), _m(&acc, "sell_side_risk_ratio_30d_ema")), - sell_side_risk_ratio_7d_ema: MetricPattern22::new(client.clone(), _m(&acc, "sell_side_risk_ratio_7d_ema")), - sopr: MetricPattern22::new(client.clone(), _m(&acc, "sopr")), - sopr_30d_ema: MetricPattern22::new(client.clone(), _m(&acc, 
"sopr_30d_ema")), - sopr_7d_ema: MetricPattern22::new(client.clone(), _m(&acc, "sopr_7d_ema")), + realized_value: MetricPattern1::new(client.clone(), _m(&acc, "realized_value")), + sell_side_risk_ratio: MetricPattern19::new(client.clone(), _m(&acc, "sell_side_risk_ratio")), + sell_side_risk_ratio_30d_ema: MetricPattern19::new(client.clone(), _m(&acc, "sell_side_risk_ratio_30d_ema")), + sell_side_risk_ratio_7d_ema: MetricPattern19::new(client.clone(), _m(&acc, "sell_side_risk_ratio_7d_ema")), + sopr: MetricPattern19::new(client.clone(), _m(&acc, "sopr")), + sopr_30d_ema: MetricPattern19::new(client.clone(), _m(&acc, "sopr_30d_ema")), + sopr_7d_ema: MetricPattern19::new(client.clone(), _m(&acc, "sopr_7d_ema")), total_realized_pnl: MetricPattern1::new(client.clone(), _m(&acc, "total_realized_pnl")), - value_created: MetricPattern26::new(client.clone(), _m(&acc, "value_created")), - value_created_sum: MetricPattern2::new(client.clone(), _m(&acc, "value_created_sum")), - value_destroyed: MetricPattern26::new(client.clone(), _m(&acc, "value_destroyed")), - value_destroyed_sum: MetricPattern2::new(client.clone(), _m(&acc, "value_destroyed_sum")), + value_created: MetricPattern1::new(client.clone(), _m(&acc, "value_created")), + value_destroyed: MetricPattern1::new(client.clone(), _m(&acc, "value_destroyed")), + } + } +} + +/// Pattern struct for repeated tree structure. 
+pub struct Ratio1ySdPattern { + pub _0sd_usd: MetricPattern4, + pub m0_5sd: MetricPattern4, + pub m0_5sd_usd: MetricPattern4, + pub m1_5sd: MetricPattern4, + pub m1_5sd_usd: MetricPattern4, + pub m1sd: MetricPattern4, + pub m1sd_usd: MetricPattern4, + pub m2_5sd: MetricPattern4, + pub m2_5sd_usd: MetricPattern4, + pub m2sd: MetricPattern4, + pub m2sd_usd: MetricPattern4, + pub m3sd: MetricPattern4, + pub m3sd_usd: MetricPattern4, + pub p0_5sd: MetricPattern4, + pub p0_5sd_usd: MetricPattern4, + pub p1_5sd: MetricPattern4, + pub p1_5sd_usd: MetricPattern4, + pub p1sd: MetricPattern4, + pub p1sd_usd: MetricPattern4, + pub p2_5sd: MetricPattern4, + pub p2_5sd_usd: MetricPattern4, + pub p2sd: MetricPattern4, + pub p2sd_usd: MetricPattern4, + pub p3sd: MetricPattern4, + pub p3sd_usd: MetricPattern4, + pub sd: MetricPattern4, + pub sma: MetricPattern4, + pub zscore: MetricPattern4, +} + +impl Ratio1ySdPattern { + /// Create a new pattern node with accumulated metric name. + pub fn new(client: Arc, acc: String) -> Self { + Self { + _0sd_usd: MetricPattern4::new(client.clone(), _m(&acc, "0sd_usd")), + m0_5sd: MetricPattern4::new(client.clone(), _m(&acc, "m0_5sd")), + m0_5sd_usd: MetricPattern4::new(client.clone(), _m(&acc, "m0_5sd_usd")), + m1_5sd: MetricPattern4::new(client.clone(), _m(&acc, "m1_5sd")), + m1_5sd_usd: MetricPattern4::new(client.clone(), _m(&acc, "m1_5sd_usd")), + m1sd: MetricPattern4::new(client.clone(), _m(&acc, "m1sd")), + m1sd_usd: MetricPattern4::new(client.clone(), _m(&acc, "m1sd_usd")), + m2_5sd: MetricPattern4::new(client.clone(), _m(&acc, "m2_5sd")), + m2_5sd_usd: MetricPattern4::new(client.clone(), _m(&acc, "m2_5sd_usd")), + m2sd: MetricPattern4::new(client.clone(), _m(&acc, "m2sd")), + m2sd_usd: MetricPattern4::new(client.clone(), _m(&acc, "m2sd_usd")), + m3sd: MetricPattern4::new(client.clone(), _m(&acc, "m3sd")), + m3sd_usd: MetricPattern4::new(client.clone(), _m(&acc, "m3sd_usd")), + p0_5sd: MetricPattern4::new(client.clone(), _m(&acc, 
"p0_5sd")), + p0_5sd_usd: MetricPattern4::new(client.clone(), _m(&acc, "p0_5sd_usd")), + p1_5sd: MetricPattern4::new(client.clone(), _m(&acc, "p1_5sd")), + p1_5sd_usd: MetricPattern4::new(client.clone(), _m(&acc, "p1_5sd_usd")), + p1sd: MetricPattern4::new(client.clone(), _m(&acc, "p1sd")), + p1sd_usd: MetricPattern4::new(client.clone(), _m(&acc, "p1sd_usd")), + p2_5sd: MetricPattern4::new(client.clone(), _m(&acc, "p2_5sd")), + p2_5sd_usd: MetricPattern4::new(client.clone(), _m(&acc, "p2_5sd_usd")), + p2sd: MetricPattern4::new(client.clone(), _m(&acc, "p2sd")), + p2sd_usd: MetricPattern4::new(client.clone(), _m(&acc, "p2sd_usd")), + p3sd: MetricPattern4::new(client.clone(), _m(&acc, "p3sd")), + p3sd_usd: MetricPattern4::new(client.clone(), _m(&acc, "p3sd_usd")), + sd: MetricPattern4::new(client.clone(), _m(&acc, "sd")), + sma: MetricPattern4::new(client.clone(), _m(&acc, "sma")), + zscore: MetricPattern4::new(client.clone(), _m(&acc, "zscore")), } } } /// Pattern struct for repeated tree structure. 
pub struct RealizedPattern2 { - pub mvrv: MetricPattern5, + pub mvrv: MetricPattern4, pub neg_realized_loss: BlockCountPattern, pub net_realized_pnl: BlockCountPattern, - pub net_realized_pnl_cumulative_30d_delta: MetricPattern5, - pub net_realized_pnl_cumulative_30d_delta_rel_to_market_cap: MetricPattern5, - pub net_realized_pnl_cumulative_30d_delta_rel_to_realized_cap: MetricPattern5, + pub net_realized_pnl_cumulative_30d_delta: MetricPattern4, + pub net_realized_pnl_cumulative_30d_delta_rel_to_market_cap: MetricPattern4, + pub net_realized_pnl_cumulative_30d_delta_rel_to_realized_cap: MetricPattern4, pub net_realized_pnl_rel_to_realized_cap: BlockCountPattern, pub realized_cap: MetricPattern1, - pub realized_cap_30d_delta: MetricPattern5, + pub realized_cap_30d_delta: MetricPattern4, pub realized_cap_rel_to_own_market_cap: MetricPattern1, pub realized_loss: BlockCountPattern, pub realized_loss_rel_to_realized_cap: BlockCountPattern, @@ -3213,34 +3010,32 @@ pub struct RealizedPattern2 { pub realized_price_extra: ActivePriceRatioPattern, pub realized_profit: BlockCountPattern, pub realized_profit_rel_to_realized_cap: BlockCountPattern, - pub realized_profit_to_loss_ratio: MetricPattern22, - pub realized_value: DifficultyAdjustmentPattern, - pub sell_side_risk_ratio: MetricPattern22, - pub sell_side_risk_ratio_30d_ema: MetricPattern22, - pub sell_side_risk_ratio_7d_ema: MetricPattern22, - pub sopr: MetricPattern22, - pub sopr_30d_ema: MetricPattern22, - pub sopr_7d_ema: MetricPattern22, + pub realized_profit_to_loss_ratio: MetricPattern19, + pub realized_value: MetricPattern1, + pub sell_side_risk_ratio: MetricPattern19, + pub sell_side_risk_ratio_30d_ema: MetricPattern19, + pub sell_side_risk_ratio_7d_ema: MetricPattern19, + pub sopr: MetricPattern19, + pub sopr_30d_ema: MetricPattern19, + pub sopr_7d_ema: MetricPattern19, pub total_realized_pnl: MetricPattern1, - pub value_created: MetricPattern26, - pub value_created_sum: MetricPattern2, - pub value_destroyed: 
MetricPattern26, - pub value_destroyed_sum: MetricPattern2, + pub value_created: MetricPattern1, + pub value_destroyed: MetricPattern1, } impl RealizedPattern2 { /// Create a new pattern node with accumulated metric name. pub fn new(client: Arc, acc: String) -> Self { Self { - mvrv: MetricPattern5::new(client.clone(), _m(&acc, "mvrv")), + mvrv: MetricPattern4::new(client.clone(), _m(&acc, "mvrv")), neg_realized_loss: BlockCountPattern::new(client.clone(), _m(&acc, "neg_realized_loss")), net_realized_pnl: BlockCountPattern::new(client.clone(), _m(&acc, "net_realized_pnl")), - net_realized_pnl_cumulative_30d_delta: MetricPattern5::new(client.clone(), _m(&acc, "net_realized_pnl_cumulative_30d_delta")), - net_realized_pnl_cumulative_30d_delta_rel_to_market_cap: MetricPattern5::new(client.clone(), _m(&acc, "net_realized_pnl_cumulative_30d_delta_rel_to_market_cap")), - net_realized_pnl_cumulative_30d_delta_rel_to_realized_cap: MetricPattern5::new(client.clone(), _m(&acc, "net_realized_pnl_cumulative_30d_delta_rel_to_realized_cap")), + net_realized_pnl_cumulative_30d_delta: MetricPattern4::new(client.clone(), _m(&acc, "net_realized_pnl_cumulative_30d_delta")), + net_realized_pnl_cumulative_30d_delta_rel_to_market_cap: MetricPattern4::new(client.clone(), _m(&acc, "net_realized_pnl_cumulative_30d_delta_rel_to_market_cap")), + net_realized_pnl_cumulative_30d_delta_rel_to_realized_cap: MetricPattern4::new(client.clone(), _m(&acc, "net_realized_pnl_cumulative_30d_delta_rel_to_realized_cap")), net_realized_pnl_rel_to_realized_cap: BlockCountPattern::new(client.clone(), _m(&acc, "net_realized_pnl_rel_to_realized_cap")), realized_cap: MetricPattern1::new(client.clone(), _m(&acc, "realized_cap")), - realized_cap_30d_delta: MetricPattern5::new(client.clone(), _m(&acc, "realized_cap_30d_delta")), + realized_cap_30d_delta: MetricPattern4::new(client.clone(), _m(&acc, "realized_cap_30d_delta")), realized_cap_rel_to_own_market_cap: MetricPattern1::new(client.clone(), _m(&acc, 
"realized_cap_rel_to_own_market_cap")), realized_loss: BlockCountPattern::new(client.clone(), _m(&acc, "realized_loss")), realized_loss_rel_to_realized_cap: BlockCountPattern::new(client.clone(), _m(&acc, "realized_loss_rel_to_realized_cap")), @@ -3248,178 +3043,104 @@ impl RealizedPattern2 { realized_price_extra: ActivePriceRatioPattern::new(client.clone(), _m(&acc, "realized_price_ratio")), realized_profit: BlockCountPattern::new(client.clone(), _m(&acc, "realized_profit")), realized_profit_rel_to_realized_cap: BlockCountPattern::new(client.clone(), _m(&acc, "realized_profit_rel_to_realized_cap")), - realized_profit_to_loss_ratio: MetricPattern22::new(client.clone(), _m(&acc, "realized_profit_to_loss_ratio")), - realized_value: DifficultyAdjustmentPattern::new(client.clone(), _m(&acc, "realized_value")), - sell_side_risk_ratio: MetricPattern22::new(client.clone(), _m(&acc, "sell_side_risk_ratio")), - sell_side_risk_ratio_30d_ema: MetricPattern22::new(client.clone(), _m(&acc, "sell_side_risk_ratio_30d_ema")), - sell_side_risk_ratio_7d_ema: MetricPattern22::new(client.clone(), _m(&acc, "sell_side_risk_ratio_7d_ema")), - sopr: MetricPattern22::new(client.clone(), _m(&acc, "sopr")), - sopr_30d_ema: MetricPattern22::new(client.clone(), _m(&acc, "sopr_30d_ema")), - sopr_7d_ema: MetricPattern22::new(client.clone(), _m(&acc, "sopr_7d_ema")), + realized_profit_to_loss_ratio: MetricPattern19::new(client.clone(), _m(&acc, "realized_profit_to_loss_ratio")), + realized_value: MetricPattern1::new(client.clone(), _m(&acc, "realized_value")), + sell_side_risk_ratio: MetricPattern19::new(client.clone(), _m(&acc, "sell_side_risk_ratio")), + sell_side_risk_ratio_30d_ema: MetricPattern19::new(client.clone(), _m(&acc, "sell_side_risk_ratio_30d_ema")), + sell_side_risk_ratio_7d_ema: MetricPattern19::new(client.clone(), _m(&acc, "sell_side_risk_ratio_7d_ema")), + sopr: MetricPattern19::new(client.clone(), _m(&acc, "sopr")), + sopr_30d_ema: MetricPattern19::new(client.clone(), _m(&acc, 
"sopr_30d_ema")), + sopr_7d_ema: MetricPattern19::new(client.clone(), _m(&acc, "sopr_7d_ema")), total_realized_pnl: MetricPattern1::new(client.clone(), _m(&acc, "total_realized_pnl")), - value_created: MetricPattern26::new(client.clone(), _m(&acc, "value_created")), - value_created_sum: MetricPattern2::new(client.clone(), _m(&acc, "value_created_sum")), - value_destroyed: MetricPattern26::new(client.clone(), _m(&acc, "value_destroyed")), - value_destroyed_sum: MetricPattern2::new(client.clone(), _m(&acc, "value_destroyed_sum")), - } - } -} - -/// Pattern struct for repeated tree structure. -pub struct Ratio1ySdPattern { - pub _0sd_usd: MetricPattern5, - pub m0_5sd: MetricPattern5, - pub m0_5sd_usd: MetricPattern5, - pub m1_5sd: MetricPattern5, - pub m1_5sd_usd: MetricPattern5, - pub m1sd: MetricPattern5, - pub m1sd_usd: MetricPattern5, - pub m2_5sd: MetricPattern5, - pub m2_5sd_usd: MetricPattern5, - pub m2sd: MetricPattern5, - pub m2sd_usd: MetricPattern5, - pub m3sd: MetricPattern5, - pub m3sd_usd: MetricPattern5, - pub p0_5sd: MetricPattern5, - pub p0_5sd_usd: MetricPattern5, - pub p1_5sd: MetricPattern5, - pub p1_5sd_usd: MetricPattern5, - pub p1sd: MetricPattern5, - pub p1sd_usd: MetricPattern5, - pub p2_5sd: MetricPattern5, - pub p2_5sd_usd: MetricPattern5, - pub p2sd: MetricPattern5, - pub p2sd_usd: MetricPattern5, - pub p3sd: MetricPattern5, - pub p3sd_usd: MetricPattern5, - pub sd: MetricPattern5, - pub sma: MetricPattern5, - pub zscore: MetricPattern5, -} - -impl Ratio1ySdPattern { - /// Create a new pattern node with accumulated metric name. 
- pub fn new(client: Arc, acc: String) -> Self { - Self { - _0sd_usd: MetricPattern5::new(client.clone(), _m(&acc, "0sd_usd")), - m0_5sd: MetricPattern5::new(client.clone(), _m(&acc, "m0_5sd")), - m0_5sd_usd: MetricPattern5::new(client.clone(), _m(&acc, "m0_5sd_usd")), - m1_5sd: MetricPattern5::new(client.clone(), _m(&acc, "m1_5sd")), - m1_5sd_usd: MetricPattern5::new(client.clone(), _m(&acc, "m1_5sd_usd")), - m1sd: MetricPattern5::new(client.clone(), _m(&acc, "m1sd")), - m1sd_usd: MetricPattern5::new(client.clone(), _m(&acc, "m1sd_usd")), - m2_5sd: MetricPattern5::new(client.clone(), _m(&acc, "m2_5sd")), - m2_5sd_usd: MetricPattern5::new(client.clone(), _m(&acc, "m2_5sd_usd")), - m2sd: MetricPattern5::new(client.clone(), _m(&acc, "m2sd")), - m2sd_usd: MetricPattern5::new(client.clone(), _m(&acc, "m2sd_usd")), - m3sd: MetricPattern5::new(client.clone(), _m(&acc, "m3sd")), - m3sd_usd: MetricPattern5::new(client.clone(), _m(&acc, "m3sd_usd")), - p0_5sd: MetricPattern5::new(client.clone(), _m(&acc, "p0_5sd")), - p0_5sd_usd: MetricPattern5::new(client.clone(), _m(&acc, "p0_5sd_usd")), - p1_5sd: MetricPattern5::new(client.clone(), _m(&acc, "p1_5sd")), - p1_5sd_usd: MetricPattern5::new(client.clone(), _m(&acc, "p1_5sd_usd")), - p1sd: MetricPattern5::new(client.clone(), _m(&acc, "p1sd")), - p1sd_usd: MetricPattern5::new(client.clone(), _m(&acc, "p1sd_usd")), - p2_5sd: MetricPattern5::new(client.clone(), _m(&acc, "p2_5sd")), - p2_5sd_usd: MetricPattern5::new(client.clone(), _m(&acc, "p2_5sd_usd")), - p2sd: MetricPattern5::new(client.clone(), _m(&acc, "p2sd")), - p2sd_usd: MetricPattern5::new(client.clone(), _m(&acc, "p2sd_usd")), - p3sd: MetricPattern5::new(client.clone(), _m(&acc, "p3sd")), - p3sd_usd: MetricPattern5::new(client.clone(), _m(&acc, "p3sd_usd")), - sd: MetricPattern5::new(client.clone(), _m(&acc, "sd")), - sma: MetricPattern5::new(client.clone(), _m(&acc, "sma")), - zscore: MetricPattern5::new(client.clone(), _m(&acc, "zscore")), + value_created: 
MetricPattern1::new(client.clone(), _m(&acc, "value_created")), + value_destroyed: MetricPattern1::new(client.clone(), _m(&acc, "value_destroyed")), } } } /// Pattern struct for repeated tree structure. pub struct RealizedPattern { - pub mvrv: MetricPattern5, + pub mvrv: MetricPattern4, pub neg_realized_loss: BlockCountPattern, pub net_realized_pnl: BlockCountPattern, - pub net_realized_pnl_cumulative_30d_delta: MetricPattern5, - pub net_realized_pnl_cumulative_30d_delta_rel_to_market_cap: MetricPattern5, - pub net_realized_pnl_cumulative_30d_delta_rel_to_realized_cap: MetricPattern5, + pub net_realized_pnl_cumulative_30d_delta: MetricPattern4, + pub net_realized_pnl_cumulative_30d_delta_rel_to_market_cap: MetricPattern4, + pub net_realized_pnl_cumulative_30d_delta_rel_to_realized_cap: MetricPattern4, pub net_realized_pnl_rel_to_realized_cap: BlockCountPattern, pub realized_cap: MetricPattern1, - pub realized_cap_30d_delta: MetricPattern5, + pub realized_cap_30d_delta: MetricPattern4, pub realized_loss: BlockCountPattern, pub realized_loss_rel_to_realized_cap: BlockCountPattern, pub realized_price: MetricPattern1, pub realized_price_extra: RealizedPriceExtraPattern, pub realized_profit: BlockCountPattern, pub realized_profit_rel_to_realized_cap: BlockCountPattern, - pub realized_value: DifficultyAdjustmentPattern, - pub sell_side_risk_ratio: MetricPattern22, - pub sell_side_risk_ratio_30d_ema: MetricPattern22, - pub sell_side_risk_ratio_7d_ema: MetricPattern22, - pub sopr: MetricPattern22, - pub sopr_30d_ema: MetricPattern22, - pub sopr_7d_ema: MetricPattern22, + pub realized_value: MetricPattern1, + pub sell_side_risk_ratio: MetricPattern19, + pub sell_side_risk_ratio_30d_ema: MetricPattern19, + pub sell_side_risk_ratio_7d_ema: MetricPattern19, + pub sopr: MetricPattern19, + pub sopr_30d_ema: MetricPattern19, + pub sopr_7d_ema: MetricPattern19, pub total_realized_pnl: MetricPattern1, - pub value_created: MetricPattern26, - pub value_created_sum: MetricPattern2, - 
pub value_destroyed: MetricPattern26, - pub value_destroyed_sum: MetricPattern2, + pub value_created: MetricPattern1, + pub value_destroyed: MetricPattern1, } impl RealizedPattern { /// Create a new pattern node with accumulated metric name. pub fn new(client: Arc, acc: String) -> Self { Self { - mvrv: MetricPattern5::new(client.clone(), _m(&acc, "mvrv")), + mvrv: MetricPattern4::new(client.clone(), _m(&acc, "mvrv")), neg_realized_loss: BlockCountPattern::new(client.clone(), _m(&acc, "neg_realized_loss")), net_realized_pnl: BlockCountPattern::new(client.clone(), _m(&acc, "net_realized_pnl")), - net_realized_pnl_cumulative_30d_delta: MetricPattern5::new(client.clone(), _m(&acc, "net_realized_pnl_cumulative_30d_delta")), - net_realized_pnl_cumulative_30d_delta_rel_to_market_cap: MetricPattern5::new(client.clone(), _m(&acc, "net_realized_pnl_cumulative_30d_delta_rel_to_market_cap")), - net_realized_pnl_cumulative_30d_delta_rel_to_realized_cap: MetricPattern5::new(client.clone(), _m(&acc, "net_realized_pnl_cumulative_30d_delta_rel_to_realized_cap")), + net_realized_pnl_cumulative_30d_delta: MetricPattern4::new(client.clone(), _m(&acc, "net_realized_pnl_cumulative_30d_delta")), + net_realized_pnl_cumulative_30d_delta_rel_to_market_cap: MetricPattern4::new(client.clone(), _m(&acc, "net_realized_pnl_cumulative_30d_delta_rel_to_market_cap")), + net_realized_pnl_cumulative_30d_delta_rel_to_realized_cap: MetricPattern4::new(client.clone(), _m(&acc, "net_realized_pnl_cumulative_30d_delta_rel_to_realized_cap")), net_realized_pnl_rel_to_realized_cap: BlockCountPattern::new(client.clone(), _m(&acc, "net_realized_pnl_rel_to_realized_cap")), realized_cap: MetricPattern1::new(client.clone(), _m(&acc, "realized_cap")), - realized_cap_30d_delta: MetricPattern5::new(client.clone(), _m(&acc, "realized_cap_30d_delta")), + realized_cap_30d_delta: MetricPattern4::new(client.clone(), _m(&acc, "realized_cap_30d_delta")), realized_loss: BlockCountPattern::new(client.clone(), _m(&acc, 
"realized_loss")), realized_loss_rel_to_realized_cap: BlockCountPattern::new(client.clone(), _m(&acc, "realized_loss_rel_to_realized_cap")), realized_price: MetricPattern1::new(client.clone(), _m(&acc, "realized_price")), realized_price_extra: RealizedPriceExtraPattern::new(client.clone(), _m(&acc, "realized_price")), realized_profit: BlockCountPattern::new(client.clone(), _m(&acc, "realized_profit")), realized_profit_rel_to_realized_cap: BlockCountPattern::new(client.clone(), _m(&acc, "realized_profit_rel_to_realized_cap")), - realized_value: DifficultyAdjustmentPattern::new(client.clone(), _m(&acc, "realized_value")), - sell_side_risk_ratio: MetricPattern22::new(client.clone(), _m(&acc, "sell_side_risk_ratio")), - sell_side_risk_ratio_30d_ema: MetricPattern22::new(client.clone(), _m(&acc, "sell_side_risk_ratio_30d_ema")), - sell_side_risk_ratio_7d_ema: MetricPattern22::new(client.clone(), _m(&acc, "sell_side_risk_ratio_7d_ema")), - sopr: MetricPattern22::new(client.clone(), _m(&acc, "sopr")), - sopr_30d_ema: MetricPattern22::new(client.clone(), _m(&acc, "sopr_30d_ema")), - sopr_7d_ema: MetricPattern22::new(client.clone(), _m(&acc, "sopr_7d_ema")), + realized_value: MetricPattern1::new(client.clone(), _m(&acc, "realized_value")), + sell_side_risk_ratio: MetricPattern19::new(client.clone(), _m(&acc, "sell_side_risk_ratio")), + sell_side_risk_ratio_30d_ema: MetricPattern19::new(client.clone(), _m(&acc, "sell_side_risk_ratio_30d_ema")), + sell_side_risk_ratio_7d_ema: MetricPattern19::new(client.clone(), _m(&acc, "sell_side_risk_ratio_7d_ema")), + sopr: MetricPattern19::new(client.clone(), _m(&acc, "sopr")), + sopr_30d_ema: MetricPattern19::new(client.clone(), _m(&acc, "sopr_30d_ema")), + sopr_7d_ema: MetricPattern19::new(client.clone(), _m(&acc, "sopr_7d_ema")), total_realized_pnl: MetricPattern1::new(client.clone(), _m(&acc, "total_realized_pnl")), - value_created: MetricPattern26::new(client.clone(), _m(&acc, "value_created")), - value_created_sum: 
MetricPattern2::new(client.clone(), _m(&acc, "value_created_sum")), - value_destroyed: MetricPattern26::new(client.clone(), _m(&acc, "value_destroyed")), - value_destroyed_sum: MetricPattern2::new(client.clone(), _m(&acc, "value_destroyed_sum")), + value_created: MetricPattern1::new(client.clone(), _m(&acc, "value_created")), + value_destroyed: MetricPattern1::new(client.clone(), _m(&acc, "value_destroyed")), } } } /// Pattern struct for repeated tree structure. pub struct Price111dSmaPattern { - pub price: MetricPattern5, - pub ratio: MetricPattern5, - pub ratio_1m_sma: MetricPattern5, - pub ratio_1w_sma: MetricPattern5, + pub price: MetricPattern4, + pub ratio: MetricPattern4, + pub ratio_1m_sma: MetricPattern4, + pub ratio_1w_sma: MetricPattern4, pub ratio_1y_sd: Ratio1ySdPattern, pub ratio_2y_sd: Ratio1ySdPattern, pub ratio_4y_sd: Ratio1ySdPattern, - pub ratio_pct1: MetricPattern5, - pub ratio_pct1_usd: MetricPattern5, - pub ratio_pct2: MetricPattern5, - pub ratio_pct2_usd: MetricPattern5, - pub ratio_pct5: MetricPattern5, - pub ratio_pct5_usd: MetricPattern5, - pub ratio_pct95: MetricPattern5, - pub ratio_pct95_usd: MetricPattern5, - pub ratio_pct98: MetricPattern5, - pub ratio_pct98_usd: MetricPattern5, - pub ratio_pct99: MetricPattern5, - pub ratio_pct99_usd: MetricPattern5, + pub ratio_pct1: MetricPattern4, + pub ratio_pct1_usd: MetricPattern4, + pub ratio_pct2: MetricPattern4, + pub ratio_pct2_usd: MetricPattern4, + pub ratio_pct5: MetricPattern4, + pub ratio_pct5_usd: MetricPattern4, + pub ratio_pct95: MetricPattern4, + pub ratio_pct95_usd: MetricPattern4, + pub ratio_pct98: MetricPattern4, + pub ratio_pct98_usd: MetricPattern4, + pub ratio_pct99: MetricPattern4, + pub ratio_pct99_usd: MetricPattern4, pub ratio_sd: Ratio1ySdPattern, } @@ -3427,100 +3148,50 @@ impl Price111dSmaPattern { /// Create a new pattern node with accumulated metric name. 
pub fn new(client: Arc, acc: String) -> Self { Self { - price: MetricPattern5::new(client.clone(), acc.clone()), - ratio: MetricPattern5::new(client.clone(), _m(&acc, "ratio")), - ratio_1m_sma: MetricPattern5::new(client.clone(), _m(&acc, "ratio_1m_sma")), - ratio_1w_sma: MetricPattern5::new(client.clone(), _m(&acc, "ratio_1w_sma")), + price: MetricPattern4::new(client.clone(), acc.clone()), + ratio: MetricPattern4::new(client.clone(), _m(&acc, "ratio")), + ratio_1m_sma: MetricPattern4::new(client.clone(), _m(&acc, "ratio_1m_sma")), + ratio_1w_sma: MetricPattern4::new(client.clone(), _m(&acc, "ratio_1w_sma")), ratio_1y_sd: Ratio1ySdPattern::new(client.clone(), _m(&acc, "ratio_1y")), ratio_2y_sd: Ratio1ySdPattern::new(client.clone(), _m(&acc, "ratio_2y")), ratio_4y_sd: Ratio1ySdPattern::new(client.clone(), _m(&acc, "ratio_4y")), - ratio_pct1: MetricPattern5::new(client.clone(), _m(&acc, "ratio_pct1")), - ratio_pct1_usd: MetricPattern5::new(client.clone(), _m(&acc, "ratio_pct1_usd")), - ratio_pct2: MetricPattern5::new(client.clone(), _m(&acc, "ratio_pct2")), - ratio_pct2_usd: MetricPattern5::new(client.clone(), _m(&acc, "ratio_pct2_usd")), - ratio_pct5: MetricPattern5::new(client.clone(), _m(&acc, "ratio_pct5")), - ratio_pct5_usd: MetricPattern5::new(client.clone(), _m(&acc, "ratio_pct5_usd")), - ratio_pct95: MetricPattern5::new(client.clone(), _m(&acc, "ratio_pct95")), - ratio_pct95_usd: MetricPattern5::new(client.clone(), _m(&acc, "ratio_pct95_usd")), - ratio_pct98: MetricPattern5::new(client.clone(), _m(&acc, "ratio_pct98")), - ratio_pct98_usd: MetricPattern5::new(client.clone(), _m(&acc, "ratio_pct98_usd")), - ratio_pct99: MetricPattern5::new(client.clone(), _m(&acc, "ratio_pct99")), - ratio_pct99_usd: MetricPattern5::new(client.clone(), _m(&acc, "ratio_pct99_usd")), + ratio_pct1: MetricPattern4::new(client.clone(), _m(&acc, "ratio_pct1")), + ratio_pct1_usd: MetricPattern4::new(client.clone(), _m(&acc, "ratio_pct1_usd")), + ratio_pct2: 
MetricPattern4::new(client.clone(), _m(&acc, "ratio_pct2")), + ratio_pct2_usd: MetricPattern4::new(client.clone(), _m(&acc, "ratio_pct2_usd")), + ratio_pct5: MetricPattern4::new(client.clone(), _m(&acc, "ratio_pct5")), + ratio_pct5_usd: MetricPattern4::new(client.clone(), _m(&acc, "ratio_pct5_usd")), + ratio_pct95: MetricPattern4::new(client.clone(), _m(&acc, "ratio_pct95")), + ratio_pct95_usd: MetricPattern4::new(client.clone(), _m(&acc, "ratio_pct95_usd")), + ratio_pct98: MetricPattern4::new(client.clone(), _m(&acc, "ratio_pct98")), + ratio_pct98_usd: MetricPattern4::new(client.clone(), _m(&acc, "ratio_pct98_usd")), + ratio_pct99: MetricPattern4::new(client.clone(), _m(&acc, "ratio_pct99")), + ratio_pct99_usd: MetricPattern4::new(client.clone(), _m(&acc, "ratio_pct99_usd")), ratio_sd: Ratio1ySdPattern::new(client.clone(), _m(&acc, "ratio")), } } } -/// Pattern struct for repeated tree structure. -pub struct PercentilesPattern2 { - pub cost_basis_pct05: MetricPattern5, - pub cost_basis_pct10: MetricPattern5, - pub cost_basis_pct15: MetricPattern5, - pub cost_basis_pct20: MetricPattern5, - pub cost_basis_pct25: MetricPattern5, - pub cost_basis_pct30: MetricPattern5, - pub cost_basis_pct35: MetricPattern5, - pub cost_basis_pct40: MetricPattern5, - pub cost_basis_pct45: MetricPattern5, - pub cost_basis_pct50: MetricPattern5, - pub cost_basis_pct55: MetricPattern5, - pub cost_basis_pct60: MetricPattern5, - pub cost_basis_pct65: MetricPattern5, - pub cost_basis_pct70: MetricPattern5, - pub cost_basis_pct75: MetricPattern5, - pub cost_basis_pct80: MetricPattern5, - pub cost_basis_pct85: MetricPattern5, - pub cost_basis_pct90: MetricPattern5, - pub cost_basis_pct95: MetricPattern5, -} - -impl PercentilesPattern2 { - /// Create a new pattern node with accumulated metric name. 
- pub fn new(client: Arc, acc: String) -> Self { - Self { - cost_basis_pct05: MetricPattern5::new(client.clone(), _m(&acc, "pct05")), - cost_basis_pct10: MetricPattern5::new(client.clone(), _m(&acc, "pct10")), - cost_basis_pct15: MetricPattern5::new(client.clone(), _m(&acc, "pct15")), - cost_basis_pct20: MetricPattern5::new(client.clone(), _m(&acc, "pct20")), - cost_basis_pct25: MetricPattern5::new(client.clone(), _m(&acc, "pct25")), - cost_basis_pct30: MetricPattern5::new(client.clone(), _m(&acc, "pct30")), - cost_basis_pct35: MetricPattern5::new(client.clone(), _m(&acc, "pct35")), - cost_basis_pct40: MetricPattern5::new(client.clone(), _m(&acc, "pct40")), - cost_basis_pct45: MetricPattern5::new(client.clone(), _m(&acc, "pct45")), - cost_basis_pct50: MetricPattern5::new(client.clone(), _m(&acc, "pct50")), - cost_basis_pct55: MetricPattern5::new(client.clone(), _m(&acc, "pct55")), - cost_basis_pct60: MetricPattern5::new(client.clone(), _m(&acc, "pct60")), - cost_basis_pct65: MetricPattern5::new(client.clone(), _m(&acc, "pct65")), - cost_basis_pct70: MetricPattern5::new(client.clone(), _m(&acc, "pct70")), - cost_basis_pct75: MetricPattern5::new(client.clone(), _m(&acc, "pct75")), - cost_basis_pct80: MetricPattern5::new(client.clone(), _m(&acc, "pct80")), - cost_basis_pct85: MetricPattern5::new(client.clone(), _m(&acc, "pct85")), - cost_basis_pct90: MetricPattern5::new(client.clone(), _m(&acc, "pct90")), - cost_basis_pct95: MetricPattern5::new(client.clone(), _m(&acc, "pct95")), - } - } -} - /// Pattern struct for repeated tree structure. 
pub struct ActivePriceRatioPattern { - pub ratio: MetricPattern5, - pub ratio_1m_sma: MetricPattern5, - pub ratio_1w_sma: MetricPattern5, + pub ratio: MetricPattern4, + pub ratio_1m_sma: MetricPattern4, + pub ratio_1w_sma: MetricPattern4, pub ratio_1y_sd: Ratio1ySdPattern, pub ratio_2y_sd: Ratio1ySdPattern, pub ratio_4y_sd: Ratio1ySdPattern, - pub ratio_pct1: MetricPattern5, - pub ratio_pct1_usd: MetricPattern5, - pub ratio_pct2: MetricPattern5, - pub ratio_pct2_usd: MetricPattern5, - pub ratio_pct5: MetricPattern5, - pub ratio_pct5_usd: MetricPattern5, - pub ratio_pct95: MetricPattern5, - pub ratio_pct95_usd: MetricPattern5, - pub ratio_pct98: MetricPattern5, - pub ratio_pct98_usd: MetricPattern5, - pub ratio_pct99: MetricPattern5, - pub ratio_pct99_usd: MetricPattern5, + pub ratio_pct1: MetricPattern4, + pub ratio_pct1_usd: MetricPattern4, + pub ratio_pct2: MetricPattern4, + pub ratio_pct2_usd: MetricPattern4, + pub ratio_pct5: MetricPattern4, + pub ratio_pct5_usd: MetricPattern4, + pub ratio_pct95: MetricPattern4, + pub ratio_pct95_usd: MetricPattern4, + pub ratio_pct98: MetricPattern4, + pub ratio_pct98_usd: MetricPattern4, + pub ratio_pct99: MetricPattern4, + pub ratio_pct99_usd: MetricPattern4, pub ratio_sd: Ratio1ySdPattern, } @@ -3528,29 +3199,79 @@ impl ActivePriceRatioPattern { /// Create a new pattern node with accumulated metric name. 
pub fn new(client: Arc, acc: String) -> Self { Self { - ratio: MetricPattern5::new(client.clone(), acc.clone()), - ratio_1m_sma: MetricPattern5::new(client.clone(), _m(&acc, "1m_sma")), - ratio_1w_sma: MetricPattern5::new(client.clone(), _m(&acc, "1w_sma")), + ratio: MetricPattern4::new(client.clone(), acc.clone()), + ratio_1m_sma: MetricPattern4::new(client.clone(), _m(&acc, "1m_sma")), + ratio_1w_sma: MetricPattern4::new(client.clone(), _m(&acc, "1w_sma")), ratio_1y_sd: Ratio1ySdPattern::new(client.clone(), _m(&acc, "1y")), ratio_2y_sd: Ratio1ySdPattern::new(client.clone(), _m(&acc, "2y")), ratio_4y_sd: Ratio1ySdPattern::new(client.clone(), _m(&acc, "4y")), - ratio_pct1: MetricPattern5::new(client.clone(), _m(&acc, "pct1")), - ratio_pct1_usd: MetricPattern5::new(client.clone(), _m(&acc, "pct1_usd")), - ratio_pct2: MetricPattern5::new(client.clone(), _m(&acc, "pct2")), - ratio_pct2_usd: MetricPattern5::new(client.clone(), _m(&acc, "pct2_usd")), - ratio_pct5: MetricPattern5::new(client.clone(), _m(&acc, "pct5")), - ratio_pct5_usd: MetricPattern5::new(client.clone(), _m(&acc, "pct5_usd")), - ratio_pct95: MetricPattern5::new(client.clone(), _m(&acc, "pct95")), - ratio_pct95_usd: MetricPattern5::new(client.clone(), _m(&acc, "pct95_usd")), - ratio_pct98: MetricPattern5::new(client.clone(), _m(&acc, "pct98")), - ratio_pct98_usd: MetricPattern5::new(client.clone(), _m(&acc, "pct98_usd")), - ratio_pct99: MetricPattern5::new(client.clone(), _m(&acc, "pct99")), - ratio_pct99_usd: MetricPattern5::new(client.clone(), _m(&acc, "pct99_usd")), + ratio_pct1: MetricPattern4::new(client.clone(), _m(&acc, "pct1")), + ratio_pct1_usd: MetricPattern4::new(client.clone(), _m(&acc, "pct1_usd")), + ratio_pct2: MetricPattern4::new(client.clone(), _m(&acc, "pct2")), + ratio_pct2_usd: MetricPattern4::new(client.clone(), _m(&acc, "pct2_usd")), + ratio_pct5: MetricPattern4::new(client.clone(), _m(&acc, "pct5")), + ratio_pct5_usd: MetricPattern4::new(client.clone(), _m(&acc, "pct5_usd")), + 
ratio_pct95: MetricPattern4::new(client.clone(), _m(&acc, "pct95")), + ratio_pct95_usd: MetricPattern4::new(client.clone(), _m(&acc, "pct95_usd")), + ratio_pct98: MetricPattern4::new(client.clone(), _m(&acc, "pct98")), + ratio_pct98_usd: MetricPattern4::new(client.clone(), _m(&acc, "pct98_usd")), + ratio_pct99: MetricPattern4::new(client.clone(), _m(&acc, "pct99")), + ratio_pct99_usd: MetricPattern4::new(client.clone(), _m(&acc, "pct99_usd")), ratio_sd: Ratio1ySdPattern::new(client.clone(), acc.clone()), } } } +/// Pattern struct for repeated tree structure. +pub struct PercentilesPattern2 { + pub cost_basis_pct05: MetricPattern4, + pub cost_basis_pct10: MetricPattern4, + pub cost_basis_pct15: MetricPattern4, + pub cost_basis_pct20: MetricPattern4, + pub cost_basis_pct25: MetricPattern4, + pub cost_basis_pct30: MetricPattern4, + pub cost_basis_pct35: MetricPattern4, + pub cost_basis_pct40: MetricPattern4, + pub cost_basis_pct45: MetricPattern4, + pub cost_basis_pct50: MetricPattern4, + pub cost_basis_pct55: MetricPattern4, + pub cost_basis_pct60: MetricPattern4, + pub cost_basis_pct65: MetricPattern4, + pub cost_basis_pct70: MetricPattern4, + pub cost_basis_pct75: MetricPattern4, + pub cost_basis_pct80: MetricPattern4, + pub cost_basis_pct85: MetricPattern4, + pub cost_basis_pct90: MetricPattern4, + pub cost_basis_pct95: MetricPattern4, +} + +impl PercentilesPattern2 { + /// Create a new pattern node with accumulated metric name. 
+ pub fn new(client: Arc, acc: String) -> Self { + Self { + cost_basis_pct05: MetricPattern4::new(client.clone(), _m(&acc, "pct05")), + cost_basis_pct10: MetricPattern4::new(client.clone(), _m(&acc, "pct10")), + cost_basis_pct15: MetricPattern4::new(client.clone(), _m(&acc, "pct15")), + cost_basis_pct20: MetricPattern4::new(client.clone(), _m(&acc, "pct20")), + cost_basis_pct25: MetricPattern4::new(client.clone(), _m(&acc, "pct25")), + cost_basis_pct30: MetricPattern4::new(client.clone(), _m(&acc, "pct30")), + cost_basis_pct35: MetricPattern4::new(client.clone(), _m(&acc, "pct35")), + cost_basis_pct40: MetricPattern4::new(client.clone(), _m(&acc, "pct40")), + cost_basis_pct45: MetricPattern4::new(client.clone(), _m(&acc, "pct45")), + cost_basis_pct50: MetricPattern4::new(client.clone(), _m(&acc, "pct50")), + cost_basis_pct55: MetricPattern4::new(client.clone(), _m(&acc, "pct55")), + cost_basis_pct60: MetricPattern4::new(client.clone(), _m(&acc, "pct60")), + cost_basis_pct65: MetricPattern4::new(client.clone(), _m(&acc, "pct65")), + cost_basis_pct70: MetricPattern4::new(client.clone(), _m(&acc, "pct70")), + cost_basis_pct75: MetricPattern4::new(client.clone(), _m(&acc, "pct75")), + cost_basis_pct80: MetricPattern4::new(client.clone(), _m(&acc, "pct80")), + cost_basis_pct85: MetricPattern4::new(client.clone(), _m(&acc, "pct85")), + cost_basis_pct90: MetricPattern4::new(client.clone(), _m(&acc, "pct90")), + cost_basis_pct95: MetricPattern4::new(client.clone(), _m(&acc, "pct95")), + } + } +} + /// Pattern struct for repeated tree structure. 
pub struct RelativePattern5 { pub neg_unrealized_loss_rel_to_market_cap: MetricPattern3, @@ -3559,12 +3280,12 @@ pub struct RelativePattern5 { pub net_unrealized_pnl_rel_to_market_cap: MetricPattern3, pub net_unrealized_pnl_rel_to_own_market_cap: MetricPattern3, pub net_unrealized_pnl_rel_to_own_total_unrealized_pnl: MetricPattern3, - pub nupl: MetricPattern5, + pub nupl: MetricPattern4, pub supply_in_loss_rel_to_circulating_supply: MetricPattern3, pub supply_in_loss_rel_to_own_supply: MetricPattern3, pub supply_in_profit_rel_to_circulating_supply: MetricPattern3, pub supply_in_profit_rel_to_own_supply: MetricPattern3, - pub supply_rel_to_circulating_supply: MetricPattern5, + pub supply_rel_to_circulating_supply: MetricPattern4, pub unrealized_loss_rel_to_market_cap: MetricPattern3, pub unrealized_loss_rel_to_own_market_cap: MetricPattern3, pub unrealized_loss_rel_to_own_total_unrealized_pnl: MetricPattern3, @@ -3583,12 +3304,12 @@ impl RelativePattern5 { net_unrealized_pnl_rel_to_market_cap: MetricPattern3::new(client.clone(), _m(&acc, "net_unrealized_pnl_rel_to_market_cap")), net_unrealized_pnl_rel_to_own_market_cap: MetricPattern3::new(client.clone(), _m(&acc, "net_unrealized_pnl_rel_to_own_market_cap")), net_unrealized_pnl_rel_to_own_total_unrealized_pnl: MetricPattern3::new(client.clone(), _m(&acc, "net_unrealized_pnl_rel_to_own_total_unrealized_pnl")), - nupl: MetricPattern5::new(client.clone(), _m(&acc, "nupl")), + nupl: MetricPattern4::new(client.clone(), _m(&acc, "nupl")), supply_in_loss_rel_to_circulating_supply: MetricPattern3::new(client.clone(), _m(&acc, "supply_in_loss_rel_to_circulating_supply")), supply_in_loss_rel_to_own_supply: MetricPattern3::new(client.clone(), _m(&acc, "supply_in_loss_rel_to_own_supply")), supply_in_profit_rel_to_circulating_supply: MetricPattern3::new(client.clone(), _m(&acc, "supply_in_profit_rel_to_circulating_supply")), supply_in_profit_rel_to_own_supply: MetricPattern3::new(client.clone(), _m(&acc, 
"supply_in_profit_rel_to_own_supply")), - supply_rel_to_circulating_supply: MetricPattern5::new(client.clone(), _m(&acc, "supply_rel_to_circulating_supply")), + supply_rel_to_circulating_supply: MetricPattern4::new(client.clone(), _m(&acc, "supply_rel_to_circulating_supply")), unrealized_loss_rel_to_market_cap: MetricPattern3::new(client.clone(), _m(&acc, "unrealized_loss_rel_to_market_cap")), unrealized_loss_rel_to_own_market_cap: MetricPattern3::new(client.clone(), _m(&acc, "unrealized_loss_rel_to_own_market_cap")), unrealized_loss_rel_to_own_total_unrealized_pnl: MetricPattern3::new(client.clone(), _m(&acc, "unrealized_loss_rel_to_own_total_unrealized_pnl")), @@ -3600,77 +3321,79 @@ impl RelativePattern5 { } /// Pattern struct for repeated tree structure. -pub struct AXbtPattern { - pub _1d_dominance: BlockCountPattern, - pub _1m_blocks_mined: MetricPattern5, - pub _1m_dominance: MetricPattern5, - pub _1w_blocks_mined: MetricPattern5, - pub _1w_dominance: MetricPattern5, - pub _1y_blocks_mined: MetricPattern5, - pub _1y_dominance: MetricPattern5, +pub struct AaopoolPattern { + pub _1m_blocks_mined: MetricPattern1, + pub _1m_dominance: MetricPattern1, + pub _1w_blocks_mined: MetricPattern1, + pub _1w_dominance: MetricPattern1, + pub _1y_blocks_mined: MetricPattern1, + pub _1y_dominance: MetricPattern1, + pub _24h_blocks_mined: MetricPattern1, + pub _24h_dominance: MetricPattern1, pub blocks_mined: BlockCountPattern, pub coinbase: UnclaimedRewardsPattern, - pub days_since_block: MetricPattern5, - pub dominance: BlockCountPattern, - pub fee: SentPattern, - pub subsidy: SentPattern, + pub days_since_block: MetricPattern4, + pub dominance: MetricPattern1, + pub fee: UnclaimedRewardsPattern, + pub subsidy: UnclaimedRewardsPattern, } -impl AXbtPattern { +impl AaopoolPattern { /// Create a new pattern node with accumulated metric name. 
pub fn new(client: Arc, acc: String) -> Self { Self { - _1d_dominance: BlockCountPattern::new(client.clone(), _m(&acc, "1d_dominance")), - _1m_blocks_mined: MetricPattern5::new(client.clone(), _m(&acc, "1m_blocks_mined")), - _1m_dominance: MetricPattern5::new(client.clone(), _m(&acc, "1m_dominance")), - _1w_blocks_mined: MetricPattern5::new(client.clone(), _m(&acc, "1w_blocks_mined")), - _1w_dominance: MetricPattern5::new(client.clone(), _m(&acc, "1w_dominance")), - _1y_blocks_mined: MetricPattern5::new(client.clone(), _m(&acc, "1y_blocks_mined")), - _1y_dominance: MetricPattern5::new(client.clone(), _m(&acc, "1y_dominance")), + _1m_blocks_mined: MetricPattern1::new(client.clone(), _m(&acc, "1m_blocks_mined")), + _1m_dominance: MetricPattern1::new(client.clone(), _m(&acc, "1m_dominance")), + _1w_blocks_mined: MetricPattern1::new(client.clone(), _m(&acc, "1w_blocks_mined")), + _1w_dominance: MetricPattern1::new(client.clone(), _m(&acc, "1w_dominance")), + _1y_blocks_mined: MetricPattern1::new(client.clone(), _m(&acc, "1y_blocks_mined")), + _1y_dominance: MetricPattern1::new(client.clone(), _m(&acc, "1y_dominance")), + _24h_blocks_mined: MetricPattern1::new(client.clone(), _m(&acc, "24h_blocks_mined")), + _24h_dominance: MetricPattern1::new(client.clone(), _m(&acc, "24h_dominance")), blocks_mined: BlockCountPattern::new(client.clone(), _m(&acc, "blocks_mined")), coinbase: UnclaimedRewardsPattern::new(client.clone(), _m(&acc, "coinbase")), - days_since_block: MetricPattern5::new(client.clone(), _m(&acc, "days_since_block")), - dominance: BlockCountPattern::new(client.clone(), _m(&acc, "dominance")), - fee: SentPattern::new(client.clone(), acc.clone()), - subsidy: SentPattern::new(client.clone(), acc.clone()), + days_since_block: MetricPattern4::new(client.clone(), _m(&acc, "days_since_block")), + dominance: MetricPattern1::new(client.clone(), _m(&acc, "dominance")), + fee: UnclaimedRewardsPattern::new(client.clone(), _m(&acc, "fee")), + subsidy: 
UnclaimedRewardsPattern::new(client.clone(), _m(&acc, "subsidy")), } } } /// Pattern struct for repeated tree structure. pub struct PriceAgoPattern { - pub _10y: MetricPattern5, - pub _1d: MetricPattern5, - pub _1m: MetricPattern5, - pub _1w: MetricPattern5, - pub _1y: MetricPattern5, - pub _2y: MetricPattern5, - pub _3m: MetricPattern5, - pub _3y: MetricPattern5, - pub _4y: MetricPattern5, - pub _5y: MetricPattern5, - pub _6m: MetricPattern5, - pub _6y: MetricPattern5, - pub _8y: MetricPattern5, + pub _10y: MetricPattern4, + pub _1d: MetricPattern4, + pub _1m: MetricPattern4, + pub _1w: MetricPattern4, + pub _1y: MetricPattern4, + pub _2y: MetricPattern4, + pub _3m: MetricPattern4, + pub _3y: MetricPattern4, + pub _4y: MetricPattern4, + pub _5y: MetricPattern4, + pub _6m: MetricPattern4, + pub _6y: MetricPattern4, + pub _8y: MetricPattern4, } impl PriceAgoPattern { /// Create a new pattern node with accumulated metric name. pub fn new(client: Arc, acc: String) -> Self { Self { - _10y: MetricPattern5::new(client.clone(), _m(&acc, "10y_ago")), - _1d: MetricPattern5::new(client.clone(), _m(&acc, "1d_ago")), - _1m: MetricPattern5::new(client.clone(), _m(&acc, "1m_ago")), - _1w: MetricPattern5::new(client.clone(), _m(&acc, "1w_ago")), - _1y: MetricPattern5::new(client.clone(), _m(&acc, "1y_ago")), - _2y: MetricPattern5::new(client.clone(), _m(&acc, "2y_ago")), - _3m: MetricPattern5::new(client.clone(), _m(&acc, "3m_ago")), - _3y: MetricPattern5::new(client.clone(), _m(&acc, "3y_ago")), - _4y: MetricPattern5::new(client.clone(), _m(&acc, "4y_ago")), - _5y: MetricPattern5::new(client.clone(), _m(&acc, "5y_ago")), - _6m: MetricPattern5::new(client.clone(), _m(&acc, "6m_ago")), - _6y: MetricPattern5::new(client.clone(), _m(&acc, "6y_ago")), - _8y: MetricPattern5::new(client.clone(), _m(&acc, "8y_ago")), + _10y: MetricPattern4::new(client.clone(), _m(&acc, "10y_ago")), + _1d: MetricPattern4::new(client.clone(), _m(&acc, "1d_ago")), + _1m: MetricPattern4::new(client.clone(), 
_m(&acc, "1m_ago")), + _1w: MetricPattern4::new(client.clone(), _m(&acc, "1w_ago")), + _1y: MetricPattern4::new(client.clone(), _m(&acc, "1y_ago")), + _2y: MetricPattern4::new(client.clone(), _m(&acc, "2y_ago")), + _3m: MetricPattern4::new(client.clone(), _m(&acc, "3m_ago")), + _3y: MetricPattern4::new(client.clone(), _m(&acc, "3y_ago")), + _4y: MetricPattern4::new(client.clone(), _m(&acc, "4y_ago")), + _5y: MetricPattern4::new(client.clone(), _m(&acc, "5y_ago")), + _6m: MetricPattern4::new(client.clone(), _m(&acc, "6m_ago")), + _6y: MetricPattern4::new(client.clone(), _m(&acc, "6y_ago")), + _8y: MetricPattern4::new(client.clone(), _m(&acc, "8y_ago")), } } } @@ -3713,70 +3436,70 @@ impl PeriodLumpSumStackPattern { /// Pattern struct for repeated tree structure. pub struct PeriodAveragePricePattern { - pub _10y: MetricPattern5, - pub _1m: MetricPattern5, - pub _1w: MetricPattern5, - pub _1y: MetricPattern5, - pub _2y: MetricPattern5, - pub _3m: MetricPattern5, - pub _3y: MetricPattern5, - pub _4y: MetricPattern5, - pub _5y: MetricPattern5, - pub _6m: MetricPattern5, - pub _6y: MetricPattern5, - pub _8y: MetricPattern5, + pub _10y: MetricPattern4, + pub _1m: MetricPattern4, + pub _1w: MetricPattern4, + pub _1y: MetricPattern4, + pub _2y: MetricPattern4, + pub _3m: MetricPattern4, + pub _3y: MetricPattern4, + pub _4y: MetricPattern4, + pub _5y: MetricPattern4, + pub _6m: MetricPattern4, + pub _6y: MetricPattern4, + pub _8y: MetricPattern4, } impl PeriodAveragePricePattern { /// Create a new pattern node with accumulated metric name. 
pub fn new(client: Arc, acc: String) -> Self { Self { - _10y: MetricPattern5::new(client.clone(), if acc.is_empty() { "10y".to_string() } else { format!("10y_{acc}") }), - _1m: MetricPattern5::new(client.clone(), if acc.is_empty() { "1m".to_string() } else { format!("1m_{acc}") }), - _1w: MetricPattern5::new(client.clone(), if acc.is_empty() { "1w".to_string() } else { format!("1w_{acc}") }), - _1y: MetricPattern5::new(client.clone(), if acc.is_empty() { "1y".to_string() } else { format!("1y_{acc}") }), - _2y: MetricPattern5::new(client.clone(), if acc.is_empty() { "2y".to_string() } else { format!("2y_{acc}") }), - _3m: MetricPattern5::new(client.clone(), if acc.is_empty() { "3m".to_string() } else { format!("3m_{acc}") }), - _3y: MetricPattern5::new(client.clone(), if acc.is_empty() { "3y".to_string() } else { format!("3y_{acc}") }), - _4y: MetricPattern5::new(client.clone(), if acc.is_empty() { "4y".to_string() } else { format!("4y_{acc}") }), - _5y: MetricPattern5::new(client.clone(), if acc.is_empty() { "5y".to_string() } else { format!("5y_{acc}") }), - _6m: MetricPattern5::new(client.clone(), if acc.is_empty() { "6m".to_string() } else { format!("6m_{acc}") }), - _6y: MetricPattern5::new(client.clone(), if acc.is_empty() { "6y".to_string() } else { format!("6y_{acc}") }), - _8y: MetricPattern5::new(client.clone(), if acc.is_empty() { "8y".to_string() } else { format!("8y_{acc}") }), + _10y: MetricPattern4::new(client.clone(), if acc.is_empty() { "10y".to_string() } else { format!("10y_{acc}") }), + _1m: MetricPattern4::new(client.clone(), if acc.is_empty() { "1m".to_string() } else { format!("1m_{acc}") }), + _1w: MetricPattern4::new(client.clone(), if acc.is_empty() { "1w".to_string() } else { format!("1w_{acc}") }), + _1y: MetricPattern4::new(client.clone(), if acc.is_empty() { "1y".to_string() } else { format!("1y_{acc}") }), + _2y: MetricPattern4::new(client.clone(), if acc.is_empty() { "2y".to_string() } else { format!("2y_{acc}") }), + _3m: 
MetricPattern4::new(client.clone(), if acc.is_empty() { "3m".to_string() } else { format!("3m_{acc}") }), + _3y: MetricPattern4::new(client.clone(), if acc.is_empty() { "3y".to_string() } else { format!("3y_{acc}") }), + _4y: MetricPattern4::new(client.clone(), if acc.is_empty() { "4y".to_string() } else { format!("4y_{acc}") }), + _5y: MetricPattern4::new(client.clone(), if acc.is_empty() { "5y".to_string() } else { format!("5y_{acc}") }), + _6m: MetricPattern4::new(client.clone(), if acc.is_empty() { "6m".to_string() } else { format!("6m_{acc}") }), + _6y: MetricPattern4::new(client.clone(), if acc.is_empty() { "6y".to_string() } else { format!("6y_{acc}") }), + _8y: MetricPattern4::new(client.clone(), if acc.is_empty() { "8y".to_string() } else { format!("8y_{acc}") }), } } } /// Pattern struct for repeated tree structure. pub struct ClassAveragePricePattern { - pub _2015: MetricPattern5, - pub _2016: MetricPattern5, - pub _2017: MetricPattern5, - pub _2018: MetricPattern5, - pub _2019: MetricPattern5, - pub _2020: MetricPattern5, - pub _2021: MetricPattern5, - pub _2022: MetricPattern5, - pub _2023: MetricPattern5, - pub _2024: MetricPattern5, - pub _2025: MetricPattern5, + pub _2015: MetricPattern4, + pub _2016: MetricPattern4, + pub _2017: MetricPattern4, + pub _2018: MetricPattern4, + pub _2019: MetricPattern4, + pub _2020: MetricPattern4, + pub _2021: MetricPattern4, + pub _2022: MetricPattern4, + pub _2023: MetricPattern4, + pub _2024: MetricPattern4, + pub _2025: MetricPattern4, } impl ClassAveragePricePattern { /// Create a new pattern node with accumulated metric name. 
pub fn new(client: Arc, acc: String) -> Self { Self { - _2015: MetricPattern5::new(client.clone(), _m(&acc, "2015_average_price")), - _2016: MetricPattern5::new(client.clone(), _m(&acc, "2016_average_price")), - _2017: MetricPattern5::new(client.clone(), _m(&acc, "2017_average_price")), - _2018: MetricPattern5::new(client.clone(), _m(&acc, "2018_average_price")), - _2019: MetricPattern5::new(client.clone(), _m(&acc, "2019_average_price")), - _2020: MetricPattern5::new(client.clone(), _m(&acc, "2020_average_price")), - _2021: MetricPattern5::new(client.clone(), _m(&acc, "2021_average_price")), - _2022: MetricPattern5::new(client.clone(), _m(&acc, "2022_average_price")), - _2023: MetricPattern5::new(client.clone(), _m(&acc, "2023_average_price")), - _2024: MetricPattern5::new(client.clone(), _m(&acc, "2024_average_price")), - _2025: MetricPattern5::new(client.clone(), _m(&acc, "2025_average_price")), + _2015: MetricPattern4::new(client.clone(), _m(&acc, "2015_average_price")), + _2016: MetricPattern4::new(client.clone(), _m(&acc, "2016_average_price")), + _2017: MetricPattern4::new(client.clone(), _m(&acc, "2017_average_price")), + _2018: MetricPattern4::new(client.clone(), _m(&acc, "2018_average_price")), + _2019: MetricPattern4::new(client.clone(), _m(&acc, "2019_average_price")), + _2020: MetricPattern4::new(client.clone(), _m(&acc, "2020_average_price")), + _2021: MetricPattern4::new(client.clone(), _m(&acc, "2021_average_price")), + _2022: MetricPattern4::new(client.clone(), _m(&acc, "2022_average_price")), + _2023: MetricPattern4::new(client.clone(), _m(&acc, "2023_average_price")), + _2024: MetricPattern4::new(client.clone(), _m(&acc, "2024_average_price")), + _2025: MetricPattern4::new(client.clone(), _m(&acc, "2025_average_price")), } } } @@ -3817,12 +3540,12 @@ impl RelativePattern2 { pub struct RelativePattern { pub neg_unrealized_loss_rel_to_market_cap: MetricPattern3, pub net_unrealized_pnl_rel_to_market_cap: MetricPattern3, - pub nupl: MetricPattern5, + 
pub nupl: MetricPattern4, pub supply_in_loss_rel_to_circulating_supply: MetricPattern3, pub supply_in_loss_rel_to_own_supply: MetricPattern3, pub supply_in_profit_rel_to_circulating_supply: MetricPattern3, pub supply_in_profit_rel_to_own_supply: MetricPattern3, - pub supply_rel_to_circulating_supply: MetricPattern5, + pub supply_rel_to_circulating_supply: MetricPattern4, pub unrealized_loss_rel_to_market_cap: MetricPattern3, pub unrealized_profit_rel_to_market_cap: MetricPattern3, } @@ -3833,26 +3556,56 @@ impl RelativePattern { Self { neg_unrealized_loss_rel_to_market_cap: MetricPattern3::new(client.clone(), _m(&acc, "neg_unrealized_loss_rel_to_market_cap")), net_unrealized_pnl_rel_to_market_cap: MetricPattern3::new(client.clone(), _m(&acc, "net_unrealized_pnl_rel_to_market_cap")), - nupl: MetricPattern5::new(client.clone(), _m(&acc, "nupl")), + nupl: MetricPattern4::new(client.clone(), _m(&acc, "nupl")), supply_in_loss_rel_to_circulating_supply: MetricPattern3::new(client.clone(), _m(&acc, "supply_in_loss_rel_to_circulating_supply")), supply_in_loss_rel_to_own_supply: MetricPattern3::new(client.clone(), _m(&acc, "supply_in_loss_rel_to_own_supply")), supply_in_profit_rel_to_circulating_supply: MetricPattern3::new(client.clone(), _m(&acc, "supply_in_profit_rel_to_circulating_supply")), supply_in_profit_rel_to_own_supply: MetricPattern3::new(client.clone(), _m(&acc, "supply_in_profit_rel_to_own_supply")), - supply_rel_to_circulating_supply: MetricPattern5::new(client.clone(), _m(&acc, "supply_rel_to_circulating_supply")), + supply_rel_to_circulating_supply: MetricPattern4::new(client.clone(), _m(&acc, "supply_rel_to_circulating_supply")), unrealized_loss_rel_to_market_cap: MetricPattern3::new(client.clone(), _m(&acc, "unrealized_loss_rel_to_market_cap")), unrealized_profit_rel_to_market_cap: MetricPattern3::new(client.clone(), _m(&acc, "unrealized_profit_rel_to_market_cap")), } } } +/// Pattern struct for repeated tree structure. 
+pub struct AddrCountPattern { + pub all: MetricPattern1, + pub p2a: MetricPattern1, + pub p2pk33: MetricPattern1, + pub p2pk65: MetricPattern1, + pub p2pkh: MetricPattern1, + pub p2sh: MetricPattern1, + pub p2tr: MetricPattern1, + pub p2wpkh: MetricPattern1, + pub p2wsh: MetricPattern1, +} + +impl AddrCountPattern { + /// Create a new pattern node with accumulated metric name. + pub fn new(client: Arc, acc: String) -> Self { + Self { + all: MetricPattern1::new(client.clone(), if acc.is_empty() { "addr".to_string() } else { format!("addr_{acc}") }), + p2a: MetricPattern1::new(client.clone(), if acc.is_empty() { "p2a_addr".to_string() } else { format!("p2a_addr_{acc}") }), + p2pk33: MetricPattern1::new(client.clone(), if acc.is_empty() { "p2pk33_addr".to_string() } else { format!("p2pk33_addr_{acc}") }), + p2pk65: MetricPattern1::new(client.clone(), if acc.is_empty() { "p2pk65_addr".to_string() } else { format!("p2pk65_addr_{acc}") }), + p2pkh: MetricPattern1::new(client.clone(), if acc.is_empty() { "p2pkh_addr".to_string() } else { format!("p2pkh_addr_{acc}") }), + p2sh: MetricPattern1::new(client.clone(), if acc.is_empty() { "p2sh_addr".to_string() } else { format!("p2sh_addr_{acc}") }), + p2tr: MetricPattern1::new(client.clone(), if acc.is_empty() { "p2tr_addr".to_string() } else { format!("p2tr_addr_{acc}") }), + p2wpkh: MetricPattern1::new(client.clone(), if acc.is_empty() { "p2wpkh_addr".to_string() } else { format!("p2wpkh_addr_{acc}") }), + p2wsh: MetricPattern1::new(client.clone(), if acc.is_empty() { "p2wsh_addr".to_string() } else { format!("p2wsh_addr_{acc}") }), + } + } +} + /// Pattern struct for repeated tree structure. 
pub struct UnrealizedPattern { pub neg_unrealized_loss: MetricPattern3, pub net_unrealized_pnl: MetricPattern3, - pub supply_in_loss: SupplyPattern2, - pub supply_in_loss_value: SupplyValuePattern, - pub supply_in_profit: SupplyPattern2, - pub supply_in_profit_value: SupplyValuePattern, + pub supply_in_loss: SupplyInLossPattern, + pub supply_in_loss_value: SupplyInLossValuePattern, + pub supply_in_profit: SupplyInLossPattern, + pub supply_in_profit_value: SupplyInLossValuePattern, pub total_unrealized_pnl: MetricPattern3, pub unrealized_loss: MetricPattern3, pub unrealized_profit: MetricPattern3, @@ -3864,10 +3617,10 @@ impl UnrealizedPattern { Self { neg_unrealized_loss: MetricPattern3::new(client.clone(), _m(&acc, "neg_unrealized_loss")), net_unrealized_pnl: MetricPattern3::new(client.clone(), _m(&acc, "net_unrealized_pnl")), - supply_in_loss: SupplyPattern2::new(client.clone(), _m(&acc, "supply_in_loss")), - supply_in_loss_value: SupplyValuePattern::new(client.clone(), _m(&acc, "supply_in_loss")), - supply_in_profit: SupplyPattern2::new(client.clone(), _m(&acc, "supply_in_profit")), - supply_in_profit_value: SupplyValuePattern::new(client.clone(), _m(&acc, "supply_in_profit")), + supply_in_loss: SupplyInLossPattern::new(client.clone(), _m(&acc, "supply_in_loss")), + supply_in_loss_value: SupplyInLossValuePattern::new(client.clone(), _m(&acc, "supply_in_loss")), + supply_in_profit: SupplyInLossPattern::new(client.clone(), _m(&acc, "supply_in_profit")), + supply_in_profit_value: SupplyInLossValuePattern::new(client.clone(), _m(&acc, "supply_in_profit")), total_unrealized_pnl: MetricPattern3::new(client.clone(), _m(&acc, "total_unrealized_pnl")), unrealized_loss: MetricPattern3::new(client.clone(), _m(&acc, "unrealized_loss")), unrealized_profit: MetricPattern3::new(client.clone(), _m(&acc, "unrealized_profit")), @@ -3875,62 +3628,6 @@ impl UnrealizedPattern { } } -/// Pattern struct for repeated tree structure. 
-pub struct AddresstypeToHeightToAddrCountPattern { - pub p2a: MetricPattern26, - pub p2pk33: MetricPattern26, - pub p2pk65: MetricPattern26, - pub p2pkh: MetricPattern26, - pub p2sh: MetricPattern26, - pub p2tr: MetricPattern26, - pub p2wpkh: MetricPattern26, - pub p2wsh: MetricPattern26, -} - -impl AddresstypeToHeightToAddrCountPattern { - /// Create a new pattern node with accumulated metric name. - pub fn new(client: Arc, acc: String) -> Self { - Self { - p2a: MetricPattern26::new(client.clone(), if acc.is_empty() { "p2a".to_string() } else { format!("p2a_{acc}") }), - p2pk33: MetricPattern26::new(client.clone(), if acc.is_empty() { "p2pk33".to_string() } else { format!("p2pk33_{acc}") }), - p2pk65: MetricPattern26::new(client.clone(), if acc.is_empty() { "p2pk65".to_string() } else { format!("p2pk65_{acc}") }), - p2pkh: MetricPattern26::new(client.clone(), if acc.is_empty() { "p2pkh".to_string() } else { format!("p2pkh_{acc}") }), - p2sh: MetricPattern26::new(client.clone(), if acc.is_empty() { "p2sh".to_string() } else { format!("p2sh_{acc}") }), - p2tr: MetricPattern26::new(client.clone(), if acc.is_empty() { "p2tr".to_string() } else { format!("p2tr_{acc}") }), - p2wpkh: MetricPattern26::new(client.clone(), if acc.is_empty() { "p2wpkh".to_string() } else { format!("p2wpkh_{acc}") }), - p2wsh: MetricPattern26::new(client.clone(), if acc.is_empty() { "p2wsh".to_string() } else { format!("p2wsh_{acc}") }), - } - } -} - -/// Pattern struct for repeated tree structure. -pub struct CountPattern2 { - pub average: MetricPattern2, - pub cumulative: MetricPattern6, - pub distribution: BlockIntervalPattern, - pub max: MetricPattern6, - pub min: MetricPattern6, - pub minmax: MinmaxPattern, - pub sum: MetricPattern6, - pub sum_cum: SumCumPattern, -} - -impl CountPattern2 { - /// Create a new pattern node with accumulated metric name. 
- pub fn new(client: Arc, acc: String) -> Self { - Self { - average: MetricPattern2::new(client.clone(), _m(&acc, "average")), - cumulative: MetricPattern6::new(client.clone(), _m(&acc, "cumulative")), - distribution: BlockIntervalPattern::new(client.clone(), acc.clone()), - max: MetricPattern6::new(client.clone(), _m(&acc, "max")), - min: MetricPattern6::new(client.clone(), _m(&acc, "min")), - minmax: MinmaxPattern::new(client.clone(), acc.clone()), - sum: MetricPattern6::new(client.clone(), _m(&acc, "sum")), - sum_cum: SumCumPattern::new(client.clone(), acc.clone()), - } - } -} - /// Pattern struct for repeated tree structure. pub struct _0satsPattern { pub activity: ActivityPattern2, @@ -3938,7 +3635,7 @@ pub struct _0satsPattern { pub cost_basis: CostBasisPattern, pub realized: RealizedPattern, pub relative: RelativePattern, - pub supply: SupplyPattern3, + pub supply: SupplyPattern2, pub unrealized: UnrealizedPattern, } @@ -3951,7 +3648,7 @@ impl _0satsPattern { cost_basis: CostBasisPattern::new(client.clone(), acc.clone()), realized: RealizedPattern::new(client.clone(), acc.clone()), relative: RelativePattern::new(client.clone(), acc.clone()), - supply: SupplyPattern3::new(client.clone(), acc.clone()), + supply: SupplyPattern2::new(client.clone(), acc.clone()), unrealized: UnrealizedPattern::new(client.clone(), acc.clone()), } } @@ -3959,60 +3656,34 @@ impl _0satsPattern { /// Pattern struct for repeated tree structure. pub struct PeriodCagrPattern { - pub _10y: MetricPattern5, - pub _2y: MetricPattern5, - pub _3y: MetricPattern5, - pub _4y: MetricPattern5, - pub _5y: MetricPattern5, - pub _6y: MetricPattern5, - pub _8y: MetricPattern5, + pub _10y: MetricPattern4, + pub _2y: MetricPattern4, + pub _3y: MetricPattern4, + pub _4y: MetricPattern4, + pub _5y: MetricPattern4, + pub _6y: MetricPattern4, + pub _8y: MetricPattern4, } impl PeriodCagrPattern { /// Create a new pattern node with accumulated metric name. 
pub fn new(client: Arc, acc: String) -> Self { Self { - _10y: MetricPattern5::new(client.clone(), if acc.is_empty() { "10y".to_string() } else { format!("10y_{acc}") }), - _2y: MetricPattern5::new(client.clone(), if acc.is_empty() { "2y".to_string() } else { format!("2y_{acc}") }), - _3y: MetricPattern5::new(client.clone(), if acc.is_empty() { "3y".to_string() } else { format!("3y_{acc}") }), - _4y: MetricPattern5::new(client.clone(), if acc.is_empty() { "4y".to_string() } else { format!("4y_{acc}") }), - _5y: MetricPattern5::new(client.clone(), if acc.is_empty() { "5y".to_string() } else { format!("5y_{acc}") }), - _6y: MetricPattern5::new(client.clone(), if acc.is_empty() { "6y".to_string() } else { format!("6y_{acc}") }), - _8y: MetricPattern5::new(client.clone(), if acc.is_empty() { "8y".to_string() } else { format!("8y_{acc}") }), + _10y: MetricPattern4::new(client.clone(), if acc.is_empty() { "10y".to_string() } else { format!("10y_{acc}") }), + _2y: MetricPattern4::new(client.clone(), if acc.is_empty() { "2y".to_string() } else { format!("2y_{acc}") }), + _3y: MetricPattern4::new(client.clone(), if acc.is_empty() { "3y".to_string() } else { format!("3y_{acc}") }), + _4y: MetricPattern4::new(client.clone(), if acc.is_empty() { "4y".to_string() } else { format!("4y_{acc}") }), + _5y: MetricPattern4::new(client.clone(), if acc.is_empty() { "5y".to_string() } else { format!("5y_{acc}") }), + _6y: MetricPattern4::new(client.clone(), if acc.is_empty() { "6y".to_string() } else { format!("6y_{acc}") }), + _8y: MetricPattern4::new(client.clone(), if acc.is_empty() { "8y".to_string() } else { format!("8y_{acc}") }), } } } /// Pattern struct for repeated tree structure. 
-pub struct BlockSizePattern { - pub average: MetricPattern6, - pub cumulative: MetricPattern4, - pub distribution: BlockIntervalPattern, - pub max: MetricPattern6, - pub min: MetricPattern6, - pub sum: MetricPattern6, - pub sum_cum: SumCumPattern, -} - -impl BlockSizePattern { - /// Create a new pattern node with accumulated metric name. - pub fn new(client: Arc, acc: String) -> Self { - Self { - average: MetricPattern6::new(client.clone(), _m(&acc, "average")), - cumulative: MetricPattern4::new(client.clone(), _m(&acc, "cumulative")), - distribution: BlockIntervalPattern::new(client.clone(), acc.clone()), - max: MetricPattern6::new(client.clone(), _m(&acc, "max")), - min: MetricPattern6::new(client.clone(), _m(&acc, "min")), - sum: MetricPattern6::new(client.clone(), _m(&acc, "sum")), - sum_cum: SumCumPattern::new(client.clone(), acc.clone()), - } - } -} - -/// Pattern struct for repeated tree structure. -pub struct DollarsPattern { +pub struct BitcoinPattern { pub average: MetricPattern2, - pub base: MetricPattern26, + pub base: MetricPattern23, pub cumulative: MetricPattern1, pub max: MetricPattern2, pub min: MetricPattern2, @@ -4020,12 +3691,12 @@ pub struct DollarsPattern { pub sum: MetricPattern2, } -impl DollarsPattern { +impl BitcoinPattern { /// Create a new pattern node with accumulated metric name. pub fn new(client: Arc, acc: String) -> Self { Self { average: MetricPattern2::new(client.clone(), _m(&acc, "average")), - base: MetricPattern26::new(client.clone(), acc.clone()), + base: MetricPattern23::new(client.clone(), acc.clone()), cumulative: MetricPattern1::new(client.clone(), _m(&acc, "cumulative")), max: MetricPattern2::new(client.clone(), _m(&acc, "max")), min: MetricPattern2::new(client.clone(), _m(&acc, "min")), @@ -4035,85 +3706,13 @@ impl DollarsPattern { } } -/// Pattern struct for repeated tree structure. 
-pub struct _10yPattern { - pub activity: ActivityPattern2, - pub cost_basis: CostBasisPattern, - pub realized: RealizedPattern4, - pub relative: RelativePattern, - pub supply: SupplyPattern3, - pub unrealized: UnrealizedPattern, -} - -impl _10yPattern { - /// Create a new pattern node with accumulated metric name. - pub fn new(client: Arc, acc: String) -> Self { - Self { - activity: ActivityPattern2::new(client.clone(), acc.clone()), - cost_basis: CostBasisPattern::new(client.clone(), acc.clone()), - realized: RealizedPattern4::new(client.clone(), acc.clone()), - relative: RelativePattern::new(client.clone(), acc.clone()), - supply: SupplyPattern3::new(client.clone(), acc.clone()), - unrealized: UnrealizedPattern::new(client.clone(), acc.clone()), - } - } -} - -/// Pattern struct for repeated tree structure. -pub struct _10yTo12yPattern { - pub activity: ActivityPattern2, - pub cost_basis: CostBasisPattern2, - pub realized: RealizedPattern2, - pub relative: RelativePattern2, - pub supply: SupplyPattern3, - pub unrealized: UnrealizedPattern, -} - -impl _10yTo12yPattern { - /// Create a new pattern node with accumulated metric name. - pub fn new(client: Arc, acc: String) -> Self { - Self { - activity: ActivityPattern2::new(client.clone(), acc.clone()), - cost_basis: CostBasisPattern2::new(client.clone(), acc.clone()), - realized: RealizedPattern2::new(client.clone(), acc.clone()), - relative: RelativePattern2::new(client.clone(), acc.clone()), - supply: SupplyPattern3::new(client.clone(), acc.clone()), - unrealized: UnrealizedPattern::new(client.clone(), acc.clone()), - } - } -} - -/// Pattern struct for repeated tree structure. -pub struct _100btcPattern { - pub activity: ActivityPattern2, - pub cost_basis: CostBasisPattern, - pub realized: RealizedPattern, - pub relative: RelativePattern, - pub supply: SupplyPattern3, - pub unrealized: UnrealizedPattern, -} - -impl _100btcPattern { - /// Create a new pattern node with accumulated metric name. 
- pub fn new(client: Arc, acc: String) -> Self { - Self { - activity: ActivityPattern2::new(client.clone(), acc.clone()), - cost_basis: CostBasisPattern::new(client.clone(), acc.clone()), - realized: RealizedPattern::new(client.clone(), acc.clone()), - relative: RelativePattern::new(client.clone(), acc.clone()), - supply: SupplyPattern3::new(client.clone(), acc.clone()), - unrealized: UnrealizedPattern::new(client.clone(), acc.clone()), - } - } -} - /// Pattern struct for repeated tree structure. pub struct _0satsPattern2 { pub activity: ActivityPattern2, pub cost_basis: CostBasisPattern, pub realized: RealizedPattern, pub relative: RelativePattern4, - pub supply: SupplyPattern3, + pub supply: SupplyPattern2, pub unrealized: UnrealizedPattern, } @@ -4125,31 +3724,103 @@ impl _0satsPattern2 { cost_basis: CostBasisPattern::new(client.clone(), acc.clone()), realized: RealizedPattern::new(client.clone(), acc.clone()), relative: RelativePattern4::new(client.clone(), _m(&acc, "supply_in")), - supply: SupplyPattern3::new(client.clone(), acc.clone()), + supply: SupplyPattern2::new(client.clone(), acc.clone()), unrealized: UnrealizedPattern::new(client.clone(), acc.clone()), } } } /// Pattern struct for repeated tree structure. -pub struct BitcoinPattern { +pub struct _10yTo12yPattern { + pub activity: ActivityPattern2, + pub cost_basis: CostBasisPattern2, + pub realized: RealizedPattern2, + pub relative: RelativePattern2, + pub supply: SupplyPattern2, + pub unrealized: UnrealizedPattern, +} + +impl _10yTo12yPattern { + /// Create a new pattern node with accumulated metric name. 
+ pub fn new(client: Arc, acc: String) -> Self { + Self { + activity: ActivityPattern2::new(client.clone(), acc.clone()), + cost_basis: CostBasisPattern2::new(client.clone(), acc.clone()), + realized: RealizedPattern2::new(client.clone(), acc.clone()), + relative: RelativePattern2::new(client.clone(), acc.clone()), + supply: SupplyPattern2::new(client.clone(), acc.clone()), + unrealized: UnrealizedPattern::new(client.clone(), acc.clone()), + } + } +} + +/// Pattern struct for repeated tree structure. +pub struct _10yPattern { + pub activity: ActivityPattern2, + pub cost_basis: CostBasisPattern, + pub realized: RealizedPattern4, + pub relative: RelativePattern, + pub supply: SupplyPattern2, + pub unrealized: UnrealizedPattern, +} + +impl _10yPattern { + /// Create a new pattern node with accumulated metric name. + pub fn new(client: Arc, acc: String) -> Self { + Self { + activity: ActivityPattern2::new(client.clone(), acc.clone()), + cost_basis: CostBasisPattern::new(client.clone(), acc.clone()), + realized: RealizedPattern4::new(client.clone(), acc.clone()), + relative: RelativePattern::new(client.clone(), acc.clone()), + supply: SupplyPattern2::new(client.clone(), acc.clone()), + unrealized: UnrealizedPattern::new(client.clone(), acc.clone()), + } + } +} + +/// Pattern struct for repeated tree structure. +pub struct _100btcPattern { + pub activity: ActivityPattern2, + pub cost_basis: CostBasisPattern, + pub realized: RealizedPattern, + pub relative: RelativePattern, + pub supply: SupplyPattern2, + pub unrealized: UnrealizedPattern, +} + +impl _100btcPattern { + /// Create a new pattern node with accumulated metric name. 
+ pub fn new(client: Arc, acc: String) -> Self { + Self { + activity: ActivityPattern2::new(client.clone(), acc.clone()), + cost_basis: CostBasisPattern::new(client.clone(), acc.clone()), + realized: RealizedPattern::new(client.clone(), acc.clone()), + relative: RelativePattern::new(client.clone(), acc.clone()), + supply: SupplyPattern2::new(client.clone(), acc.clone()), + unrealized: UnrealizedPattern::new(client.clone(), acc.clone()), + } + } +} + +/// Pattern struct for repeated tree structure. +pub struct BlockSizePattern { pub average: MetricPattern2, - pub base: MetricPattern26, - pub cumulative: MetricPattern2, + pub cumulative: MetricPattern1, pub max: MetricPattern2, pub min: MetricPattern2, + pub percentiles: PercentilesPattern, pub sum: MetricPattern2, } -impl BitcoinPattern { +impl BlockSizePattern { /// Create a new pattern node with accumulated metric name. pub fn new(client: Arc, acc: String) -> Self { Self { average: MetricPattern2::new(client.clone(), _m(&acc, "average")), - base: MetricPattern26::new(client.clone(), acc.clone()), - cumulative: MetricPattern2::new(client.clone(), _m(&acc, "cum")), + cumulative: MetricPattern1::new(client.clone(), _m(&acc, "cumulative")), max: MetricPattern2::new(client.clone(), _m(&acc, "max")), min: MetricPattern2::new(client.clone(), _m(&acc, "min")), + percentiles: PercentilesPattern::new(client.clone(), acc.clone()), sum: MetricPattern2::new(client.clone(), _m(&acc, "sum")), } } @@ -4159,8 +3830,8 @@ impl BitcoinPattern { pub struct ActivityPattern2 { pub coinblocks_destroyed: BlockCountPattern, pub coindays_destroyed: BlockCountPattern, - pub satblocks_destroyed: MetricPattern26, - pub satdays_destroyed: MetricPattern26, + pub satblocks_destroyed: MetricPattern23, + pub satdays_destroyed: MetricPattern23, pub sent: SentPattern, } @@ -4170,209 +3841,113 @@ impl ActivityPattern2 { Self { coinblocks_destroyed: BlockCountPattern::new(client.clone(), _m(&acc, "coinblocks_destroyed")), coindays_destroyed: 
BlockCountPattern::new(client.clone(), _m(&acc, "coindays_destroyed")), - satblocks_destroyed: MetricPattern26::new(client.clone(), _m(&acc, "satblocks_destroyed")), - satdays_destroyed: MetricPattern26::new(client.clone(), _m(&acc, "satdays_destroyed")), + satblocks_destroyed: MetricPattern23::new(client.clone(), _m(&acc, "satblocks_destroyed")), + satdays_destroyed: MetricPattern23::new(client.clone(), _m(&acc, "satdays_destroyed")), sent: SentPattern::new(client.clone(), _m(&acc, "sent")), } } } -/// Pattern struct for repeated tree structure. -pub struct SentPattern { - pub base: MetricPattern26, - pub bitcoin: BlockCountPattern, - pub dollars: SumCumPattern, - pub dollars_source: MetricPattern26, - pub sats: SumCumPattern, -} - -impl SentPattern { - /// Create a new pattern node with accumulated metric name. - pub fn new(client: Arc, acc: String) -> Self { - Self { - base: MetricPattern26::new(client.clone(), _m(&acc, "height_fee")), - bitcoin: BlockCountPattern::new(client.clone(), _m(&acc, "btc")), - dollars: SumCumPattern::new(client.clone(), _m(&acc, "usd")), - dollars_source: MetricPattern26::new(client.clone(), _m(&acc, "usd")), - sats: SumCumPattern::new(client.clone(), _m(&acc, "fee")), - } - } -} - -/// Pattern struct for repeated tree structure. -pub struct SupplyPattern3 { - pub supply: SupplyPattern2, - pub supply_half: ActiveSupplyPattern, - pub supply_half_value: ActiveSupplyPattern, - pub supply_value: SupplyValuePattern, - pub utxo_count: MetricPattern1, -} - -impl SupplyPattern3 { - /// Create a new pattern node with accumulated metric name. 
- pub fn new(client: Arc, acc: String) -> Self { - Self { - supply: SupplyPattern2::new(client.clone(), _m(&acc, "supply")), - supply_half: ActiveSupplyPattern::new(client.clone(), _m(&acc, "supply_half")), - supply_half_value: ActiveSupplyPattern::new(client.clone(), _m(&acc, "supply_half")), - supply_value: SupplyValuePattern::new(client.clone(), _m(&acc, "supply")), - utxo_count: MetricPattern1::new(client.clone(), _m(&acc, "utxo_count")), - } - } -} - /// Pattern struct for repeated tree structure. pub struct PercentilesPattern { - pub median: MetricPattern22, - pub pct10: MetricPattern22, - pub pct25: MetricPattern22, - pub pct75: MetricPattern22, - pub pct90: MetricPattern22, + pub median: MetricPattern19, + pub pct10: MetricPattern19, + pub pct25: MetricPattern19, + pub pct75: MetricPattern19, + pub pct90: MetricPattern19, } impl PercentilesPattern { /// Create a new pattern node with accumulated metric name. pub fn new(client: Arc, acc: String) -> Self { Self { - median: MetricPattern22::new(client.clone(), _m(&acc, "median")), - pct10: MetricPattern22::new(client.clone(), _m(&acc, "pct10")), - pct25: MetricPattern22::new(client.clone(), _m(&acc, "pct25")), - pct75: MetricPattern22::new(client.clone(), _m(&acc, "pct75")), - pct90: MetricPattern22::new(client.clone(), _m(&acc, "pct90")), + median: MetricPattern19::new(client.clone(), _m(&acc, "median")), + pct10: MetricPattern19::new(client.clone(), _m(&acc, "pct10")), + pct25: MetricPattern19::new(client.clone(), _m(&acc, "pct25")), + pct75: MetricPattern19::new(client.clone(), _m(&acc, "pct75")), + pct90: MetricPattern19::new(client.clone(), _m(&acc, "pct90")), } } } /// Pattern struct for repeated tree structure. -pub struct SupplyPattern2 { - pub base: MetricPattern26, - pub bitcoin: MetricPattern5, - pub dollars: MetricPattern5, - pub sats: MetricPattern7, -} - -impl SupplyPattern2 { - /// Create a new pattern node with accumulated metric name. 
- pub fn new(client: Arc, acc: String) -> Self { - Self { - base: MetricPattern26::new(client.clone(), acc.clone()), - bitcoin: MetricPattern5::new(client.clone(), _m(&acc, "btc")), - dollars: MetricPattern5::new(client.clone(), _m(&acc, "usd")), - sats: MetricPattern7::new(client.clone(), acc.clone()), - } - } -} - -/// Pattern struct for repeated tree structure. -pub struct PriceHighInSatsPattern { - pub dateindex: MetricPattern22, - pub height: MetricPattern26, - pub max: MetricPattern24, - pub rest: MetricPattern7, -} - -impl PriceHighInSatsPattern { - /// Create a new pattern node with accumulated metric name. - pub fn new(client: Arc, acc: String) -> Self { - Self { - dateindex: MetricPattern22::new(client.clone(), acc.clone()), - height: MetricPattern26::new(client.clone(), acc.clone()), - max: MetricPattern24::new(client.clone(), _m(&acc, "max")), - rest: MetricPattern7::new(client.clone(), _m(&acc, "max")), - } - } -} - -/// Pattern struct for repeated tree structure. -pub struct PriceLowInSatsPattern { - pub dateindex: MetricPattern22, - pub height: MetricPattern26, - pub min: MetricPattern24, - pub rest: MetricPattern7, -} - -impl PriceLowInSatsPattern { - /// Create a new pattern node with accumulated metric name. - pub fn new(client: Arc, acc: String) -> Self { - Self { - dateindex: MetricPattern22::new(client.clone(), acc.clone()), - height: MetricPattern26::new(client.clone(), acc.clone()), - min: MetricPattern24::new(client.clone(), _m(&acc, "min")), - rest: MetricPattern7::new(client.clone(), _m(&acc, "min")), - } - } -} - -/// Pattern struct for repeated tree structure. 
-pub struct BlockIntervalPattern { - pub average: MetricPattern22, - pub max: MetricPattern22, - pub min: MetricPattern22, +pub struct IntervalPattern { + pub average: MetricPattern2, + pub base: MetricPattern23, + pub max: MetricPattern2, + pub min: MetricPattern2, pub percentiles: PercentilesPattern, } -impl BlockIntervalPattern { +impl IntervalPattern { /// Create a new pattern node with accumulated metric name. pub fn new(client: Arc, acc: String) -> Self { Self { - average: MetricPattern22::new(client.clone(), _m(&acc, "average")), - max: MetricPattern22::new(client.clone(), _m(&acc, "max")), - min: MetricPattern22::new(client.clone(), _m(&acc, "min")), + average: MetricPattern2::new(client.clone(), _m(&acc, "average")), + base: MetricPattern23::new(client.clone(), acc.clone()), + max: MetricPattern2::new(client.clone(), _m(&acc, "max")), + min: MetricPattern2::new(client.clone(), _m(&acc, "min")), percentiles: PercentilesPattern::new(client.clone(), acc.clone()), } } } /// Pattern struct for repeated tree structure. -pub struct ActiveSupplyPattern { - pub bitcoin: MetricPattern1, - pub dollars: MetricPattern1, - pub sats: MetricPattern1, +pub struct SupplyInLossPattern { + pub base: MetricPattern7, + pub bitcoin: MetricPattern4, + pub dollars: MetricPattern4, + pub sats: MetricPattern5, } -impl ActiveSupplyPattern { +impl SupplyInLossPattern { /// Create a new pattern node with accumulated metric name. pub fn new(client: Arc, acc: String) -> Self { Self { - bitcoin: MetricPattern1::new(client.clone(), _m(&acc, "btc")), - dollars: MetricPattern1::new(client.clone(), _m(&acc, "usd")), - sats: MetricPattern1::new(client.clone(), acc.clone()), + base: MetricPattern7::new(client.clone(), acc.clone()), + bitcoin: MetricPattern4::new(client.clone(), _m(&acc, "btc")), + dollars: MetricPattern4::new(client.clone(), _m(&acc, "usd")), + sats: MetricPattern5::new(client.clone(), acc.clone()), } } } /// Pattern struct for repeated tree structure. 
-pub struct CostBasisPattern2 { - pub max_cost_basis: MetricPattern1, - pub min_cost_basis: MetricPattern1, - pub percentiles: PercentilesPattern2, +pub struct PriceHighSatsPattern { + pub dateindex: MetricPattern19, + pub difficultyepoch: MetricPattern21, + pub height: MetricPattern23, + pub rest: MetricPattern5, } -impl CostBasisPattern2 { +impl PriceHighSatsPattern { /// Create a new pattern node with accumulated metric name. pub fn new(client: Arc, acc: String) -> Self { Self { - max_cost_basis: MetricPattern1::new(client.clone(), _m(&acc, "max_cost_basis")), - min_cost_basis: MetricPattern1::new(client.clone(), _m(&acc, "min_cost_basis")), - percentiles: PercentilesPattern2::new(client.clone(), _m(&acc, "cost_basis")), + dateindex: MetricPattern19::new(client.clone(), acc.clone()), + difficultyepoch: MetricPattern21::new(client.clone(), _m(&acc, "max")), + height: MetricPattern23::new(client.clone(), acc.clone()), + rest: MetricPattern5::new(client.clone(), _m(&acc, "max")), } } } /// Pattern struct for repeated tree structure. -pub struct CoinbasePattern { - pub bitcoin: BitcoinPattern, - pub dollars: DollarsPattern, - pub sats: DollarsPattern, +pub struct TxVsizePattern { + pub average: MetricPattern1, + pub max: MetricPattern1, + pub min: MetricPattern1, + pub percentiles: PercentilesPattern, } -impl CoinbasePattern { +impl TxVsizePattern { /// Create a new pattern node with accumulated metric name. 
pub fn new(client: Arc, acc: String) -> Self { Self { - bitcoin: BitcoinPattern::new(client.clone(), _m(&acc, "btc")), - dollars: DollarsPattern::new(client.clone(), _m(&acc, "usd")), - sats: DollarsPattern::new(client.clone(), acc.clone()), + average: MetricPattern1::new(client.clone(), _m(&acc, "average")), + max: MetricPattern1::new(client.clone(), _m(&acc, "max")), + min: MetricPattern1::new(client.clone(), _m(&acc, "min")), + percentiles: PercentilesPattern::new(client.clone(), acc.clone()), } } } @@ -4396,17 +3971,107 @@ impl UnclaimedRewardsPattern { } /// Pattern struct for repeated tree structure. -pub struct BlockCountPattern { - pub base: MetricPattern26, +pub struct CostBasisPattern2 { + pub max: MetricPattern1, + pub min: MetricPattern1, + pub percentiles: PercentilesPattern2, +} + +impl CostBasisPattern2 { + /// Create a new pattern node with accumulated metric name. + pub fn new(client: Arc, acc: String) -> Self { + Self { + max: MetricPattern1::new(client.clone(), _m(&acc, "max_cost_basis")), + min: MetricPattern1::new(client.clone(), _m(&acc, "min_cost_basis")), + percentiles: PercentilesPattern2::new(client.clone(), _m(&acc, "cost_basis")), + } + } +} + +/// Pattern struct for repeated tree structure. +pub struct ActiveSupplyPattern { + pub bitcoin: MetricPattern1, + pub dollars: MetricPattern1, + pub sats: MetricPattern1, +} + +impl ActiveSupplyPattern { + /// Create a new pattern node with accumulated metric name. + pub fn new(client: Arc, acc: String) -> Self { + Self { + bitcoin: MetricPattern1::new(client.clone(), _m(&acc, "btc")), + dollars: MetricPattern1::new(client.clone(), _m(&acc, "usd")), + sats: MetricPattern1::new(client.clone(), acc.clone()), + } + } +} + +/// Pattern struct for repeated tree structure. +pub struct SentPattern { + pub bitcoin: BlockCountPattern, + pub dollars: BlockCountPattern, + pub sats: SatsPattern, +} + +impl SentPattern { + /// Create a new pattern node with accumulated metric name. 
+ pub fn new(client: Arc, acc: String) -> Self { + Self { + bitcoin: BlockCountPattern::new(client.clone(), _m(&acc, "btc")), + dollars: BlockCountPattern::new(client.clone(), _m(&acc, "usd")), + sats: SatsPattern::new(client.clone(), acc.clone()), + } + } +} + +/// Pattern struct for repeated tree structure. +pub struct CoinbasePattern { + pub bitcoin: BitcoinPattern, + pub dollars: BitcoinPattern, + pub sats: BitcoinPattern, +} + +impl CoinbasePattern { + /// Create a new pattern node with accumulated metric name. + pub fn new(client: Arc, acc: String) -> Self { + Self { + bitcoin: BitcoinPattern::new(client.clone(), _m(&acc, "btc")), + dollars: BitcoinPattern::new(client.clone(), _m(&acc, "usd")), + sats: BitcoinPattern::new(client.clone(), acc.clone()), + } + } +} + +/// Pattern struct for repeated tree structure. +pub struct SupplyPattern2 { + pub supply: ActiveSupplyPattern, + pub supply_half: ActiveSupplyPattern, + pub utxo_count: MetricPattern1, +} + +impl SupplyPattern2 { + /// Create a new pattern node with accumulated metric name. + pub fn new(client: Arc, acc: String) -> Self { + Self { + supply: ActiveSupplyPattern::new(client.clone(), _m(&acc, "supply")), + supply_half: ActiveSupplyPattern::new(client.clone(), _m(&acc, "supply_half")), + utxo_count: MetricPattern1::new(client.clone(), _m(&acc, "utxo_count")), + } + } +} + +/// Pattern struct for repeated tree structure. +pub struct SatsPattern { + pub base: MetricPattern23, pub cumulative: MetricPattern2, pub sum: MetricPattern2, } -impl BlockCountPattern { +impl SatsPattern { /// Create a new pattern node with accumulated metric name. 
pub fn new(client: Arc, acc: String) -> Self { Self { - base: MetricPattern26::new(client.clone(), acc.clone()), + base: MetricPattern23::new(client.clone(), acc.clone()), cumulative: MetricPattern2::new(client.clone(), _m(&acc, "cumulative")), sum: MetricPattern2::new(client.clone(), _m(&acc, "sum")), } @@ -4414,17 +4079,49 @@ impl BlockCountPattern { } /// Pattern struct for repeated tree structure. -pub struct SupplyValuePattern { - pub bitcoin: MetricPattern26, - pub dollars: MetricPattern26, +pub struct CostBasisPattern { + pub max: MetricPattern1, + pub min: MetricPattern1, } -impl SupplyValuePattern { +impl CostBasisPattern { /// Create a new pattern node with accumulated metric name. pub fn new(client: Arc, acc: String) -> Self { Self { - bitcoin: MetricPattern26::new(client.clone(), _m(&acc, "btc")), - dollars: MetricPattern26::new(client.clone(), _m(&acc, "usd")), + max: MetricPattern1::new(client.clone(), _m(&acc, "max_cost_basis")), + min: MetricPattern1::new(client.clone(), _m(&acc, "min_cost_basis")), + } + } +} + +/// Pattern struct for repeated tree structure. +pub struct SupplyInLossValuePattern { + pub bitcoin: MetricPattern23, + pub dollars: MetricPattern23, +} + +impl SupplyInLossValuePattern { + /// Create a new pattern node with accumulated metric name. + pub fn new(client: Arc, acc: String) -> Self { + Self { + bitcoin: MetricPattern23::new(client.clone(), _m(&acc, "btc")), + dollars: MetricPattern23::new(client.clone(), _m(&acc, "usd")), + } + } +} + +/// Pattern struct for repeated tree structure. +pub struct _1dReturns1mSdPattern { + pub sd: MetricPattern4, + pub sma: MetricPattern4, +} + +impl _1dReturns1mSdPattern { + /// Create a new pattern node with accumulated metric name. 
+ pub fn new(client: Arc, acc: String) -> Self { + Self { + sd: MetricPattern4::new(client.clone(), _m(&acc, "sd")), + sma: MetricPattern4::new(client.clone(), _m(&acc, "sma")), } } } @@ -4446,111 +4143,31 @@ impl RelativePattern4 { } /// Pattern struct for repeated tree structure. -pub struct CostBasisPattern { - pub max_cost_basis: MetricPattern1, - pub min_cost_basis: MetricPattern1, -} - -impl CostBasisPattern { - /// Create a new pattern node with accumulated metric name. - pub fn new(client: Arc, acc: String) -> Self { - Self { - max_cost_basis: MetricPattern1::new(client.clone(), _m(&acc, "max_cost_basis")), - min_cost_basis: MetricPattern1::new(client.clone(), _m(&acc, "min_cost_basis")), - } - } -} - -/// Pattern struct for repeated tree structure. -pub struct _1dReturns1mSdPattern { - pub sd: MetricPattern5, - pub sma: MetricPattern5, -} - -impl _1dReturns1mSdPattern { - /// Create a new pattern node with accumulated metric name. - pub fn new(client: Arc, acc: String) -> Self { - Self { - sd: MetricPattern5::new(client.clone(), _m(&acc, "sd")), - sma: MetricPattern5::new(client.clone(), _m(&acc, "sma")), - } - } -} - -/// Pattern struct for repeated tree structure. -pub struct MinmaxPattern { - pub max: MetricPattern22, - pub min: MetricPattern22, -} - -impl MinmaxPattern { - /// Create a new pattern node with accumulated metric name. - pub fn new(client: Arc, acc: String) -> Self { - Self { - max: MetricPattern22::new(client.clone(), _m(&acc, "max")), - min: MetricPattern22::new(client.clone(), _m(&acc, "min")), - } - } -} - -/// Pattern struct for repeated tree structure. -pub struct SumCumPattern { +pub struct BlockCountPattern { pub cumulative: MetricPattern1, - pub sum: MetricPattern2, + pub sum: MetricPattern1, } -impl SumCumPattern { +impl BlockCountPattern { /// Create a new pattern node with accumulated metric name. 
pub fn new(client: Arc, acc: String) -> Self { Self { cumulative: MetricPattern1::new(client.clone(), _m(&acc, "cumulative")), - sum: MetricPattern2::new(client.clone(), _m(&acc, "sum")), - } - } -} - -/// Pattern struct for repeated tree structure. -pub struct IndexesPattern2 { - pub dateindex: MetricPattern22, - pub rest: MetricPattern7, -} - -impl IndexesPattern2 { - /// Create a new pattern node with accumulated metric name. - pub fn new(client: Arc, acc: String) -> Self { - Self { - dateindex: MetricPattern22::new(client.clone(), acc.clone()), - rest: MetricPattern7::new(client.clone(), _m(&acc, "average")), - } - } -} - -/// Pattern struct for repeated tree structure. -pub struct DifficultyAdjustmentPattern { - pub base: MetricPattern26, - pub rest: MetricPattern2, -} - -impl DifficultyAdjustmentPattern { - /// Create a new pattern node with accumulated metric name. - pub fn new(client: Arc, acc: String) -> Self { - Self { - base: MetricPattern26::new(client.clone(), acc.clone()), - rest: MetricPattern2::new(client.clone(), _m(&acc, "sum")), + sum: MetricPattern1::new(client.clone(), acc.clone()), } } } /// Pattern struct for repeated tree structure. pub struct RealizedPriceExtraPattern { - pub ratio: MetricPattern5, + pub ratio: MetricPattern4, } impl RealizedPriceExtraPattern { /// Create a new pattern node with accumulated metric name. pub fn new(client: Arc, acc: String) -> Self { Self { - ratio: MetricPattern5::new(client.clone(), _m(&acc, "ratio")), + ratio: MetricPattern4::new(client.clone(), _m(&acc, "ratio")), } } } @@ -4559,163 +4176,194 @@ impl RealizedPriceExtraPattern { /// Catalog tree node. 
pub struct CatalogTree { - pub computed: CatalogTree_Computed, - pub indexed: CatalogTree_Indexed, + pub addresses: CatalogTree_Addresses, + pub blocks: CatalogTree_Blocks, + pub cointime: CatalogTree_Cointime, + pub constants: CatalogTree_Constants, + pub distribution: CatalogTree_Distribution, + pub indexes: CatalogTree_Indexes, + pub inputs: CatalogTree_Inputs, + pub market: CatalogTree_Market, + pub outputs: CatalogTree_Outputs, + pub pools: CatalogTree_Pools, + pub positions: CatalogTree_Positions, + pub price: CatalogTree_Price, + pub scripts: CatalogTree_Scripts, + pub supply: CatalogTree_Supply, + pub transactions: CatalogTree_Transactions, } impl CatalogTree { pub fn new(client: Arc, base_path: String) -> Self { Self { - computed: CatalogTree_Computed::new(client.clone(), format!("{base_path}_computed")), - indexed: CatalogTree_Indexed::new(client.clone(), format!("{base_path}_indexed")), + addresses: CatalogTree_Addresses::new(client.clone(), format!("{base_path}_addresses")), + blocks: CatalogTree_Blocks::new(client.clone(), format!("{base_path}_blocks")), + cointime: CatalogTree_Cointime::new(client.clone(), format!("{base_path}_cointime")), + constants: CatalogTree_Constants::new(client.clone(), format!("{base_path}_constants")), + distribution: CatalogTree_Distribution::new(client.clone(), format!("{base_path}_distribution")), + indexes: CatalogTree_Indexes::new(client.clone(), format!("{base_path}_indexes")), + inputs: CatalogTree_Inputs::new(client.clone(), format!("{base_path}_inputs")), + market: CatalogTree_Market::new(client.clone(), format!("{base_path}_market")), + outputs: CatalogTree_Outputs::new(client.clone(), format!("{base_path}_outputs")), + pools: CatalogTree_Pools::new(client.clone(), format!("{base_path}_pools")), + positions: CatalogTree_Positions::new(client.clone(), format!("{base_path}_positions")), + price: CatalogTree_Price::new(client.clone(), format!("{base_path}_price")), + scripts: CatalogTree_Scripts::new(client.clone(), 
format!("{base_path}_scripts")), + supply: CatalogTree_Supply::new(client.clone(), format!("{base_path}_supply")), + transactions: CatalogTree_Transactions::new(client.clone(), format!("{base_path}_transactions")), } } } /// Catalog tree node. -pub struct CatalogTree_Computed { - pub blocks: CatalogTree_Computed_Blocks, - pub cointime: CatalogTree_Computed_Cointime, - pub constants: CatalogTree_Computed_Constants, - pub distribution: CatalogTree_Computed_Distribution, - pub indexes: CatalogTree_Computed_Indexes, - pub inputs: CatalogTree_Computed_Inputs, - pub market: CatalogTree_Computed_Market, - pub outputs: CatalogTree_Computed_Outputs, - pub pools: CatalogTree_Computed_Pools, - pub positions: CatalogTree_Computed_Positions, - pub price: CatalogTree_Computed_Price, - pub scripts: CatalogTree_Computed_Scripts, - pub supply: CatalogTree_Computed_Supply, - pub transactions: CatalogTree_Computed_Transactions, +pub struct CatalogTree_Addresses { + pub first_p2aaddressindex: MetricPattern23, + pub first_p2pk33addressindex: MetricPattern23, + pub first_p2pk65addressindex: MetricPattern23, + pub first_p2pkhaddressindex: MetricPattern23, + pub first_p2shaddressindex: MetricPattern23, + pub first_p2traddressindex: MetricPattern23, + pub first_p2wpkhaddressindex: MetricPattern23, + pub first_p2wshaddressindex: MetricPattern23, + pub p2abytes: MetricPattern27, + pub p2pk33bytes: MetricPattern29, + pub p2pk65bytes: MetricPattern30, + pub p2pkhbytes: MetricPattern31, + pub p2shbytes: MetricPattern32, + pub p2trbytes: MetricPattern33, + pub p2wpkhbytes: MetricPattern34, + pub p2wshbytes: MetricPattern35, } -impl CatalogTree_Computed { +impl CatalogTree_Addresses { pub fn new(client: Arc, base_path: String) -> Self { Self { - blocks: CatalogTree_Computed_Blocks::new(client.clone(), format!("{base_path}_blocks")), - cointime: CatalogTree_Computed_Cointime::new(client.clone(), format!("{base_path}_cointime")), - constants: CatalogTree_Computed_Constants::new(client.clone(), 
format!("{base_path}_constants")), - distribution: CatalogTree_Computed_Distribution::new(client.clone(), format!("{base_path}_distribution")), - indexes: CatalogTree_Computed_Indexes::new(client.clone(), format!("{base_path}_indexes")), - inputs: CatalogTree_Computed_Inputs::new(client.clone(), format!("{base_path}_inputs")), - market: CatalogTree_Computed_Market::new(client.clone(), format!("{base_path}_market")), - outputs: CatalogTree_Computed_Outputs::new(client.clone(), format!("{base_path}_outputs")), - pools: CatalogTree_Computed_Pools::new(client.clone(), format!("{base_path}_pools")), - positions: CatalogTree_Computed_Positions::new(client.clone(), format!("{base_path}_positions")), - price: CatalogTree_Computed_Price::new(client.clone(), format!("{base_path}_price")), - scripts: CatalogTree_Computed_Scripts::new(client.clone(), format!("{base_path}_scripts")), - supply: CatalogTree_Computed_Supply::new(client.clone(), format!("{base_path}_supply")), - transactions: CatalogTree_Computed_Transactions::new(client.clone(), format!("{base_path}_transactions")), + first_p2aaddressindex: MetricPattern23::new(client.clone(), format!("{base_path}_first_p2aaddressindex")), + first_p2pk33addressindex: MetricPattern23::new(client.clone(), format!("{base_path}_first_p2pk33addressindex")), + first_p2pk65addressindex: MetricPattern23::new(client.clone(), format!("{base_path}_first_p2pk65addressindex")), + first_p2pkhaddressindex: MetricPattern23::new(client.clone(), format!("{base_path}_first_p2pkhaddressindex")), + first_p2shaddressindex: MetricPattern23::new(client.clone(), format!("{base_path}_first_p2shaddressindex")), + first_p2traddressindex: MetricPattern23::new(client.clone(), format!("{base_path}_first_p2traddressindex")), + first_p2wpkhaddressindex: MetricPattern23::new(client.clone(), format!("{base_path}_first_p2wpkhaddressindex")), + first_p2wshaddressindex: MetricPattern23::new(client.clone(), format!("{base_path}_first_p2wshaddressindex")), + p2abytes: 
MetricPattern27::new(client.clone(), format!("{base_path}_p2abytes")), + p2pk33bytes: MetricPattern29::new(client.clone(), format!("{base_path}_p2pk33bytes")), + p2pk65bytes: MetricPattern30::new(client.clone(), format!("{base_path}_p2pk65bytes")), + p2pkhbytes: MetricPattern31::new(client.clone(), format!("{base_path}_p2pkhbytes")), + p2shbytes: MetricPattern32::new(client.clone(), format!("{base_path}_p2shbytes")), + p2trbytes: MetricPattern33::new(client.clone(), format!("{base_path}_p2trbytes")), + p2wpkhbytes: MetricPattern34::new(client.clone(), format!("{base_path}_p2wpkhbytes")), + p2wshbytes: MetricPattern35::new(client.clone(), format!("{base_path}_p2wshbytes")), } } } /// Catalog tree node. -pub struct CatalogTree_Computed_Blocks { - pub count: CatalogTree_Computed_Blocks_Count, - pub difficulty: CatalogTree_Computed_Blocks_Difficulty, - pub halving: CatalogTree_Computed_Blocks_Halving, - pub interval: CatalogTree_Computed_Blocks_Interval, - pub mining: CatalogTree_Computed_Blocks_Mining, - pub rewards: CatalogTree_Computed_Blocks_Rewards, - pub size: CatalogTree_Computed_Blocks_Size, - pub time: CatalogTree_Computed_Blocks_Time, - pub weight: CatalogTree_Computed_Blocks_Weight, +pub struct CatalogTree_Blocks { + pub blockhash: MetricPattern23, + pub count: CatalogTree_Blocks_Count, + pub difficulty: CatalogTree_Blocks_Difficulty, + pub halving: CatalogTree_Blocks_Halving, + pub interval: IntervalPattern, + pub mining: CatalogTree_Blocks_Mining, + pub rewards: CatalogTree_Blocks_Rewards, + pub size: CatalogTree_Blocks_Size, + pub time: CatalogTree_Blocks_Time, + pub timestamp: MetricPattern23, + pub total_size: MetricPattern23, + pub weight: CatalogTree_Blocks_Weight, } -impl CatalogTree_Computed_Blocks { +impl CatalogTree_Blocks { pub fn new(client: Arc, base_path: String) -> Self { Self { - count: CatalogTree_Computed_Blocks_Count::new(client.clone(), format!("{base_path}_count")), - difficulty: 
CatalogTree_Computed_Blocks_Difficulty::new(client.clone(), format!("{base_path}_difficulty")), - halving: CatalogTree_Computed_Blocks_Halving::new(client.clone(), format!("{base_path}_halving")), - interval: CatalogTree_Computed_Blocks_Interval::new(client.clone(), format!("{base_path}_interval")), - mining: CatalogTree_Computed_Blocks_Mining::new(client.clone(), format!("{base_path}_mining")), - rewards: CatalogTree_Computed_Blocks_Rewards::new(client.clone(), format!("{base_path}_rewards")), - size: CatalogTree_Computed_Blocks_Size::new(client.clone(), format!("{base_path}_size")), - time: CatalogTree_Computed_Blocks_Time::new(client.clone(), format!("{base_path}_time")), - weight: CatalogTree_Computed_Blocks_Weight::new(client.clone(), format!("{base_path}_weight")), + blockhash: MetricPattern23::new(client.clone(), format!("{base_path}_blockhash")), + count: CatalogTree_Blocks_Count::new(client.clone(), format!("{base_path}_count")), + difficulty: CatalogTree_Blocks_Difficulty::new(client.clone(), format!("{base_path}_difficulty")), + halving: CatalogTree_Blocks_Halving::new(client.clone(), format!("{base_path}_halving")), + interval: IntervalPattern::new(client.clone(), "block_interval".to_string()), + mining: CatalogTree_Blocks_Mining::new(client.clone(), format!("{base_path}_mining")), + rewards: CatalogTree_Blocks_Rewards::new(client.clone(), format!("{base_path}_rewards")), + size: CatalogTree_Blocks_Size::new(client.clone(), format!("{base_path}_size")), + time: CatalogTree_Blocks_Time::new(client.clone(), format!("{base_path}_time")), + timestamp: MetricPattern23::new(client.clone(), format!("{base_path}_timestamp")), + total_size: MetricPattern23::new(client.clone(), format!("{base_path}_total_size")), + weight: CatalogTree_Blocks_Weight::new(client.clone(), format!("{base_path}_weight")), } } } /// Catalog tree node. 
-pub struct CatalogTree_Computed_Blocks_Count { - pub _1m_block_count: MetricPattern5, - pub _1w_block_count: MetricPattern5, - pub _1y_block_count: MetricPattern5, - pub _24h_block_count: MetricPattern26, +pub struct CatalogTree_Blocks_Count { + pub _1m_block_count: MetricPattern1, + pub _1m_start: MetricPattern23, + pub _1w_block_count: MetricPattern1, + pub _1w_start: MetricPattern23, + pub _1y_block_count: MetricPattern1, + pub _1y_start: MetricPattern23, + pub _24h_block_count: MetricPattern1, + pub _24h_start: MetricPattern23, pub block_count: BlockCountPattern, - pub block_count_target: MetricPattern5, + pub block_count_target: MetricPattern4, } -impl CatalogTree_Computed_Blocks_Count { +impl CatalogTree_Blocks_Count { pub fn new(client: Arc, base_path: String) -> Self { Self { - _1m_block_count: MetricPattern5::new(client.clone(), format!("{base_path}_1m_block_count")), - _1w_block_count: MetricPattern5::new(client.clone(), format!("{base_path}_1w_block_count")), - _1y_block_count: MetricPattern5::new(client.clone(), format!("{base_path}_1y_block_count")), - _24h_block_count: MetricPattern26::new(client.clone(), format!("{base_path}_24h_block_count")), + _1m_block_count: MetricPattern1::new(client.clone(), format!("{base_path}_1m_block_count")), + _1m_start: MetricPattern23::new(client.clone(), format!("{base_path}_1m_start")), + _1w_block_count: MetricPattern1::new(client.clone(), format!("{base_path}_1w_block_count")), + _1w_start: MetricPattern23::new(client.clone(), format!("{base_path}_1w_start")), + _1y_block_count: MetricPattern1::new(client.clone(), format!("{base_path}_1y_block_count")), + _1y_start: MetricPattern23::new(client.clone(), format!("{base_path}_1y_start")), + _24h_block_count: MetricPattern1::new(client.clone(), format!("{base_path}_24h_block_count")), + _24h_start: MetricPattern23::new(client.clone(), format!("{base_path}_24h_start")), block_count: BlockCountPattern::new(client.clone(), "block_count".to_string()), - 
block_count_target: MetricPattern5::new(client.clone(), format!("{base_path}_block_count_target")), + block_count_target: MetricPattern4::new(client.clone(), format!("{base_path}_block_count_target")), } } } /// Catalog tree node. -pub struct CatalogTree_Computed_Blocks_Difficulty { +pub struct CatalogTree_Blocks_Difficulty { + pub base: MetricPattern23, pub blocks_before_next_difficulty_adjustment: MetricPattern1, pub days_before_next_difficulty_adjustment: MetricPattern1, - pub difficultyepoch: MetricPattern5, + pub difficultyepoch: MetricPattern4, } -impl CatalogTree_Computed_Blocks_Difficulty { +impl CatalogTree_Blocks_Difficulty { pub fn new(client: Arc, base_path: String) -> Self { Self { + base: MetricPattern23::new(client.clone(), format!("{base_path}_base")), blocks_before_next_difficulty_adjustment: MetricPattern1::new(client.clone(), format!("{base_path}_blocks_before_next_difficulty_adjustment")), days_before_next_difficulty_adjustment: MetricPattern1::new(client.clone(), format!("{base_path}_days_before_next_difficulty_adjustment")), - difficultyepoch: MetricPattern5::new(client.clone(), format!("{base_path}_difficultyepoch")), + difficultyepoch: MetricPattern4::new(client.clone(), format!("{base_path}_difficultyepoch")), } } } /// Catalog tree node. 
-pub struct CatalogTree_Computed_Blocks_Halving { +pub struct CatalogTree_Blocks_Halving { pub blocks_before_next_halving: MetricPattern1, pub days_before_next_halving: MetricPattern1, - pub halvingepoch: MetricPattern5, + pub halvingepoch: MetricPattern4, } -impl CatalogTree_Computed_Blocks_Halving { +impl CatalogTree_Blocks_Halving { pub fn new(client: Arc, base_path: String) -> Self { Self { blocks_before_next_halving: MetricPattern1::new(client.clone(), format!("{base_path}_blocks_before_next_halving")), days_before_next_halving: MetricPattern1::new(client.clone(), format!("{base_path}_days_before_next_halving")), - halvingepoch: MetricPattern5::new(client.clone(), format!("{base_path}_halvingepoch")), + halvingepoch: MetricPattern4::new(client.clone(), format!("{base_path}_halvingepoch")), } } } /// Catalog tree node. -pub struct CatalogTree_Computed_Blocks_Interval { - pub block_interval: BlockIntervalPattern, - pub interval: MetricPattern26, -} - -impl CatalogTree_Computed_Blocks_Interval { - pub fn new(client: Arc, base_path: String) -> Self { - Self { - block_interval: BlockIntervalPattern::new(client.clone(), "block_interval".to_string()), - interval: MetricPattern26::new(client.clone(), format!("{base_path}_interval")), - } - } -} - -/// Catalog tree node. 
-pub struct CatalogTree_Computed_Blocks_Mining { +pub struct CatalogTree_Blocks_Mining { pub difficulty: MetricPattern2, - pub difficulty_adjustment: DifficultyAdjustmentPattern, + pub difficulty_adjustment: MetricPattern1, pub difficulty_as_hash: MetricPattern1, pub hash_price_phs: MetricPattern1, pub hash_price_phs_min: MetricPattern1, @@ -4723,10 +4371,10 @@ pub struct CatalogTree_Computed_Blocks_Mining { pub hash_price_ths: MetricPattern1, pub hash_price_ths_min: MetricPattern1, pub hash_rate: MetricPattern1, - pub hash_rate_1m_sma: MetricPattern5, - pub hash_rate_1w_sma: MetricPattern5, - pub hash_rate_1y_sma: MetricPattern5, - pub hash_rate_2m_sma: MetricPattern5, + pub hash_rate_1m_sma: MetricPattern4, + pub hash_rate_1w_sma: MetricPattern4, + pub hash_rate_1y_sma: MetricPattern4, + pub hash_rate_2m_sma: MetricPattern4, pub hash_value_phs: MetricPattern1, pub hash_value_phs_min: MetricPattern1, pub hash_value_rebound: MetricPattern1, @@ -4734,11 +4382,11 @@ pub struct CatalogTree_Computed_Blocks_Mining { pub hash_value_ths_min: MetricPattern1, } -impl CatalogTree_Computed_Blocks_Mining { +impl CatalogTree_Blocks_Mining { pub fn new(client: Arc, base_path: String) -> Self { Self { difficulty: MetricPattern2::new(client.clone(), format!("{base_path}_difficulty")), - difficulty_adjustment: DifficultyAdjustmentPattern::new(client.clone(), "difficulty_adjustment".to_string()), + difficulty_adjustment: MetricPattern1::new(client.clone(), format!("{base_path}_difficulty_adjustment")), difficulty_as_hash: MetricPattern1::new(client.clone(), format!("{base_path}_difficulty_as_hash")), hash_price_phs: MetricPattern1::new(client.clone(), format!("{base_path}_hash_price_phs")), hash_price_phs_min: MetricPattern1::new(client.clone(), format!("{base_path}_hash_price_phs_min")), @@ -4746,10 +4394,10 @@ impl CatalogTree_Computed_Blocks_Mining { hash_price_ths: MetricPattern1::new(client.clone(), format!("{base_path}_hash_price_ths")), hash_price_ths_min: 
MetricPattern1::new(client.clone(), format!("{base_path}_hash_price_ths_min")), hash_rate: MetricPattern1::new(client.clone(), format!("{base_path}_hash_rate")), - hash_rate_1m_sma: MetricPattern5::new(client.clone(), format!("{base_path}_hash_rate_1m_sma")), - hash_rate_1w_sma: MetricPattern5::new(client.clone(), format!("{base_path}_hash_rate_1w_sma")), - hash_rate_1y_sma: MetricPattern5::new(client.clone(), format!("{base_path}_hash_rate_1y_sma")), - hash_rate_2m_sma: MetricPattern5::new(client.clone(), format!("{base_path}_hash_rate_2m_sma")), + hash_rate_1m_sma: MetricPattern4::new(client.clone(), format!("{base_path}_hash_rate_1m_sma")), + hash_rate_1w_sma: MetricPattern4::new(client.clone(), format!("{base_path}_hash_rate_1w_sma")), + hash_rate_1y_sma: MetricPattern4::new(client.clone(), format!("{base_path}_hash_rate_1y_sma")), + hash_rate_2m_sma: MetricPattern4::new(client.clone(), format!("{base_path}_hash_rate_2m_sma")), hash_value_phs: MetricPattern1::new(client.clone(), format!("{base_path}_hash_value_phs")), hash_value_phs_min: MetricPattern1::new(client.clone(), format!("{base_path}_hash_value_phs_min")), hash_value_rebound: MetricPattern1::new(client.clone(), format!("{base_path}_hash_value_rebound")), @@ -4760,77 +4408,79 @@ impl CatalogTree_Computed_Blocks_Mining { } /// Catalog tree node. 
-pub struct CatalogTree_Computed_Blocks_Rewards { - pub _24h_coinbase_sum: MetricPattern26, - pub _24h_coinbase_usd_sum: MetricPattern26, +pub struct CatalogTree_Blocks_Rewards { + pub _24h_coinbase_sum: MetricPattern23, + pub _24h_coinbase_usd_sum: MetricPattern23, pub coinbase: CoinbasePattern, - pub fee_dominance: MetricPattern22, + pub fee_dominance: MetricPattern19, pub subsidy: CoinbasePattern, - pub subsidy_dominance: MetricPattern22, - pub subsidy_usd_1y_sma: MetricPattern5, + pub subsidy_dominance: MetricPattern19, + pub subsidy_usd_1y_sma: MetricPattern4, pub unclaimed_rewards: UnclaimedRewardsPattern, } -impl CatalogTree_Computed_Blocks_Rewards { +impl CatalogTree_Blocks_Rewards { pub fn new(client: Arc, base_path: String) -> Self { Self { - _24h_coinbase_sum: MetricPattern26::new(client.clone(), format!("{base_path}_24h_coinbase_sum")), - _24h_coinbase_usd_sum: MetricPattern26::new(client.clone(), format!("{base_path}_24h_coinbase_usd_sum")), + _24h_coinbase_sum: MetricPattern23::new(client.clone(), format!("{base_path}_24h_coinbase_sum")), + _24h_coinbase_usd_sum: MetricPattern23::new(client.clone(), format!("{base_path}_24h_coinbase_usd_sum")), coinbase: CoinbasePattern::new(client.clone(), "coinbase".to_string()), - fee_dominance: MetricPattern22::new(client.clone(), format!("{base_path}_fee_dominance")), + fee_dominance: MetricPattern19::new(client.clone(), format!("{base_path}_fee_dominance")), subsidy: CoinbasePattern::new(client.clone(), "subsidy".to_string()), - subsidy_dominance: MetricPattern22::new(client.clone(), format!("{base_path}_subsidy_dominance")), - subsidy_usd_1y_sma: MetricPattern5::new(client.clone(), format!("{base_path}_subsidy_usd_1y_sma")), + subsidy_dominance: MetricPattern19::new(client.clone(), format!("{base_path}_subsidy_dominance")), + subsidy_usd_1y_sma: MetricPattern4::new(client.clone(), format!("{base_path}_subsidy_usd_1y_sma")), unclaimed_rewards: UnclaimedRewardsPattern::new(client.clone(), 
"unclaimed_rewards".to_string()), } } } /// Catalog tree node. -pub struct CatalogTree_Computed_Blocks_Size { +pub struct CatalogTree_Blocks_Size { pub block_size: BlockSizePattern, pub block_vbytes: BlockSizePattern, - pub vbytes: MetricPattern26, + pub vbytes: MetricPattern23, } -impl CatalogTree_Computed_Blocks_Size { +impl CatalogTree_Blocks_Size { pub fn new(client: Arc, base_path: String) -> Self { Self { block_size: BlockSizePattern::new(client.clone(), "block_size".to_string()), block_vbytes: BlockSizePattern::new(client.clone(), "block_vbytes".to_string()), - vbytes: MetricPattern26::new(client.clone(), format!("{base_path}_vbytes")), + vbytes: MetricPattern23::new(client.clone(), format!("{base_path}_vbytes")), } } } /// Catalog tree node. -pub struct CatalogTree_Computed_Blocks_Time { - pub date: MetricPattern26, - pub date_fixed: MetricPattern26, +pub struct CatalogTree_Blocks_Time { + pub date: MetricPattern23, + pub date_fixed: MetricPattern23, pub timestamp: MetricPattern2, - pub timestamp_fixed: MetricPattern26, + pub timestamp_fixed: MetricPattern23, } -impl CatalogTree_Computed_Blocks_Time { +impl CatalogTree_Blocks_Time { pub fn new(client: Arc, base_path: String) -> Self { Self { - date: MetricPattern26::new(client.clone(), format!("{base_path}_date")), - date_fixed: MetricPattern26::new(client.clone(), format!("{base_path}_date_fixed")), + date: MetricPattern23::new(client.clone(), format!("{base_path}_date")), + date_fixed: MetricPattern23::new(client.clone(), format!("{base_path}_date_fixed")), timestamp: MetricPattern2::new(client.clone(), format!("{base_path}_timestamp")), - timestamp_fixed: MetricPattern26::new(client.clone(), format!("{base_path}_timestamp_fixed")), + timestamp_fixed: MetricPattern23::new(client.clone(), format!("{base_path}_timestamp_fixed")), } } } /// Catalog tree node. 
-pub struct CatalogTree_Computed_Blocks_Weight { +pub struct CatalogTree_Blocks_Weight { + pub base: MetricPattern23, pub block_fullness: BitcoinPattern, pub block_weight: BlockSizePattern, } -impl CatalogTree_Computed_Blocks_Weight { +impl CatalogTree_Blocks_Weight { pub fn new(client: Arc, base_path: String) -> Self { Self { + base: MetricPattern23::new(client.clone(), format!("{base_path}_base")), block_fullness: BitcoinPattern::new(client.clone(), "block_fullness".to_string()), block_weight: BlockSizePattern::new(client.clone(), "block_weight".to_string()), } @@ -4838,30 +4488,30 @@ impl CatalogTree_Computed_Blocks_Weight { } /// Catalog tree node. -pub struct CatalogTree_Computed_Cointime { - pub activity: CatalogTree_Computed_Cointime_Activity, - pub adjusted: CatalogTree_Computed_Cointime_Adjusted, - pub cap: CatalogTree_Computed_Cointime_Cap, - pub pricing: CatalogTree_Computed_Cointime_Pricing, - pub supply: CatalogTree_Computed_Cointime_Supply, - pub value: CatalogTree_Computed_Cointime_Value, +pub struct CatalogTree_Cointime { + pub activity: CatalogTree_Cointime_Activity, + pub adjusted: CatalogTree_Cointime_Adjusted, + pub cap: CatalogTree_Cointime_Cap, + pub pricing: CatalogTree_Cointime_Pricing, + pub supply: CatalogTree_Cointime_Supply, + pub value: CatalogTree_Cointime_Value, } -impl CatalogTree_Computed_Cointime { +impl CatalogTree_Cointime { pub fn new(client: Arc, base_path: String) -> Self { Self { - activity: CatalogTree_Computed_Cointime_Activity::new(client.clone(), format!("{base_path}_activity")), - adjusted: CatalogTree_Computed_Cointime_Adjusted::new(client.clone(), format!("{base_path}_adjusted")), - cap: CatalogTree_Computed_Cointime_Cap::new(client.clone(), format!("{base_path}_cap")), - pricing: CatalogTree_Computed_Cointime_Pricing::new(client.clone(), format!("{base_path}_pricing")), - supply: CatalogTree_Computed_Cointime_Supply::new(client.clone(), format!("{base_path}_supply")), - value: 
CatalogTree_Computed_Cointime_Value::new(client.clone(), format!("{base_path}_value")), + activity: CatalogTree_Cointime_Activity::new(client.clone(), format!("{base_path}_activity")), + adjusted: CatalogTree_Cointime_Adjusted::new(client.clone(), format!("{base_path}_adjusted")), + cap: CatalogTree_Cointime_Cap::new(client.clone(), format!("{base_path}_cap")), + pricing: CatalogTree_Cointime_Pricing::new(client.clone(), format!("{base_path}_pricing")), + supply: CatalogTree_Cointime_Supply::new(client.clone(), format!("{base_path}_supply")), + value: CatalogTree_Cointime_Value::new(client.clone(), format!("{base_path}_value")), } } } /// Catalog tree node. -pub struct CatalogTree_Computed_Cointime_Activity { +pub struct CatalogTree_Cointime_Activity { pub activity_to_vaultedness_ratio: MetricPattern1, pub coinblocks_created: BlockCountPattern, pub coinblocks_stored: BlockCountPattern, @@ -4869,7 +4519,7 @@ pub struct CatalogTree_Computed_Cointime_Activity { pub vaultedness: MetricPattern1, } -impl CatalogTree_Computed_Cointime_Activity { +impl CatalogTree_Cointime_Activity { pub fn new(client: Arc, base_path: String) -> Self { Self { activity_to_vaultedness_ratio: MetricPattern1::new(client.clone(), format!("{base_path}_activity_to_vaultedness_ratio")), @@ -4882,24 +4532,24 @@ impl CatalogTree_Computed_Cointime_Activity { } /// Catalog tree node. 
-pub struct CatalogTree_Computed_Cointime_Adjusted { - pub cointime_adj_inflation_rate: MetricPattern5, - pub cointime_adj_tx_btc_velocity: MetricPattern5, - pub cointime_adj_tx_usd_velocity: MetricPattern5, +pub struct CatalogTree_Cointime_Adjusted { + pub cointime_adj_inflation_rate: MetricPattern4, + pub cointime_adj_tx_btc_velocity: MetricPattern4, + pub cointime_adj_tx_usd_velocity: MetricPattern4, } -impl CatalogTree_Computed_Cointime_Adjusted { +impl CatalogTree_Cointime_Adjusted { pub fn new(client: Arc, base_path: String) -> Self { Self { - cointime_adj_inflation_rate: MetricPattern5::new(client.clone(), format!("{base_path}_cointime_adj_inflation_rate")), - cointime_adj_tx_btc_velocity: MetricPattern5::new(client.clone(), format!("{base_path}_cointime_adj_tx_btc_velocity")), - cointime_adj_tx_usd_velocity: MetricPattern5::new(client.clone(), format!("{base_path}_cointime_adj_tx_usd_velocity")), + cointime_adj_inflation_rate: MetricPattern4::new(client.clone(), format!("{base_path}_cointime_adj_inflation_rate")), + cointime_adj_tx_btc_velocity: MetricPattern4::new(client.clone(), format!("{base_path}_cointime_adj_tx_btc_velocity")), + cointime_adj_tx_usd_velocity: MetricPattern4::new(client.clone(), format!("{base_path}_cointime_adj_tx_usd_velocity")), } } } /// Catalog tree node. -pub struct CatalogTree_Computed_Cointime_Cap { +pub struct CatalogTree_Cointime_Cap { pub active_cap: MetricPattern1, pub cointime_cap: MetricPattern1, pub investor_cap: MetricPattern1, @@ -4907,7 +4557,7 @@ pub struct CatalogTree_Computed_Cointime_Cap { pub vaulted_cap: MetricPattern1, } -impl CatalogTree_Computed_Cointime_Cap { +impl CatalogTree_Cointime_Cap { pub fn new(client: Arc, base_path: String) -> Self { Self { active_cap: MetricPattern1::new(client.clone(), format!("{base_path}_active_cap")), @@ -4920,7 +4570,7 @@ impl CatalogTree_Computed_Cointime_Cap { } /// Catalog tree node. 
-pub struct CatalogTree_Computed_Cointime_Pricing { +pub struct CatalogTree_Cointime_Pricing { pub active_price: MetricPattern1, pub active_price_ratio: ActivePriceRatioPattern, pub cointime_price: MetricPattern1, @@ -4931,7 +4581,7 @@ pub struct CatalogTree_Computed_Cointime_Pricing { pub vaulted_price_ratio: ActivePriceRatioPattern, } -impl CatalogTree_Computed_Cointime_Pricing { +impl CatalogTree_Cointime_Pricing { pub fn new(client: Arc, base_path: String) -> Self { Self { active_price: MetricPattern1::new(client.clone(), format!("{base_path}_active_price")), @@ -4947,12 +4597,12 @@ impl CatalogTree_Computed_Cointime_Pricing { } /// Catalog tree node. -pub struct CatalogTree_Computed_Cointime_Supply { +pub struct CatalogTree_Cointime_Supply { pub active_supply: ActiveSupplyPattern, pub vaulted_supply: ActiveSupplyPattern, } -impl CatalogTree_Computed_Cointime_Supply { +impl CatalogTree_Cointime_Supply { pub fn new(client: Arc, base_path: String) -> Self { Self { active_supply: ActiveSupplyPattern::new(client.clone(), "active_supply".to_string()), @@ -4962,13 +4612,13 @@ impl CatalogTree_Computed_Cointime_Supply { } /// Catalog tree node. -pub struct CatalogTree_Computed_Cointime_Value { +pub struct CatalogTree_Cointime_Value { pub cointime_value_created: BlockCountPattern, pub cointime_value_destroyed: BlockCountPattern, pub cointime_value_stored: BlockCountPattern, } -impl CatalogTree_Computed_Cointime_Value { +impl CatalogTree_Cointime_Value { pub fn new(client: Arc, base_path: String) -> Self { Self { cointime_value_created: BlockCountPattern::new(client.clone(), "cointime_value_created".to_string()), @@ -4979,7 +4629,7 @@ impl CatalogTree_Computed_Cointime_Value { } /// Catalog tree node. 
-pub struct CatalogTree_Computed_Constants { +pub struct CatalogTree_Constants { pub constant_0: MetricPattern3, pub constant_1: MetricPattern3, pub constant_100: MetricPattern3, @@ -5000,7 +4650,7 @@ pub struct CatalogTree_Computed_Constants { pub constant_minus_4: MetricPattern3, } -impl CatalogTree_Computed_Constants { +impl CatalogTree_Constants { pub fn new(client: Arc, base_path: String) -> Self { Self { constant_0: MetricPattern3::new(client.clone(), format!("{base_path}_constant_0")), @@ -5026,61 +4676,53 @@ impl CatalogTree_Computed_Constants { } /// Catalog tree node. -pub struct CatalogTree_Computed_Distribution { - pub addr_count: MetricPattern1, - pub address_cohorts: CatalogTree_Computed_Distribution_AddressCohorts, - pub addresses_data: CatalogTree_Computed_Distribution_AddressesData, - pub addresstype_to_height_to_addr_count: AddresstypeToHeightToAddrCountPattern, - pub addresstype_to_height_to_empty_addr_count: AddresstypeToHeightToAddrCountPattern, - pub addresstype_to_indexes_to_addr_count: AddresstypeToHeightToAddrCountPattern, - pub addresstype_to_indexes_to_empty_addr_count: AddresstypeToHeightToAddrCountPattern, - pub any_address_indexes: AddresstypeToHeightToAddrCountPattern, - pub chain_state: MetricPattern26, - pub empty_addr_count: MetricPattern1, - pub emptyaddressindex: MetricPattern42, - pub loadedaddressindex: MetricPattern41, - pub utxo_cohorts: CatalogTree_Computed_Distribution_UtxoCohorts, +pub struct CatalogTree_Distribution { + pub addr_count: AddrCountPattern, + pub address_cohorts: CatalogTree_Distribution_AddressCohorts, + pub addresses_data: CatalogTree_Distribution_AddressesData, + pub any_address_indexes: CatalogTree_Distribution_AnyAddressIndexes, + pub chain_state: MetricPattern23, + pub empty_addr_count: AddrCountPattern, + pub emptyaddressindex: MetricPattern39, + pub loadedaddressindex: MetricPattern38, + pub utxo_cohorts: CatalogTree_Distribution_UtxoCohorts, } -impl CatalogTree_Computed_Distribution { +impl 
CatalogTree_Distribution { pub fn new(client: Arc, base_path: String) -> Self { Self { - addr_count: MetricPattern1::new(client.clone(), format!("{base_path}_addr_count")), - address_cohorts: CatalogTree_Computed_Distribution_AddressCohorts::new(client.clone(), format!("{base_path}_address_cohorts")), - addresses_data: CatalogTree_Computed_Distribution_AddressesData::new(client.clone(), format!("{base_path}_addresses_data")), - addresstype_to_height_to_addr_count: AddresstypeToHeightToAddrCountPattern::new(client.clone(), "addr_count".to_string()), - addresstype_to_height_to_empty_addr_count: AddresstypeToHeightToAddrCountPattern::new(client.clone(), "empty_addr_count".to_string()), - addresstype_to_indexes_to_addr_count: AddresstypeToHeightToAddrCountPattern::new(client.clone(), "addr_count".to_string()), - addresstype_to_indexes_to_empty_addr_count: AddresstypeToHeightToAddrCountPattern::new(client.clone(), "empty_addr_count".to_string()), - any_address_indexes: AddresstypeToHeightToAddrCountPattern::new(client.clone(), "anyaddressindex".to_string()), - chain_state: MetricPattern26::new(client.clone(), format!("{base_path}_chain_state")), - empty_addr_count: MetricPattern1::new(client.clone(), format!("{base_path}_empty_addr_count")), - emptyaddressindex: MetricPattern42::new(client.clone(), format!("{base_path}_emptyaddressindex")), - loadedaddressindex: MetricPattern41::new(client.clone(), format!("{base_path}_loadedaddressindex")), - utxo_cohorts: CatalogTree_Computed_Distribution_UtxoCohorts::new(client.clone(), format!("{base_path}_utxo_cohorts")), + addr_count: AddrCountPattern::new(client.clone(), "addr_count".to_string()), + address_cohorts: CatalogTree_Distribution_AddressCohorts::new(client.clone(), format!("{base_path}_address_cohorts")), + addresses_data: CatalogTree_Distribution_AddressesData::new(client.clone(), format!("{base_path}_addresses_data")), + any_address_indexes: CatalogTree_Distribution_AnyAddressIndexes::new(client.clone(), 
format!("{base_path}_any_address_indexes")), + chain_state: MetricPattern23::new(client.clone(), format!("{base_path}_chain_state")), + empty_addr_count: AddrCountPattern::new(client.clone(), "empty_addr_count".to_string()), + emptyaddressindex: MetricPattern39::new(client.clone(), format!("{base_path}_emptyaddressindex")), + loadedaddressindex: MetricPattern38::new(client.clone(), format!("{base_path}_loadedaddressindex")), + utxo_cohorts: CatalogTree_Distribution_UtxoCohorts::new(client.clone(), format!("{base_path}_utxo_cohorts")), } } } /// Catalog tree node. -pub struct CatalogTree_Computed_Distribution_AddressCohorts { - pub amount_range: CatalogTree_Computed_Distribution_AddressCohorts_AmountRange, - pub ge_amount: CatalogTree_Computed_Distribution_AddressCohorts_GeAmount, - pub lt_amount: CatalogTree_Computed_Distribution_AddressCohorts_LtAmount, +pub struct CatalogTree_Distribution_AddressCohorts { + pub amount_range: CatalogTree_Distribution_AddressCohorts_AmountRange, + pub ge_amount: CatalogTree_Distribution_AddressCohorts_GeAmount, + pub lt_amount: CatalogTree_Distribution_AddressCohorts_LtAmount, } -impl CatalogTree_Computed_Distribution_AddressCohorts { +impl CatalogTree_Distribution_AddressCohorts { pub fn new(client: Arc, base_path: String) -> Self { Self { - amount_range: CatalogTree_Computed_Distribution_AddressCohorts_AmountRange::new(client.clone(), format!("{base_path}_amount_range")), - ge_amount: CatalogTree_Computed_Distribution_AddressCohorts_GeAmount::new(client.clone(), format!("{base_path}_ge_amount")), - lt_amount: CatalogTree_Computed_Distribution_AddressCohorts_LtAmount::new(client.clone(), format!("{base_path}_lt_amount")), + amount_range: CatalogTree_Distribution_AddressCohorts_AmountRange::new(client.clone(), format!("{base_path}_amount_range")), + ge_amount: CatalogTree_Distribution_AddressCohorts_GeAmount::new(client.clone(), format!("{base_path}_ge_amount")), + lt_amount: 
CatalogTree_Distribution_AddressCohorts_LtAmount::new(client.clone(), format!("{base_path}_lt_amount")), } } } /// Catalog tree node. -pub struct CatalogTree_Computed_Distribution_AddressCohorts_AmountRange { +pub struct CatalogTree_Distribution_AddressCohorts_AmountRange { pub _0sats: _0satsPattern, pub _100btc_to_1k_btc: _0satsPattern, pub _100k_btc_or_more: _0satsPattern, @@ -5098,7 +4740,7 @@ pub struct CatalogTree_Computed_Distribution_AddressCohorts_AmountRange { pub _1sat_to_10sats: _0satsPattern, } -impl CatalogTree_Computed_Distribution_AddressCohorts_AmountRange { +impl CatalogTree_Distribution_AddressCohorts_AmountRange { pub fn new(client: Arc, base_path: String) -> Self { Self { _0sats: _0satsPattern::new(client.clone(), "addrs_with_0sats".to_string()), @@ -5121,7 +4763,7 @@ impl CatalogTree_Computed_Distribution_AddressCohorts_AmountRange { } /// Catalog tree node. -pub struct CatalogTree_Computed_Distribution_AddressCohorts_GeAmount { +pub struct CatalogTree_Distribution_AddressCohorts_GeAmount { pub _100btc: _0satsPattern, pub _100k_sats: _0satsPattern, pub _100sats: _0satsPattern, @@ -5137,7 +4779,7 @@ pub struct CatalogTree_Computed_Distribution_AddressCohorts_GeAmount { pub _1sat: _0satsPattern, } -impl CatalogTree_Computed_Distribution_AddressCohorts_GeAmount { +impl CatalogTree_Distribution_AddressCohorts_GeAmount { pub fn new(client: Arc, base_path: String) -> Self { Self { _100btc: _0satsPattern::new(client.clone(), "addrs_above_100btc".to_string()), @@ -5158,7 +4800,7 @@ impl CatalogTree_Computed_Distribution_AddressCohorts_GeAmount { } /// Catalog tree node. 
-pub struct CatalogTree_Computed_Distribution_AddressCohorts_LtAmount { +pub struct CatalogTree_Distribution_AddressCohorts_LtAmount { pub _100btc: _0satsPattern, pub _100k_btc: _0satsPattern, pub _100k_sats: _0satsPattern, @@ -5174,7 +4816,7 @@ pub struct CatalogTree_Computed_Distribution_AddressCohorts_LtAmount { pub _1m_sats: _0satsPattern, } -impl CatalogTree_Computed_Distribution_AddressCohorts_LtAmount { +impl CatalogTree_Distribution_AddressCohorts_LtAmount { pub fn new(client: Arc, base_path: String) -> Self { Self { _100btc: _0satsPattern::new(client.clone(), "addrs_under_100btc".to_string()), @@ -5195,55 +4837,82 @@ impl CatalogTree_Computed_Distribution_AddressCohorts_LtAmount { } /// Catalog tree node. -pub struct CatalogTree_Computed_Distribution_AddressesData { - pub empty: MetricPattern42, - pub loaded: MetricPattern41, +pub struct CatalogTree_Distribution_AddressesData { + pub empty: MetricPattern39, + pub loaded: MetricPattern38, } -impl CatalogTree_Computed_Distribution_AddressesData { +impl CatalogTree_Distribution_AddressesData { pub fn new(client: Arc, base_path: String) -> Self { Self { - empty: MetricPattern42::new(client.clone(), format!("{base_path}_empty")), - loaded: MetricPattern41::new(client.clone(), format!("{base_path}_loaded")), + empty: MetricPattern39::new(client.clone(), format!("{base_path}_empty")), + loaded: MetricPattern38::new(client.clone(), format!("{base_path}_loaded")), } } } /// Catalog tree node. 
-pub struct CatalogTree_Computed_Distribution_UtxoCohorts { - pub age_range: CatalogTree_Computed_Distribution_UtxoCohorts_AgeRange, - pub all: CatalogTree_Computed_Distribution_UtxoCohorts_All, - pub amount_range: CatalogTree_Computed_Distribution_UtxoCohorts_AmountRange, - pub epoch: CatalogTree_Computed_Distribution_UtxoCohorts_Epoch, - pub ge_amount: CatalogTree_Computed_Distribution_UtxoCohorts_GeAmount, - pub lt_amount: CatalogTree_Computed_Distribution_UtxoCohorts_LtAmount, - pub max_age: CatalogTree_Computed_Distribution_UtxoCohorts_MaxAge, - pub min_age: CatalogTree_Computed_Distribution_UtxoCohorts_MinAge, - pub term: CatalogTree_Computed_Distribution_UtxoCohorts_Term, - pub type_: CatalogTree_Computed_Distribution_UtxoCohorts_Type, - pub year: CatalogTree_Computed_Distribution_UtxoCohorts_Year, +pub struct CatalogTree_Distribution_AnyAddressIndexes { + pub p2a: MetricPattern27, + pub p2pk33: MetricPattern29, + pub p2pk65: MetricPattern30, + pub p2pkh: MetricPattern31, + pub p2sh: MetricPattern32, + pub p2tr: MetricPattern33, + pub p2wpkh: MetricPattern34, + pub p2wsh: MetricPattern35, } -impl CatalogTree_Computed_Distribution_UtxoCohorts { +impl CatalogTree_Distribution_AnyAddressIndexes { pub fn new(client: Arc, base_path: String) -> Self { Self { - age_range: CatalogTree_Computed_Distribution_UtxoCohorts_AgeRange::new(client.clone(), format!("{base_path}_age_range")), - all: CatalogTree_Computed_Distribution_UtxoCohorts_All::new(client.clone(), format!("{base_path}_all")), - amount_range: CatalogTree_Computed_Distribution_UtxoCohorts_AmountRange::new(client.clone(), format!("{base_path}_amount_range")), - epoch: CatalogTree_Computed_Distribution_UtxoCohorts_Epoch::new(client.clone(), format!("{base_path}_epoch")), - ge_amount: CatalogTree_Computed_Distribution_UtxoCohorts_GeAmount::new(client.clone(), format!("{base_path}_ge_amount")), - lt_amount: CatalogTree_Computed_Distribution_UtxoCohorts_LtAmount::new(client.clone(), 
format!("{base_path}_lt_amount")), - max_age: CatalogTree_Computed_Distribution_UtxoCohorts_MaxAge::new(client.clone(), format!("{base_path}_max_age")), - min_age: CatalogTree_Computed_Distribution_UtxoCohorts_MinAge::new(client.clone(), format!("{base_path}_min_age")), - term: CatalogTree_Computed_Distribution_UtxoCohorts_Term::new(client.clone(), format!("{base_path}_term")), - type_: CatalogTree_Computed_Distribution_UtxoCohorts_Type::new(client.clone(), format!("{base_path}_type_")), - year: CatalogTree_Computed_Distribution_UtxoCohorts_Year::new(client.clone(), format!("{base_path}_year")), + p2a: MetricPattern27::new(client.clone(), format!("{base_path}_p2a")), + p2pk33: MetricPattern29::new(client.clone(), format!("{base_path}_p2pk33")), + p2pk65: MetricPattern30::new(client.clone(), format!("{base_path}_p2pk65")), + p2pkh: MetricPattern31::new(client.clone(), format!("{base_path}_p2pkh")), + p2sh: MetricPattern32::new(client.clone(), format!("{base_path}_p2sh")), + p2tr: MetricPattern33::new(client.clone(), format!("{base_path}_p2tr")), + p2wpkh: MetricPattern34::new(client.clone(), format!("{base_path}_p2wpkh")), + p2wsh: MetricPattern35::new(client.clone(), format!("{base_path}_p2wsh")), } } } /// Catalog tree node. 
-pub struct CatalogTree_Computed_Distribution_UtxoCohorts_AgeRange { +pub struct CatalogTree_Distribution_UtxoCohorts { + pub age_range: CatalogTree_Distribution_UtxoCohorts_AgeRange, + pub all: CatalogTree_Distribution_UtxoCohorts_All, + pub amount_range: CatalogTree_Distribution_UtxoCohorts_AmountRange, + pub epoch: CatalogTree_Distribution_UtxoCohorts_Epoch, + pub ge_amount: CatalogTree_Distribution_UtxoCohorts_GeAmount, + pub lt_amount: CatalogTree_Distribution_UtxoCohorts_LtAmount, + pub max_age: CatalogTree_Distribution_UtxoCohorts_MaxAge, + pub min_age: CatalogTree_Distribution_UtxoCohorts_MinAge, + pub term: CatalogTree_Distribution_UtxoCohorts_Term, + pub type_: CatalogTree_Distribution_UtxoCohorts_Type, + pub year: CatalogTree_Distribution_UtxoCohorts_Year, +} + +impl CatalogTree_Distribution_UtxoCohorts { + pub fn new(client: Arc, base_path: String) -> Self { + Self { + age_range: CatalogTree_Distribution_UtxoCohorts_AgeRange::new(client.clone(), format!("{base_path}_age_range")), + all: CatalogTree_Distribution_UtxoCohorts_All::new(client.clone(), format!("{base_path}_all")), + amount_range: CatalogTree_Distribution_UtxoCohorts_AmountRange::new(client.clone(), format!("{base_path}_amount_range")), + epoch: CatalogTree_Distribution_UtxoCohorts_Epoch::new(client.clone(), format!("{base_path}_epoch")), + ge_amount: CatalogTree_Distribution_UtxoCohorts_GeAmount::new(client.clone(), format!("{base_path}_ge_amount")), + lt_amount: CatalogTree_Distribution_UtxoCohorts_LtAmount::new(client.clone(), format!("{base_path}_lt_amount")), + max_age: CatalogTree_Distribution_UtxoCohorts_MaxAge::new(client.clone(), format!("{base_path}_max_age")), + min_age: CatalogTree_Distribution_UtxoCohorts_MinAge::new(client.clone(), format!("{base_path}_min_age")), + term: CatalogTree_Distribution_UtxoCohorts_Term::new(client.clone(), format!("{base_path}_term")), + type_: CatalogTree_Distribution_UtxoCohorts_Type::new(client.clone(), format!("{base_path}_type_")), + year: 
CatalogTree_Distribution_UtxoCohorts_Year::new(client.clone(), format!("{base_path}_year")), + } + } +} + +/// Catalog tree node. +pub struct CatalogTree_Distribution_UtxoCohorts_AgeRange { pub _10y_to_12y: _10yTo12yPattern, pub _12y_to_15y: _10yTo12yPattern, pub _1d_to_1w: _10yTo12yPattern, @@ -5267,7 +4936,7 @@ pub struct CatalogTree_Computed_Distribution_UtxoCohorts_AgeRange { pub up_to_1h: _10yTo12yPattern, } -impl CatalogTree_Computed_Distribution_UtxoCohorts_AgeRange { +impl CatalogTree_Distribution_UtxoCohorts_AgeRange { pub fn new(client: Arc, base_path: String) -> Self { Self { _10y_to_12y: _10yTo12yPattern::new(client.clone(), "utxos_at_least_10y_up_to_12y_old".to_string()), @@ -5296,30 +4965,30 @@ impl CatalogTree_Computed_Distribution_UtxoCohorts_AgeRange { } /// Catalog tree node. -pub struct CatalogTree_Computed_Distribution_UtxoCohorts_All { +pub struct CatalogTree_Distribution_UtxoCohorts_All { pub activity: ActivityPattern2, pub cost_basis: CostBasisPattern2, pub realized: RealizedPattern3, - pub relative: CatalogTree_Computed_Distribution_UtxoCohorts_All_Relative, - pub supply: SupplyPattern3, + pub relative: CatalogTree_Distribution_UtxoCohorts_All_Relative, + pub supply: SupplyPattern2, pub unrealized: UnrealizedPattern, } -impl CatalogTree_Computed_Distribution_UtxoCohorts_All { +impl CatalogTree_Distribution_UtxoCohorts_All { pub fn new(client: Arc, base_path: String) -> Self { Self { activity: ActivityPattern2::new(client.clone(), "".to_string()), cost_basis: CostBasisPattern2::new(client.clone(), "".to_string()), realized: RealizedPattern3::new(client.clone(), "".to_string()), - relative: CatalogTree_Computed_Distribution_UtxoCohorts_All_Relative::new(client.clone(), format!("{base_path}_relative")), - supply: SupplyPattern3::new(client.clone(), "".to_string()), + relative: CatalogTree_Distribution_UtxoCohorts_All_Relative::new(client.clone(), format!("{base_path}_relative")), + supply: SupplyPattern2::new(client.clone(), "".to_string()), 
unrealized: UnrealizedPattern::new(client.clone(), "".to_string()), } } } /// Catalog tree node. -pub struct CatalogTree_Computed_Distribution_UtxoCohorts_All_Relative { +pub struct CatalogTree_Distribution_UtxoCohorts_All_Relative { pub neg_unrealized_loss_rel_to_own_total_unrealized_pnl: MetricPattern3, pub net_unrealized_pnl_rel_to_own_total_unrealized_pnl: MetricPattern3, pub supply_in_loss_rel_to_own_supply: MetricPattern3, @@ -5328,7 +4997,7 @@ pub struct CatalogTree_Computed_Distribution_UtxoCohorts_All_Relative { pub unrealized_profit_rel_to_own_total_unrealized_pnl: MetricPattern3, } -impl CatalogTree_Computed_Distribution_UtxoCohorts_All_Relative { +impl CatalogTree_Distribution_UtxoCohorts_All_Relative { pub fn new(client: Arc, base_path: String) -> Self { Self { neg_unrealized_loss_rel_to_own_total_unrealized_pnl: MetricPattern3::new(client.clone(), format!("{base_path}_neg_unrealized_loss_rel_to_own_total_unrealized_pnl")), @@ -5342,7 +5011,7 @@ impl CatalogTree_Computed_Distribution_UtxoCohorts_All_Relative { } /// Catalog tree node. -pub struct CatalogTree_Computed_Distribution_UtxoCohorts_AmountRange { +pub struct CatalogTree_Distribution_UtxoCohorts_AmountRange { pub _0sats: _0satsPattern2, pub _100btc_to_1k_btc: _0satsPattern2, pub _100k_btc_or_more: _0satsPattern2, @@ -5360,7 +5029,7 @@ pub struct CatalogTree_Computed_Distribution_UtxoCohorts_AmountRange { pub _1sat_to_10sats: _0satsPattern2, } -impl CatalogTree_Computed_Distribution_UtxoCohorts_AmountRange { +impl CatalogTree_Distribution_UtxoCohorts_AmountRange { pub fn new(client: Arc, base_path: String) -> Self { Self { _0sats: _0satsPattern2::new(client.clone(), "utxos_with_0sats".to_string()), @@ -5383,7 +5052,7 @@ impl CatalogTree_Computed_Distribution_UtxoCohorts_AmountRange { } /// Catalog tree node. 
-pub struct CatalogTree_Computed_Distribution_UtxoCohorts_Epoch { +pub struct CatalogTree_Distribution_UtxoCohorts_Epoch { pub _0: _0satsPattern2, pub _1: _0satsPattern2, pub _2: _0satsPattern2, @@ -5391,7 +5060,7 @@ pub struct CatalogTree_Computed_Distribution_UtxoCohorts_Epoch { pub _4: _0satsPattern2, } -impl CatalogTree_Computed_Distribution_UtxoCohorts_Epoch { +impl CatalogTree_Distribution_UtxoCohorts_Epoch { pub fn new(client: Arc, base_path: String) -> Self { Self { _0: _0satsPattern2::new(client.clone(), "epoch_0".to_string()), @@ -5404,7 +5073,7 @@ impl CatalogTree_Computed_Distribution_UtxoCohorts_Epoch { } /// Catalog tree node. -pub struct CatalogTree_Computed_Distribution_UtxoCohorts_GeAmount { +pub struct CatalogTree_Distribution_UtxoCohorts_GeAmount { pub _100btc: _100btcPattern, pub _100k_sats: _100btcPattern, pub _100sats: _100btcPattern, @@ -5420,7 +5089,7 @@ pub struct CatalogTree_Computed_Distribution_UtxoCohorts_GeAmount { pub _1sat: _100btcPattern, } -impl CatalogTree_Computed_Distribution_UtxoCohorts_GeAmount { +impl CatalogTree_Distribution_UtxoCohorts_GeAmount { pub fn new(client: Arc, base_path: String) -> Self { Self { _100btc: _100btcPattern::new(client.clone(), "utxos_above_100btc".to_string()), @@ -5441,7 +5110,7 @@ impl CatalogTree_Computed_Distribution_UtxoCohorts_GeAmount { } /// Catalog tree node. 
-pub struct CatalogTree_Computed_Distribution_UtxoCohorts_LtAmount { +pub struct CatalogTree_Distribution_UtxoCohorts_LtAmount { pub _100btc: _100btcPattern, pub _100k_btc: _100btcPattern, pub _100k_sats: _100btcPattern, @@ -5457,7 +5126,7 @@ pub struct CatalogTree_Computed_Distribution_UtxoCohorts_LtAmount { pub _1m_sats: _100btcPattern, } -impl CatalogTree_Computed_Distribution_UtxoCohorts_LtAmount { +impl CatalogTree_Distribution_UtxoCohorts_LtAmount { pub fn new(client: Arc, base_path: String) -> Self { Self { _100btc: _100btcPattern::new(client.clone(), "utxos_under_100btc".to_string()), @@ -5478,7 +5147,7 @@ impl CatalogTree_Computed_Distribution_UtxoCohorts_LtAmount { } /// Catalog tree node. -pub struct CatalogTree_Computed_Distribution_UtxoCohorts_MaxAge { +pub struct CatalogTree_Distribution_UtxoCohorts_MaxAge { pub _10y: _10yPattern, pub _12y: _10yPattern, pub _15y: _10yPattern, @@ -5499,7 +5168,7 @@ pub struct CatalogTree_Computed_Distribution_UtxoCohorts_MaxAge { pub _8y: _10yPattern, } -impl CatalogTree_Computed_Distribution_UtxoCohorts_MaxAge { +impl CatalogTree_Distribution_UtxoCohorts_MaxAge { pub fn new(client: Arc, base_path: String) -> Self { Self { _10y: _10yPattern::new(client.clone(), "utxos_up_to_10y_old".to_string()), @@ -5525,7 +5194,7 @@ impl CatalogTree_Computed_Distribution_UtxoCohorts_MaxAge { } /// Catalog tree node. 
-pub struct CatalogTree_Computed_Distribution_UtxoCohorts_MinAge { +pub struct CatalogTree_Distribution_UtxoCohorts_MinAge { pub _10y: _100btcPattern, pub _12y: _100btcPattern, pub _1d: _100btcPattern, @@ -5546,7 +5215,7 @@ pub struct CatalogTree_Computed_Distribution_UtxoCohorts_MinAge { pub _8y: _100btcPattern, } -impl CatalogTree_Computed_Distribution_UtxoCohorts_MinAge { +impl CatalogTree_Distribution_UtxoCohorts_MinAge { pub fn new(client: Arc, base_path: String) -> Self { Self { _10y: _100btcPattern::new(client.clone(), "utxos_at_least_10y_old".to_string()), @@ -5572,68 +5241,68 @@ impl CatalogTree_Computed_Distribution_UtxoCohorts_MinAge { } /// Catalog tree node. -pub struct CatalogTree_Computed_Distribution_UtxoCohorts_Term { - pub long: CatalogTree_Computed_Distribution_UtxoCohorts_Term_Long, - pub short: CatalogTree_Computed_Distribution_UtxoCohorts_Term_Short, +pub struct CatalogTree_Distribution_UtxoCohorts_Term { + pub long: CatalogTree_Distribution_UtxoCohorts_Term_Long, + pub short: CatalogTree_Distribution_UtxoCohorts_Term_Short, } -impl CatalogTree_Computed_Distribution_UtxoCohorts_Term { +impl CatalogTree_Distribution_UtxoCohorts_Term { pub fn new(client: Arc, base_path: String) -> Self { Self { - long: CatalogTree_Computed_Distribution_UtxoCohorts_Term_Long::new(client.clone(), format!("{base_path}_long")), - short: CatalogTree_Computed_Distribution_UtxoCohorts_Term_Short::new(client.clone(), format!("{base_path}_short")), + long: CatalogTree_Distribution_UtxoCohorts_Term_Long::new(client.clone(), format!("{base_path}_long")), + short: CatalogTree_Distribution_UtxoCohorts_Term_Short::new(client.clone(), format!("{base_path}_short")), } } } /// Catalog tree node. 
-pub struct CatalogTree_Computed_Distribution_UtxoCohorts_Term_Long { +pub struct CatalogTree_Distribution_UtxoCohorts_Term_Long { pub activity: ActivityPattern2, pub cost_basis: CostBasisPattern2, pub realized: RealizedPattern2, pub relative: RelativePattern5, - pub supply: SupplyPattern3, + pub supply: SupplyPattern2, pub unrealized: UnrealizedPattern, } -impl CatalogTree_Computed_Distribution_UtxoCohorts_Term_Long { +impl CatalogTree_Distribution_UtxoCohorts_Term_Long { pub fn new(client: Arc, base_path: String) -> Self { Self { activity: ActivityPattern2::new(client.clone(), "lth".to_string()), cost_basis: CostBasisPattern2::new(client.clone(), "lth".to_string()), realized: RealizedPattern2::new(client.clone(), "lth".to_string()), relative: RelativePattern5::new(client.clone(), "lth".to_string()), - supply: SupplyPattern3::new(client.clone(), "lth".to_string()), + supply: SupplyPattern2::new(client.clone(), "lth".to_string()), unrealized: UnrealizedPattern::new(client.clone(), "lth".to_string()), } } } /// Catalog tree node. 
-pub struct CatalogTree_Computed_Distribution_UtxoCohorts_Term_Short { +pub struct CatalogTree_Distribution_UtxoCohorts_Term_Short { pub activity: ActivityPattern2, pub cost_basis: CostBasisPattern2, pub realized: RealizedPattern3, pub relative: RelativePattern5, - pub supply: SupplyPattern3, + pub supply: SupplyPattern2, pub unrealized: UnrealizedPattern, } -impl CatalogTree_Computed_Distribution_UtxoCohorts_Term_Short { +impl CatalogTree_Distribution_UtxoCohorts_Term_Short { pub fn new(client: Arc, base_path: String) -> Self { Self { activity: ActivityPattern2::new(client.clone(), "sth".to_string()), cost_basis: CostBasisPattern2::new(client.clone(), "sth".to_string()), realized: RealizedPattern3::new(client.clone(), "sth".to_string()), relative: RelativePattern5::new(client.clone(), "sth".to_string()), - supply: SupplyPattern3::new(client.clone(), "sth".to_string()), + supply: SupplyPattern2::new(client.clone(), "sth".to_string()), unrealized: UnrealizedPattern::new(client.clone(), "sth".to_string()), } } } /// Catalog tree node. -pub struct CatalogTree_Computed_Distribution_UtxoCohorts_Type { +pub struct CatalogTree_Distribution_UtxoCohorts_Type { pub empty: _0satsPattern2, pub p2a: _0satsPattern2, pub p2ms: _0satsPattern2, @@ -5647,7 +5316,7 @@ pub struct CatalogTree_Computed_Distribution_UtxoCohorts_Type { pub unknown: _0satsPattern2, } -impl CatalogTree_Computed_Distribution_UtxoCohorts_Type { +impl CatalogTree_Distribution_UtxoCohorts_Type { pub fn new(client: Arc, base_path: String) -> Self { Self { empty: _0satsPattern2::new(client.clone(), "empty_outputs".to_string()), @@ -5666,7 +5335,7 @@ impl CatalogTree_Computed_Distribution_UtxoCohorts_Type { } /// Catalog tree node. 
-pub struct CatalogTree_Computed_Distribution_UtxoCohorts_Year { +pub struct CatalogTree_Distribution_UtxoCohorts_Year { pub _2009: _0satsPattern2, pub _2010: _0satsPattern2, pub _2011: _0satsPattern2, @@ -5687,7 +5356,7 @@ pub struct CatalogTree_Computed_Distribution_UtxoCohorts_Year { pub _2026: _0satsPattern2, } -impl CatalogTree_Computed_Distribution_UtxoCohorts_Year { +impl CatalogTree_Distribution_UtxoCohorts_Year { pub fn new(client: Arc, base_path: String) -> Self { Self { _2009: _0satsPattern2::new(client.clone(), "year_2009".to_string()), @@ -5713,246 +5382,256 @@ impl CatalogTree_Computed_Distribution_UtxoCohorts_Year { } /// Catalog tree node. -pub struct CatalogTree_Computed_Indexes { - pub address: CatalogTree_Computed_Indexes_Address, - pub block: CatalogTree_Computed_Indexes_Block, - pub time: CatalogTree_Computed_Indexes_Time, - pub transaction: CatalogTree_Computed_Indexes_Transaction, +pub struct CatalogTree_Indexes { + pub address: CatalogTree_Indexes_Address, + pub block: CatalogTree_Indexes_Block, + pub time: CatalogTree_Indexes_Time, + pub transaction: CatalogTree_Indexes_Transaction, } -impl CatalogTree_Computed_Indexes { +impl CatalogTree_Indexes { pub fn new(client: Arc, base_path: String) -> Self { Self { - address: CatalogTree_Computed_Indexes_Address::new(client.clone(), format!("{base_path}_address")), - block: CatalogTree_Computed_Indexes_Block::new(client.clone(), format!("{base_path}_block")), - time: CatalogTree_Computed_Indexes_Time::new(client.clone(), format!("{base_path}_time")), - transaction: CatalogTree_Computed_Indexes_Transaction::new(client.clone(), format!("{base_path}_transaction")), + address: CatalogTree_Indexes_Address::new(client.clone(), format!("{base_path}_address")), + block: CatalogTree_Indexes_Block::new(client.clone(), format!("{base_path}_block")), + time: CatalogTree_Indexes_Time::new(client.clone(), format!("{base_path}_time")), + transaction: CatalogTree_Indexes_Transaction::new(client.clone(), 
format!("{base_path}_transaction")), } } } /// Catalog tree node. -pub struct CatalogTree_Computed_Indexes_Address { - pub emptyoutputindex: MetricPattern25, - pub opreturnindex: MetricPattern28, - pub p2aaddressindex: MetricPattern30, - pub p2msoutputindex: MetricPattern31, - pub p2pk33addressindex: MetricPattern32, - pub p2pk65addressindex: MetricPattern33, - pub p2pkhaddressindex: MetricPattern34, - pub p2shaddressindex: MetricPattern35, - pub p2traddressindex: MetricPattern36, - pub p2wpkhaddressindex: MetricPattern37, - pub p2wshaddressindex: MetricPattern38, - pub unknownoutputindex: MetricPattern40, +pub struct CatalogTree_Indexes_Address { + pub emptyoutputindex: MetricPattern22, + pub opreturnindex: MetricPattern25, + pub p2aaddressindex: MetricPattern27, + pub p2msoutputindex: MetricPattern28, + pub p2pk33addressindex: MetricPattern29, + pub p2pk65addressindex: MetricPattern30, + pub p2pkhaddressindex: MetricPattern31, + pub p2shaddressindex: MetricPattern32, + pub p2traddressindex: MetricPattern33, + pub p2wpkhaddressindex: MetricPattern34, + pub p2wshaddressindex: MetricPattern35, + pub unknownoutputindex: MetricPattern37, } -impl CatalogTree_Computed_Indexes_Address { +impl CatalogTree_Indexes_Address { pub fn new(client: Arc, base_path: String) -> Self { Self { - emptyoutputindex: MetricPattern25::new(client.clone(), format!("{base_path}_emptyoutputindex")), - opreturnindex: MetricPattern28::new(client.clone(), format!("{base_path}_opreturnindex")), - p2aaddressindex: MetricPattern30::new(client.clone(), format!("{base_path}_p2aaddressindex")), - p2msoutputindex: MetricPattern31::new(client.clone(), format!("{base_path}_p2msoutputindex")), - p2pk33addressindex: MetricPattern32::new(client.clone(), format!("{base_path}_p2pk33addressindex")), - p2pk65addressindex: MetricPattern33::new(client.clone(), format!("{base_path}_p2pk65addressindex")), - p2pkhaddressindex: MetricPattern34::new(client.clone(), format!("{base_path}_p2pkhaddressindex")), - 
p2shaddressindex: MetricPattern35::new(client.clone(), format!("{base_path}_p2shaddressindex")), - p2traddressindex: MetricPattern36::new(client.clone(), format!("{base_path}_p2traddressindex")), - p2wpkhaddressindex: MetricPattern37::new(client.clone(), format!("{base_path}_p2wpkhaddressindex")), - p2wshaddressindex: MetricPattern38::new(client.clone(), format!("{base_path}_p2wshaddressindex")), - unknownoutputindex: MetricPattern40::new(client.clone(), format!("{base_path}_unknownoutputindex")), + emptyoutputindex: MetricPattern22::new(client.clone(), format!("{base_path}_emptyoutputindex")), + opreturnindex: MetricPattern25::new(client.clone(), format!("{base_path}_opreturnindex")), + p2aaddressindex: MetricPattern27::new(client.clone(), format!("{base_path}_p2aaddressindex")), + p2msoutputindex: MetricPattern28::new(client.clone(), format!("{base_path}_p2msoutputindex")), + p2pk33addressindex: MetricPattern29::new(client.clone(), format!("{base_path}_p2pk33addressindex")), + p2pk65addressindex: MetricPattern30::new(client.clone(), format!("{base_path}_p2pk65addressindex")), + p2pkhaddressindex: MetricPattern31::new(client.clone(), format!("{base_path}_p2pkhaddressindex")), + p2shaddressindex: MetricPattern32::new(client.clone(), format!("{base_path}_p2shaddressindex")), + p2traddressindex: MetricPattern33::new(client.clone(), format!("{base_path}_p2traddressindex")), + p2wpkhaddressindex: MetricPattern34::new(client.clone(), format!("{base_path}_p2wpkhaddressindex")), + p2wshaddressindex: MetricPattern35::new(client.clone(), format!("{base_path}_p2wshaddressindex")), + unknownoutputindex: MetricPattern37::new(client.clone(), format!("{base_path}_unknownoutputindex")), } } } /// Catalog tree node. 
-pub struct CatalogTree_Computed_Indexes_Block { - pub dateindex: MetricPattern26, - pub difficultyepoch: MetricPattern15, - pub first_height: MetricPattern14, - pub halvingepoch: MetricPattern16, - pub height: MetricPattern26, - pub height_count: MetricPattern24, - pub txindex_count: MetricPattern26, +pub struct CatalogTree_Indexes_Block { + pub dateindex: MetricPattern23, + pub difficultyepoch: MetricPattern12, + pub first_height: MetricPattern11, + pub halvingepoch: MetricPattern13, + pub height: MetricPattern23, + pub height_count: MetricPattern21, + pub txindex_count: MetricPattern23, } -impl CatalogTree_Computed_Indexes_Block { +impl CatalogTree_Indexes_Block { pub fn new(client: Arc, base_path: String) -> Self { Self { - dateindex: MetricPattern26::new(client.clone(), format!("{base_path}_dateindex")), - difficultyepoch: MetricPattern15::new(client.clone(), format!("{base_path}_difficultyepoch")), - first_height: MetricPattern14::new(client.clone(), format!("{base_path}_first_height")), - halvingepoch: MetricPattern16::new(client.clone(), format!("{base_path}_halvingepoch")), - height: MetricPattern26::new(client.clone(), format!("{base_path}_height")), - height_count: MetricPattern24::new(client.clone(), format!("{base_path}_height_count")), - txindex_count: MetricPattern26::new(client.clone(), format!("{base_path}_txindex_count")), + dateindex: MetricPattern23::new(client.clone(), format!("{base_path}_dateindex")), + difficultyepoch: MetricPattern12::new(client.clone(), format!("{base_path}_difficultyepoch")), + first_height: MetricPattern11::new(client.clone(), format!("{base_path}_first_height")), + halvingepoch: MetricPattern13::new(client.clone(), format!("{base_path}_halvingepoch")), + height: MetricPattern23::new(client.clone(), format!("{base_path}_height")), + height_count: MetricPattern21::new(client.clone(), format!("{base_path}_height_count")), + txindex_count: MetricPattern23::new(client.clone(), format!("{base_path}_txindex_count")), } } } /// 
Catalog tree node. -pub struct CatalogTree_Computed_Indexes_Time { - pub date: MetricPattern22, - pub dateindex: MetricPattern22, - pub dateindex_count: MetricPattern20, - pub decadeindex: MetricPattern13, - pub first_dateindex: MetricPattern20, - pub first_height: MetricPattern22, - pub first_monthindex: MetricPattern9, - pub first_yearindex: MetricPattern23, - pub height_count: MetricPattern22, - pub monthindex: MetricPattern11, - pub monthindex_count: MetricPattern9, - pub quarterindex: MetricPattern18, - pub semesterindex: MetricPattern19, - pub weekindex: MetricPattern12, - pub yearindex: MetricPattern21, - pub yearindex_count: MetricPattern23, +pub struct CatalogTree_Indexes_Time { + pub date: MetricPattern19, + pub dateindex: MetricPattern19, + pub dateindex_count: MetricPattern17, + pub decadeindex: MetricPattern10, + pub first_dateindex: MetricPattern17, + pub first_height: MetricPattern19, + pub first_monthindex: MetricPattern6, + pub first_yearindex: MetricPattern20, + pub height_count: MetricPattern19, + pub monthindex: MetricPattern8, + pub monthindex_count: MetricPattern6, + pub quarterindex: MetricPattern15, + pub semesterindex: MetricPattern16, + pub weekindex: MetricPattern9, + pub yearindex: MetricPattern18, + pub yearindex_count: MetricPattern20, } -impl CatalogTree_Computed_Indexes_Time { +impl CatalogTree_Indexes_Time { pub fn new(client: Arc, base_path: String) -> Self { Self { - date: MetricPattern22::new(client.clone(), format!("{base_path}_date")), - dateindex: MetricPattern22::new(client.clone(), format!("{base_path}_dateindex")), - dateindex_count: MetricPattern20::new(client.clone(), format!("{base_path}_dateindex_count")), - decadeindex: MetricPattern13::new(client.clone(), format!("{base_path}_decadeindex")), - first_dateindex: MetricPattern20::new(client.clone(), format!("{base_path}_first_dateindex")), - first_height: MetricPattern22::new(client.clone(), format!("{base_path}_first_height")), - first_monthindex: 
MetricPattern9::new(client.clone(), format!("{base_path}_first_monthindex")), - first_yearindex: MetricPattern23::new(client.clone(), format!("{base_path}_first_yearindex")), - height_count: MetricPattern22::new(client.clone(), format!("{base_path}_height_count")), - monthindex: MetricPattern11::new(client.clone(), format!("{base_path}_monthindex")), - monthindex_count: MetricPattern9::new(client.clone(), format!("{base_path}_monthindex_count")), - quarterindex: MetricPattern18::new(client.clone(), format!("{base_path}_quarterindex")), - semesterindex: MetricPattern19::new(client.clone(), format!("{base_path}_semesterindex")), - weekindex: MetricPattern12::new(client.clone(), format!("{base_path}_weekindex")), - yearindex: MetricPattern21::new(client.clone(), format!("{base_path}_yearindex")), - yearindex_count: MetricPattern23::new(client.clone(), format!("{base_path}_yearindex_count")), + date: MetricPattern19::new(client.clone(), format!("{base_path}_date")), + dateindex: MetricPattern19::new(client.clone(), format!("{base_path}_dateindex")), + dateindex_count: MetricPattern17::new(client.clone(), format!("{base_path}_dateindex_count")), + decadeindex: MetricPattern10::new(client.clone(), format!("{base_path}_decadeindex")), + first_dateindex: MetricPattern17::new(client.clone(), format!("{base_path}_first_dateindex")), + first_height: MetricPattern19::new(client.clone(), format!("{base_path}_first_height")), + first_monthindex: MetricPattern6::new(client.clone(), format!("{base_path}_first_monthindex")), + first_yearindex: MetricPattern20::new(client.clone(), format!("{base_path}_first_yearindex")), + height_count: MetricPattern19::new(client.clone(), format!("{base_path}_height_count")), + monthindex: MetricPattern8::new(client.clone(), format!("{base_path}_monthindex")), + monthindex_count: MetricPattern6::new(client.clone(), format!("{base_path}_monthindex_count")), + quarterindex: MetricPattern15::new(client.clone(), format!("{base_path}_quarterindex")), + 
semesterindex: MetricPattern16::new(client.clone(), format!("{base_path}_semesterindex")), + weekindex: MetricPattern9::new(client.clone(), format!("{base_path}_weekindex")), + yearindex: MetricPattern18::new(client.clone(), format!("{base_path}_yearindex")), + yearindex_count: MetricPattern20::new(client.clone(), format!("{base_path}_yearindex_count")), } } } /// Catalog tree node. -pub struct CatalogTree_Computed_Indexes_Transaction { - pub input_count: MetricPattern39, - pub output_count: MetricPattern39, - pub txindex: MetricPattern39, - pub txinindex: MetricPattern27, - pub txoutindex: MetricPattern29, +pub struct CatalogTree_Indexes_Transaction { + pub input_count: MetricPattern36, + pub output_count: MetricPattern36, + pub txindex: MetricPattern36, + pub txinindex: MetricPattern24, + pub txoutindex: MetricPattern26, } -impl CatalogTree_Computed_Indexes_Transaction { +impl CatalogTree_Indexes_Transaction { pub fn new(client: Arc, base_path: String) -> Self { Self { - input_count: MetricPattern39::new(client.clone(), format!("{base_path}_input_count")), - output_count: MetricPattern39::new(client.clone(), format!("{base_path}_output_count")), - txindex: MetricPattern39::new(client.clone(), format!("{base_path}_txindex")), - txinindex: MetricPattern27::new(client.clone(), format!("{base_path}_txinindex")), - txoutindex: MetricPattern29::new(client.clone(), format!("{base_path}_txoutindex")), + input_count: MetricPattern36::new(client.clone(), format!("{base_path}_input_count")), + output_count: MetricPattern36::new(client.clone(), format!("{base_path}_output_count")), + txindex: MetricPattern36::new(client.clone(), format!("{base_path}_txindex")), + txinindex: MetricPattern24::new(client.clone(), format!("{base_path}_txinindex")), + txoutindex: MetricPattern26::new(client.clone(), format!("{base_path}_txoutindex")), } } } /// Catalog tree node. 
-pub struct CatalogTree_Computed_Inputs { - pub count: CatalogTree_Computed_Inputs_Count, - pub spent: CatalogTree_Computed_Inputs_Spent, +pub struct CatalogTree_Inputs { + pub count: CatalogTree_Inputs_Count, + pub first_txinindex: MetricPattern23, + pub outpoint: MetricPattern24, + pub outputtype: MetricPattern24, + pub spent: CatalogTree_Inputs_Spent, + pub txindex: MetricPattern24, + pub typeindex: MetricPattern24, } -impl CatalogTree_Computed_Inputs { +impl CatalogTree_Inputs { pub fn new(client: Arc, base_path: String) -> Self { Self { - count: CatalogTree_Computed_Inputs_Count::new(client.clone(), format!("{base_path}_count")), - spent: CatalogTree_Computed_Inputs_Spent::new(client.clone(), format!("{base_path}_spent")), + count: CatalogTree_Inputs_Count::new(client.clone(), format!("{base_path}_count")), + first_txinindex: MetricPattern23::new(client.clone(), format!("{base_path}_first_txinindex")), + outpoint: MetricPattern24::new(client.clone(), format!("{base_path}_outpoint")), + outputtype: MetricPattern24::new(client.clone(), format!("{base_path}_outputtype")), + spent: CatalogTree_Inputs_Spent::new(client.clone(), format!("{base_path}_spent")), + txindex: MetricPattern24::new(client.clone(), format!("{base_path}_txindex")), + typeindex: MetricPattern24::new(client.clone(), format!("{base_path}_typeindex")), } } } /// Catalog tree node. -pub struct CatalogTree_Computed_Inputs_Count { - pub count: CountPattern2, +pub struct CatalogTree_Inputs_Count { + pub count: BlockSizePattern, } -impl CatalogTree_Computed_Inputs_Count { +impl CatalogTree_Inputs_Count { pub fn new(client: Arc, base_path: String) -> Self { Self { - count: CountPattern2::new(client.clone(), "input_count".to_string()), + count: BlockSizePattern::new(client.clone(), "input_count".to_string()), } } } /// Catalog tree node. 
-pub struct CatalogTree_Computed_Inputs_Spent { - pub txoutindex: MetricPattern27, - pub value: MetricPattern27, +pub struct CatalogTree_Inputs_Spent { + pub txoutindex: MetricPattern24, + pub value: MetricPattern24, } -impl CatalogTree_Computed_Inputs_Spent { +impl CatalogTree_Inputs_Spent { pub fn new(client: Arc, base_path: String) -> Self { Self { - txoutindex: MetricPattern27::new(client.clone(), format!("{base_path}_txoutindex")), - value: MetricPattern27::new(client.clone(), format!("{base_path}_value")), + txoutindex: MetricPattern24::new(client.clone(), format!("{base_path}_txoutindex")), + value: MetricPattern24::new(client.clone(), format!("{base_path}_value")), } } } /// Catalog tree node. -pub struct CatalogTree_Computed_Market { - pub ath: CatalogTree_Computed_Market_Ath, - pub dca: CatalogTree_Computed_Market_Dca, - pub indicators: CatalogTree_Computed_Market_Indicators, - pub lookback: CatalogTree_Computed_Market_Lookback, - pub moving_average: CatalogTree_Computed_Market_MovingAverage, - pub range: CatalogTree_Computed_Market_Range, - pub returns: CatalogTree_Computed_Market_Returns, - pub volatility: CatalogTree_Computed_Market_Volatility, +pub struct CatalogTree_Market { + pub ath: CatalogTree_Market_Ath, + pub dca: CatalogTree_Market_Dca, + pub indicators: CatalogTree_Market_Indicators, + pub lookback: CatalogTree_Market_Lookback, + pub moving_average: CatalogTree_Market_MovingAverage, + pub range: CatalogTree_Market_Range, + pub returns: CatalogTree_Market_Returns, + pub volatility: CatalogTree_Market_Volatility, } -impl CatalogTree_Computed_Market { +impl CatalogTree_Market { pub fn new(client: Arc, base_path: String) -> Self { Self { - ath: CatalogTree_Computed_Market_Ath::new(client.clone(), format!("{base_path}_ath")), - dca: CatalogTree_Computed_Market_Dca::new(client.clone(), format!("{base_path}_dca")), - indicators: CatalogTree_Computed_Market_Indicators::new(client.clone(), format!("{base_path}_indicators")), - lookback: 
CatalogTree_Computed_Market_Lookback::new(client.clone(), format!("{base_path}_lookback")), - moving_average: CatalogTree_Computed_Market_MovingAverage::new(client.clone(), format!("{base_path}_moving_average")), - range: CatalogTree_Computed_Market_Range::new(client.clone(), format!("{base_path}_range")), - returns: CatalogTree_Computed_Market_Returns::new(client.clone(), format!("{base_path}_returns")), - volatility: CatalogTree_Computed_Market_Volatility::new(client.clone(), format!("{base_path}_volatility")), + ath: CatalogTree_Market_Ath::new(client.clone(), format!("{base_path}_ath")), + dca: CatalogTree_Market_Dca::new(client.clone(), format!("{base_path}_dca")), + indicators: CatalogTree_Market_Indicators::new(client.clone(), format!("{base_path}_indicators")), + lookback: CatalogTree_Market_Lookback::new(client.clone(), format!("{base_path}_lookback")), + moving_average: CatalogTree_Market_MovingAverage::new(client.clone(), format!("{base_path}_moving_average")), + range: CatalogTree_Market_Range::new(client.clone(), format!("{base_path}_range")), + returns: CatalogTree_Market_Returns::new(client.clone(), format!("{base_path}_returns")), + volatility: CatalogTree_Market_Volatility::new(client.clone(), format!("{base_path}_volatility")), } } } /// Catalog tree node. 
-pub struct CatalogTree_Computed_Market_Ath { - pub days_since_price_ath: MetricPattern5, - pub max_days_between_price_aths: MetricPattern5, - pub max_years_between_price_aths: MetricPattern5, +pub struct CatalogTree_Market_Ath { + pub days_since_price_ath: MetricPattern4, + pub max_days_between_price_aths: MetricPattern4, + pub max_years_between_price_aths: MetricPattern4, pub price_ath: MetricPattern3, pub price_drawdown: MetricPattern3, - pub years_since_price_ath: MetricPattern5, + pub years_since_price_ath: MetricPattern4, } -impl CatalogTree_Computed_Market_Ath { +impl CatalogTree_Market_Ath { pub fn new(client: Arc, base_path: String) -> Self { Self { - days_since_price_ath: MetricPattern5::new(client.clone(), format!("{base_path}_days_since_price_ath")), - max_days_between_price_aths: MetricPattern5::new(client.clone(), format!("{base_path}_max_days_between_price_aths")), - max_years_between_price_aths: MetricPattern5::new(client.clone(), format!("{base_path}_max_years_between_price_aths")), + days_since_price_ath: MetricPattern4::new(client.clone(), format!("{base_path}_days_since_price_ath")), + max_days_between_price_aths: MetricPattern4::new(client.clone(), format!("{base_path}_max_days_between_price_aths")), + max_years_between_price_aths: MetricPattern4::new(client.clone(), format!("{base_path}_max_years_between_price_aths")), price_ath: MetricPattern3::new(client.clone(), format!("{base_path}_price_ath")), price_drawdown: MetricPattern3::new(client.clone(), format!("{base_path}_price_drawdown")), - years_since_price_ath: MetricPattern5::new(client.clone(), format!("{base_path}_years_since_price_ath")), + years_since_price_ath: MetricPattern4::new(client.clone(), format!("{base_path}_years_since_price_ath")), } } } /// Catalog tree node. 
-pub struct CatalogTree_Computed_Market_Dca { +pub struct CatalogTree_Market_Dca { pub class_average_price: ClassAveragePricePattern, pub class_returns: ClassAveragePricePattern, - pub class_stack: CatalogTree_Computed_Market_Dca_ClassStack, + pub class_stack: CatalogTree_Market_Dca_ClassStack, pub period_average_price: PeriodAveragePricePattern, pub period_cagr: PeriodCagrPattern, pub period_lump_sum_stack: PeriodLumpSumStackPattern, @@ -5960,12 +5639,12 @@ pub struct CatalogTree_Computed_Market_Dca { pub period_stack: PeriodLumpSumStackPattern, } -impl CatalogTree_Computed_Market_Dca { +impl CatalogTree_Market_Dca { pub fn new(client: Arc, base_path: String) -> Self { Self { class_average_price: ClassAveragePricePattern::new(client.clone(), "dca_class".to_string()), class_returns: ClassAveragePricePattern::new(client.clone(), "dca_class".to_string()), - class_stack: CatalogTree_Computed_Market_Dca_ClassStack::new(client.clone(), format!("{base_path}_class_stack")), + class_stack: CatalogTree_Market_Dca_ClassStack::new(client.clone(), format!("{base_path}_class_stack")), period_average_price: PeriodAveragePricePattern::new(client.clone(), "dca_average_price".to_string()), period_cagr: PeriodCagrPattern::new(client.clone(), "dca_cagr".to_string()), period_lump_sum_stack: PeriodLumpSumStackPattern::new(client.clone(), "".to_string()), @@ -5976,7 +5655,7 @@ impl CatalogTree_Computed_Market_Dca { } /// Catalog tree node. 
-pub struct CatalogTree_Computed_Market_Dca_ClassStack { +pub struct CatalogTree_Market_Dca_ClassStack { pub _2015: ActiveSupplyPattern, pub _2016: ActiveSupplyPattern, pub _2017: ActiveSupplyPattern, @@ -5990,7 +5669,7 @@ pub struct CatalogTree_Computed_Market_Dca_ClassStack { pub _2025: ActiveSupplyPattern, } -impl CatalogTree_Computed_Market_Dca_ClassStack { +impl CatalogTree_Market_Dca_ClassStack { pub fn new(client: Arc, base_path: String) -> Self { Self { _2015: ActiveSupplyPattern::new(client.clone(), "dca_class_2015_stack".to_string()), @@ -6009,60 +5688,60 @@ impl CatalogTree_Computed_Market_Dca_ClassStack { } /// Catalog tree node. -pub struct CatalogTree_Computed_Market_Indicators { - pub gini: MetricPattern22, - pub macd_histogram: MetricPattern22, - pub macd_line: MetricPattern22, - pub macd_signal: MetricPattern22, - pub nvt: MetricPattern5, - pub pi_cycle: MetricPattern22, - pub puell_multiple: MetricPattern5, - pub rsi_14d: MetricPattern22, - pub rsi_14d_max: MetricPattern22, - pub rsi_14d_min: MetricPattern22, - pub rsi_average_gain_14d: MetricPattern22, - pub rsi_average_loss_14d: MetricPattern22, - pub rsi_gains: MetricPattern22, - pub rsi_losses: MetricPattern22, - pub stoch_d: MetricPattern22, - pub stoch_k: MetricPattern22, - pub stoch_rsi: MetricPattern22, - pub stoch_rsi_d: MetricPattern22, - pub stoch_rsi_k: MetricPattern22, +pub struct CatalogTree_Market_Indicators { + pub gini: MetricPattern19, + pub macd_histogram: MetricPattern19, + pub macd_line: MetricPattern19, + pub macd_signal: MetricPattern19, + pub nvt: MetricPattern4, + pub pi_cycle: MetricPattern19, + pub puell_multiple: MetricPattern4, + pub rsi_14d: MetricPattern19, + pub rsi_14d_max: MetricPattern19, + pub rsi_14d_min: MetricPattern19, + pub rsi_average_gain_14d: MetricPattern19, + pub rsi_average_loss_14d: MetricPattern19, + pub rsi_gains: MetricPattern19, + pub rsi_losses: MetricPattern19, + pub stoch_d: MetricPattern19, + pub stoch_k: MetricPattern19, + pub stoch_rsi: 
MetricPattern19, + pub stoch_rsi_d: MetricPattern19, + pub stoch_rsi_k: MetricPattern19, } -impl CatalogTree_Computed_Market_Indicators { +impl CatalogTree_Market_Indicators { pub fn new(client: Arc, base_path: String) -> Self { Self { - gini: MetricPattern22::new(client.clone(), format!("{base_path}_gini")), - macd_histogram: MetricPattern22::new(client.clone(), format!("{base_path}_macd_histogram")), - macd_line: MetricPattern22::new(client.clone(), format!("{base_path}_macd_line")), - macd_signal: MetricPattern22::new(client.clone(), format!("{base_path}_macd_signal")), - nvt: MetricPattern5::new(client.clone(), format!("{base_path}_nvt")), - pi_cycle: MetricPattern22::new(client.clone(), format!("{base_path}_pi_cycle")), - puell_multiple: MetricPattern5::new(client.clone(), format!("{base_path}_puell_multiple")), - rsi_14d: MetricPattern22::new(client.clone(), format!("{base_path}_rsi_14d")), - rsi_14d_max: MetricPattern22::new(client.clone(), format!("{base_path}_rsi_14d_max")), - rsi_14d_min: MetricPattern22::new(client.clone(), format!("{base_path}_rsi_14d_min")), - rsi_average_gain_14d: MetricPattern22::new(client.clone(), format!("{base_path}_rsi_average_gain_14d")), - rsi_average_loss_14d: MetricPattern22::new(client.clone(), format!("{base_path}_rsi_average_loss_14d")), - rsi_gains: MetricPattern22::new(client.clone(), format!("{base_path}_rsi_gains")), - rsi_losses: MetricPattern22::new(client.clone(), format!("{base_path}_rsi_losses")), - stoch_d: MetricPattern22::new(client.clone(), format!("{base_path}_stoch_d")), - stoch_k: MetricPattern22::new(client.clone(), format!("{base_path}_stoch_k")), - stoch_rsi: MetricPattern22::new(client.clone(), format!("{base_path}_stoch_rsi")), - stoch_rsi_d: MetricPattern22::new(client.clone(), format!("{base_path}_stoch_rsi_d")), - stoch_rsi_k: MetricPattern22::new(client.clone(), format!("{base_path}_stoch_rsi_k")), + gini: MetricPattern19::new(client.clone(), format!("{base_path}_gini")), + macd_histogram: 
MetricPattern19::new(client.clone(), format!("{base_path}_macd_histogram")), + macd_line: MetricPattern19::new(client.clone(), format!("{base_path}_macd_line")), + macd_signal: MetricPattern19::new(client.clone(), format!("{base_path}_macd_signal")), + nvt: MetricPattern4::new(client.clone(), format!("{base_path}_nvt")), + pi_cycle: MetricPattern19::new(client.clone(), format!("{base_path}_pi_cycle")), + puell_multiple: MetricPattern4::new(client.clone(), format!("{base_path}_puell_multiple")), + rsi_14d: MetricPattern19::new(client.clone(), format!("{base_path}_rsi_14d")), + rsi_14d_max: MetricPattern19::new(client.clone(), format!("{base_path}_rsi_14d_max")), + rsi_14d_min: MetricPattern19::new(client.clone(), format!("{base_path}_rsi_14d_min")), + rsi_average_gain_14d: MetricPattern19::new(client.clone(), format!("{base_path}_rsi_average_gain_14d")), + rsi_average_loss_14d: MetricPattern19::new(client.clone(), format!("{base_path}_rsi_average_loss_14d")), + rsi_gains: MetricPattern19::new(client.clone(), format!("{base_path}_rsi_gains")), + rsi_losses: MetricPattern19::new(client.clone(), format!("{base_path}_rsi_losses")), + stoch_d: MetricPattern19::new(client.clone(), format!("{base_path}_stoch_d")), + stoch_k: MetricPattern19::new(client.clone(), format!("{base_path}_stoch_k")), + stoch_rsi: MetricPattern19::new(client.clone(), format!("{base_path}_stoch_rsi")), + stoch_rsi_d: MetricPattern19::new(client.clone(), format!("{base_path}_stoch_rsi_d")), + stoch_rsi_k: MetricPattern19::new(client.clone(), format!("{base_path}_stoch_rsi_k")), } } } /// Catalog tree node. 
-pub struct CatalogTree_Computed_Market_Lookback { +pub struct CatalogTree_Market_Lookback { pub price_ago: PriceAgoPattern, } -impl CatalogTree_Computed_Market_Lookback { +impl CatalogTree_Market_Lookback { pub fn new(client: Arc, base_path: String) -> Self { Self { price_ago: PriceAgoPattern::new(client.clone(), "price".to_string()), @@ -6071,7 +5750,7 @@ impl CatalogTree_Computed_Market_Lookback { } /// Catalog tree node. -pub struct CatalogTree_Computed_Market_MovingAverage { +pub struct CatalogTree_Market_MovingAverage { pub price_111d_sma: Price111dSmaPattern, pub price_12d_ema: Price111dSmaPattern, pub price_13d_ema: Price111dSmaPattern, @@ -6086,8 +5765,8 @@ pub struct CatalogTree_Computed_Market_MovingAverage { pub price_1y_sma: Price111dSmaPattern, pub price_200d_ema: Price111dSmaPattern, pub price_200d_sma: Price111dSmaPattern, - pub price_200d_sma_x0_8: MetricPattern5, - pub price_200d_sma_x2_4: MetricPattern5, + pub price_200d_sma_x0_8: MetricPattern4, + pub price_200d_sma_x2_4: MetricPattern4, pub price_200w_ema: Price111dSmaPattern, pub price_200w_sma: Price111dSmaPattern, pub price_21d_ema: Price111dSmaPattern, @@ -6098,7 +5777,7 @@ pub struct CatalogTree_Computed_Market_MovingAverage { pub price_34d_ema: Price111dSmaPattern, pub price_34d_sma: Price111dSmaPattern, pub price_350d_sma: Price111dSmaPattern, - pub price_350d_sma_x2: MetricPattern5, + pub price_350d_sma_x2: MetricPattern4, pub price_4y_ema: Price111dSmaPattern, pub price_4y_sma: Price111dSmaPattern, pub price_55d_ema: Price111dSmaPattern, @@ -6109,7 +5788,7 @@ pub struct CatalogTree_Computed_Market_MovingAverage { pub price_8d_sma: Price111dSmaPattern, } -impl CatalogTree_Computed_Market_MovingAverage { +impl CatalogTree_Market_MovingAverage { pub fn new(client: Arc, base_path: String) -> Self { Self { price_111d_sma: Price111dSmaPattern::new(client.clone(), "price_111d_sma".to_string()), @@ -6126,8 +5805,8 @@ impl CatalogTree_Computed_Market_MovingAverage { price_1y_sma: 
Price111dSmaPattern::new(client.clone(), "price_1y_sma".to_string()), price_200d_ema: Price111dSmaPattern::new(client.clone(), "price_200d_ema".to_string()), price_200d_sma: Price111dSmaPattern::new(client.clone(), "price_200d_sma".to_string()), - price_200d_sma_x0_8: MetricPattern5::new(client.clone(), format!("{base_path}_price_200d_sma_x0_8")), - price_200d_sma_x2_4: MetricPattern5::new(client.clone(), format!("{base_path}_price_200d_sma_x2_4")), + price_200d_sma_x0_8: MetricPattern4::new(client.clone(), format!("{base_path}_price_200d_sma_x0_8")), + price_200d_sma_x2_4: MetricPattern4::new(client.clone(), format!("{base_path}_price_200d_sma_x2_4")), price_200w_ema: Price111dSmaPattern::new(client.clone(), "price_200w_ema".to_string()), price_200w_sma: Price111dSmaPattern::new(client.clone(), "price_200w_sma".to_string()), price_21d_ema: Price111dSmaPattern::new(client.clone(), "price_21d_ema".to_string()), @@ -6138,7 +5817,7 @@ impl CatalogTree_Computed_Market_MovingAverage { price_34d_ema: Price111dSmaPattern::new(client.clone(), "price_34d_ema".to_string()), price_34d_sma: Price111dSmaPattern::new(client.clone(), "price_34d_sma".to_string()), price_350d_sma: Price111dSmaPattern::new(client.clone(), "price_350d_sma".to_string()), - price_350d_sma_x2: MetricPattern5::new(client.clone(), format!("{base_path}_price_350d_sma_x2")), + price_350d_sma_x2: MetricPattern4::new(client.clone(), format!("{base_path}_price_350d_sma_x2")), price_4y_ema: Price111dSmaPattern::new(client.clone(), "price_4y_ema".to_string()), price_4y_sma: Price111dSmaPattern::new(client.clone(), "price_4y_sma".to_string()), price_55d_ema: Price111dSmaPattern::new(client.clone(), "price_55d_ema".to_string()), @@ -6152,40 +5831,40 @@ impl CatalogTree_Computed_Market_MovingAverage { } /// Catalog tree node. 
-pub struct CatalogTree_Computed_Market_Range { - pub price_1m_max: MetricPattern5, - pub price_1m_min: MetricPattern5, - pub price_1w_max: MetricPattern5, - pub price_1w_min: MetricPattern5, - pub price_1y_max: MetricPattern5, - pub price_1y_min: MetricPattern5, - pub price_2w_choppiness_index: MetricPattern5, - pub price_2w_max: MetricPattern5, - pub price_2w_min: MetricPattern5, - pub price_true_range: MetricPattern22, - pub price_true_range_2w_sum: MetricPattern22, +pub struct CatalogTree_Market_Range { + pub price_1m_max: MetricPattern4, + pub price_1m_min: MetricPattern4, + pub price_1w_max: MetricPattern4, + pub price_1w_min: MetricPattern4, + pub price_1y_max: MetricPattern4, + pub price_1y_min: MetricPattern4, + pub price_2w_choppiness_index: MetricPattern4, + pub price_2w_max: MetricPattern4, + pub price_2w_min: MetricPattern4, + pub price_true_range: MetricPattern19, + pub price_true_range_2w_sum: MetricPattern19, } -impl CatalogTree_Computed_Market_Range { +impl CatalogTree_Market_Range { pub fn new(client: Arc, base_path: String) -> Self { Self { - price_1m_max: MetricPattern5::new(client.clone(), format!("{base_path}_price_1m_max")), - price_1m_min: MetricPattern5::new(client.clone(), format!("{base_path}_price_1m_min")), - price_1w_max: MetricPattern5::new(client.clone(), format!("{base_path}_price_1w_max")), - price_1w_min: MetricPattern5::new(client.clone(), format!("{base_path}_price_1w_min")), - price_1y_max: MetricPattern5::new(client.clone(), format!("{base_path}_price_1y_max")), - price_1y_min: MetricPattern5::new(client.clone(), format!("{base_path}_price_1y_min")), - price_2w_choppiness_index: MetricPattern5::new(client.clone(), format!("{base_path}_price_2w_choppiness_index")), - price_2w_max: MetricPattern5::new(client.clone(), format!("{base_path}_price_2w_max")), - price_2w_min: MetricPattern5::new(client.clone(), format!("{base_path}_price_2w_min")), - price_true_range: MetricPattern22::new(client.clone(), 
format!("{base_path}_price_true_range")), - price_true_range_2w_sum: MetricPattern22::new(client.clone(), format!("{base_path}_price_true_range_2w_sum")), + price_1m_max: MetricPattern4::new(client.clone(), format!("{base_path}_price_1m_max")), + price_1m_min: MetricPattern4::new(client.clone(), format!("{base_path}_price_1m_min")), + price_1w_max: MetricPattern4::new(client.clone(), format!("{base_path}_price_1w_max")), + price_1w_min: MetricPattern4::new(client.clone(), format!("{base_path}_price_1w_min")), + price_1y_max: MetricPattern4::new(client.clone(), format!("{base_path}_price_1y_max")), + price_1y_min: MetricPattern4::new(client.clone(), format!("{base_path}_price_1y_min")), + price_2w_choppiness_index: MetricPattern4::new(client.clone(), format!("{base_path}_price_2w_choppiness_index")), + price_2w_max: MetricPattern4::new(client.clone(), format!("{base_path}_price_2w_max")), + price_2w_min: MetricPattern4::new(client.clone(), format!("{base_path}_price_2w_min")), + price_true_range: MetricPattern19::new(client.clone(), format!("{base_path}_price_true_range")), + price_true_range_2w_sum: MetricPattern19::new(client.clone(), format!("{base_path}_price_true_range_2w_sum")), } } } /// Catalog tree node. 
-pub struct CatalogTree_Computed_Market_Returns { +pub struct CatalogTree_Market_Returns { pub _1d_returns_1m_sd: _1dReturns1mSdPattern, pub _1d_returns_1w_sd: _1dReturns1mSdPattern, pub _1d_returns_1y_sd: _1dReturns1mSdPattern, @@ -6193,11 +5872,11 @@ pub struct CatalogTree_Computed_Market_Returns { pub downside_1m_sd: _1dReturns1mSdPattern, pub downside_1w_sd: _1dReturns1mSdPattern, pub downside_1y_sd: _1dReturns1mSdPattern, - pub downside_returns: MetricPattern22, + pub downside_returns: MetricPattern19, pub price_returns: PriceAgoPattern, } -impl CatalogTree_Computed_Market_Returns { +impl CatalogTree_Market_Returns { pub fn new(client: Arc, base_path: String) -> Self { Self { _1d_returns_1m_sd: _1dReturns1mSdPattern::new(client.clone(), "1d_returns_1m_sd".to_string()), @@ -6207,616 +5886,642 @@ impl CatalogTree_Computed_Market_Returns { downside_1m_sd: _1dReturns1mSdPattern::new(client.clone(), "downside_1m_sd".to_string()), downside_1w_sd: _1dReturns1mSdPattern::new(client.clone(), "downside_1w_sd".to_string()), downside_1y_sd: _1dReturns1mSdPattern::new(client.clone(), "downside_1y_sd".to_string()), - downside_returns: MetricPattern22::new(client.clone(), format!("{base_path}_downside_returns")), + downside_returns: MetricPattern19::new(client.clone(), format!("{base_path}_downside_returns")), price_returns: PriceAgoPattern::new(client.clone(), "price_returns".to_string()), } } } /// Catalog tree node. 
-pub struct CatalogTree_Computed_Market_Volatility { - pub price_1m_volatility: MetricPattern5, - pub price_1w_volatility: MetricPattern5, - pub price_1y_volatility: MetricPattern5, - pub sharpe_1m: MetricPattern22, - pub sharpe_1w: MetricPattern22, - pub sharpe_1y: MetricPattern22, - pub sortino_1m: MetricPattern22, - pub sortino_1w: MetricPattern22, - pub sortino_1y: MetricPattern22, +pub struct CatalogTree_Market_Volatility { + pub price_1m_volatility: MetricPattern4, + pub price_1w_volatility: MetricPattern4, + pub price_1y_volatility: MetricPattern4, + pub sharpe_1m: MetricPattern19, + pub sharpe_1w: MetricPattern19, + pub sharpe_1y: MetricPattern19, + pub sortino_1m: MetricPattern19, + pub sortino_1w: MetricPattern19, + pub sortino_1y: MetricPattern19, } -impl CatalogTree_Computed_Market_Volatility { +impl CatalogTree_Market_Volatility { pub fn new(client: Arc, base_path: String) -> Self { Self { - price_1m_volatility: MetricPattern5::new(client.clone(), format!("{base_path}_price_1m_volatility")), - price_1w_volatility: MetricPattern5::new(client.clone(), format!("{base_path}_price_1w_volatility")), - price_1y_volatility: MetricPattern5::new(client.clone(), format!("{base_path}_price_1y_volatility")), - sharpe_1m: MetricPattern22::new(client.clone(), format!("{base_path}_sharpe_1m")), - sharpe_1w: MetricPattern22::new(client.clone(), format!("{base_path}_sharpe_1w")), - sharpe_1y: MetricPattern22::new(client.clone(), format!("{base_path}_sharpe_1y")), - sortino_1m: MetricPattern22::new(client.clone(), format!("{base_path}_sortino_1m")), - sortino_1w: MetricPattern22::new(client.clone(), format!("{base_path}_sortino_1w")), - sortino_1y: MetricPattern22::new(client.clone(), format!("{base_path}_sortino_1y")), + price_1m_volatility: MetricPattern4::new(client.clone(), format!("{base_path}_price_1m_volatility")), + price_1w_volatility: MetricPattern4::new(client.clone(), format!("{base_path}_price_1w_volatility")), + price_1y_volatility: 
MetricPattern4::new(client.clone(), format!("{base_path}_price_1y_volatility")), + sharpe_1m: MetricPattern19::new(client.clone(), format!("{base_path}_sharpe_1m")), + sharpe_1w: MetricPattern19::new(client.clone(), format!("{base_path}_sharpe_1w")), + sharpe_1y: MetricPattern19::new(client.clone(), format!("{base_path}_sharpe_1y")), + sortino_1m: MetricPattern19::new(client.clone(), format!("{base_path}_sortino_1m")), + sortino_1w: MetricPattern19::new(client.clone(), format!("{base_path}_sortino_1w")), + sortino_1y: MetricPattern19::new(client.clone(), format!("{base_path}_sortino_1y")), } } } /// Catalog tree node. -pub struct CatalogTree_Computed_Outputs { - pub count: CatalogTree_Computed_Outputs_Count, - pub spent: CatalogTree_Computed_Outputs_Spent, +pub struct CatalogTree_Outputs { + pub count: CatalogTree_Outputs_Count, + pub first_txoutindex: MetricPattern23, + pub outputtype: MetricPattern26, + pub spent: CatalogTree_Outputs_Spent, + pub txindex: MetricPattern26, + pub typeindex: MetricPattern26, + pub value: MetricPattern26, } -impl CatalogTree_Computed_Outputs { +impl CatalogTree_Outputs { pub fn new(client: Arc, base_path: String) -> Self { Self { - count: CatalogTree_Computed_Outputs_Count::new(client.clone(), format!("{base_path}_count")), - spent: CatalogTree_Computed_Outputs_Spent::new(client.clone(), format!("{base_path}_spent")), + count: CatalogTree_Outputs_Count::new(client.clone(), format!("{base_path}_count")), + first_txoutindex: MetricPattern23::new(client.clone(), format!("{base_path}_first_txoutindex")), + outputtype: MetricPattern26::new(client.clone(), format!("{base_path}_outputtype")), + spent: CatalogTree_Outputs_Spent::new(client.clone(), format!("{base_path}_spent")), + txindex: MetricPattern26::new(client.clone(), format!("{base_path}_txindex")), + typeindex: MetricPattern26::new(client.clone(), format!("{base_path}_typeindex")), + value: MetricPattern26::new(client.clone(), format!("{base_path}_value")), } } } /// Catalog tree 
node. -pub struct CatalogTree_Computed_Outputs_Count { - pub count: CountPattern2, - pub utxo_count: DollarsPattern, +pub struct CatalogTree_Outputs_Count { + pub count: BlockSizePattern, + pub utxo_count: BitcoinPattern, } -impl CatalogTree_Computed_Outputs_Count { +impl CatalogTree_Outputs_Count { pub fn new(client: Arc, base_path: String) -> Self { Self { - count: CountPattern2::new(client.clone(), "output_count".to_string()), - utxo_count: DollarsPattern::new(client.clone(), "exact_utxo_count".to_string()), + count: BlockSizePattern::new(client.clone(), "output_count".to_string()), + utxo_count: BitcoinPattern::new(client.clone(), "exact_utxo_count".to_string()), } } } /// Catalog tree node. -pub struct CatalogTree_Computed_Outputs_Spent { - pub txinindex: MetricPattern29, +pub struct CatalogTree_Outputs_Spent { + pub txinindex: MetricPattern26, } -impl CatalogTree_Computed_Outputs_Spent { +impl CatalogTree_Outputs_Spent { pub fn new(client: Arc, base_path: String) -> Self { Self { - txinindex: MetricPattern29::new(client.clone(), format!("{base_path}_txinindex")), + txinindex: MetricPattern26::new(client.clone(), format!("{base_path}_txinindex")), } } } /// Catalog tree node. -pub struct CatalogTree_Computed_Pools { - pub pool: MetricPattern26, - pub vecs: CatalogTree_Computed_Pools_Vecs, +pub struct CatalogTree_Pools { + pub pool: MetricPattern23, + pub vecs: CatalogTree_Pools_Vecs, } -impl CatalogTree_Computed_Pools { +impl CatalogTree_Pools { pub fn new(client: Arc, base_path: String) -> Self { Self { - pool: MetricPattern26::new(client.clone(), format!("{base_path}_pool")), - vecs: CatalogTree_Computed_Pools_Vecs::new(client.clone(), format!("{base_path}_vecs")), + pool: MetricPattern23::new(client.clone(), format!("{base_path}_pool")), + vecs: CatalogTree_Pools_Vecs::new(client.clone(), format!("{base_path}_vecs")), } } } /// Catalog tree node. 
-pub struct CatalogTree_Computed_Pools_Vecs { - pub axbt: AXbtPattern, - pub aaopool: AXbtPattern, - pub antpool: AXbtPattern, - pub arkpool: AXbtPattern, - pub asicminer: AXbtPattern, - pub batpool: AXbtPattern, - pub bcmonster: AXbtPattern, - pub bcpoolio: AXbtPattern, - pub binancepool: AXbtPattern, - pub bitclub: AXbtPattern, - pub bitfufupool: AXbtPattern, - pub bitfury: AXbtPattern, - pub bitminter: AXbtPattern, - pub bitalo: AXbtPattern, - pub bitcoinaffiliatenetwork: AXbtPattern, - pub bitcoincom: AXbtPattern, - pub bitcoinindia: AXbtPattern, - pub bitcoinrussia: AXbtPattern, - pub bitcoinukraine: AXbtPattern, - pub bitfarms: AXbtPattern, - pub bitparking: AXbtPattern, - pub bitsolo: AXbtPattern, - pub bixin: AXbtPattern, - pub blockfills: AXbtPattern, - pub braiinspool: AXbtPattern, - pub bravomining: AXbtPattern, - pub btpool: AXbtPattern, - pub btccom: AXbtPattern, - pub btcdig: AXbtPattern, - pub btcguild: AXbtPattern, - pub btclab: AXbtPattern, - pub btcmp: AXbtPattern, - pub btcnuggets: AXbtPattern, - pub btcpoolparty: AXbtPattern, - pub btcserv: AXbtPattern, - pub btctop: AXbtPattern, - pub btcc: AXbtPattern, - pub bwpool: AXbtPattern, - pub bytepool: AXbtPattern, - pub canoe: AXbtPattern, - pub canoepool: AXbtPattern, - pub carbonnegative: AXbtPattern, - pub ckpool: AXbtPattern, - pub cloudhashing: AXbtPattern, - pub coinlab: AXbtPattern, - pub cointerra: AXbtPattern, - pub connectbtc: AXbtPattern, - pub dpool: AXbtPattern, - pub dcexploration: AXbtPattern, - pub dcex: AXbtPattern, - pub digitalbtc: AXbtPattern, - pub digitalxmintsy: AXbtPattern, - pub eclipsemc: AXbtPattern, - pub eightbaochi: AXbtPattern, - pub ekanembtc: AXbtPattern, - pub eligius: AXbtPattern, - pub emcdpool: AXbtPattern, - pub entrustcharitypool: AXbtPattern, - pub eobot: AXbtPattern, - pub exxbw: AXbtPattern, - pub f2pool: AXbtPattern, - pub fiftyeightcoin: AXbtPattern, - pub foundryusa: AXbtPattern, - pub futurebitapollosolo: AXbtPattern, - pub gbminers: AXbtPattern, - pub 
ghashio: AXbtPattern, - pub givemecoins: AXbtPattern, - pub gogreenlight: AXbtPattern, - pub haozhuzhu: AXbtPattern, - pub haominer: AXbtPattern, - pub hashbx: AXbtPattern, - pub hashpool: AXbtPattern, - pub helix: AXbtPattern, - pub hhtt: AXbtPattern, - pub hotpool: AXbtPattern, - pub hummerpool: AXbtPattern, - pub huobipool: AXbtPattern, - pub innopolistech: AXbtPattern, - pub kanopool: AXbtPattern, - pub kncminer: AXbtPattern, - pub kucoinpool: AXbtPattern, - pub lubiancom: AXbtPattern, - pub luckypool: AXbtPattern, - pub luxor: AXbtPattern, - pub marapool: AXbtPattern, - pub maxbtc: AXbtPattern, - pub maxipool: AXbtPattern, - pub megabigpower: AXbtPattern, - pub minerium: AXbtPattern, - pub miningcity: AXbtPattern, - pub miningdutch: AXbtPattern, - pub miningkings: AXbtPattern, - pub miningsquared: AXbtPattern, - pub mmpool: AXbtPattern, - pub mtred: AXbtPattern, - pub multicoinco: AXbtPattern, - pub multipool: AXbtPattern, - pub mybtccoinpool: AXbtPattern, - pub neopool: AXbtPattern, - pub nexious: AXbtPattern, - pub nicehash: AXbtPattern, - pub nmcbit: AXbtPattern, - pub novablock: AXbtPattern, - pub ocean: AXbtPattern, - pub okexpool: AXbtPattern, - pub okminer: AXbtPattern, - pub okkong: AXbtPattern, - pub okpooltop: AXbtPattern, - pub onehash: AXbtPattern, - pub onem1x: AXbtPattern, - pub onethash: AXbtPattern, - pub ozcoin: AXbtPattern, - pub phashio: AXbtPattern, - pub parasite: AXbtPattern, - pub patels: AXbtPattern, - pub pegapool: AXbtPattern, - pub phoenix: AXbtPattern, - pub polmine: AXbtPattern, - pub pool175btc: AXbtPattern, - pub pool50btc: AXbtPattern, - pub poolin: AXbtPattern, - pub portlandhodl: AXbtPattern, - pub publicpool: AXbtPattern, - pub purebtccom: AXbtPattern, - pub rawpool: AXbtPattern, - pub rigpool: AXbtPattern, - pub sbicrypto: AXbtPattern, - pub secpool: AXbtPattern, - pub secretsuperstar: AXbtPattern, - pub sevenpool: AXbtPattern, - pub shawnp0wers: AXbtPattern, - pub sigmapoolcom: AXbtPattern, - pub simplecoinus: AXbtPattern, 
- pub solock: AXbtPattern, - pub spiderpool: AXbtPattern, - pub stminingcorp: AXbtPattern, - pub tangpool: AXbtPattern, - pub tatmaspool: AXbtPattern, - pub tbdice: AXbtPattern, - pub telco214: AXbtPattern, - pub terrapool: AXbtPattern, - pub tiger: AXbtPattern, - pub tigerpoolnet: AXbtPattern, - pub titan: AXbtPattern, - pub transactioncoinmining: AXbtPattern, - pub trickysbtcpool: AXbtPattern, - pub triplemining: AXbtPattern, - pub twentyoneinc: AXbtPattern, - pub ultimuspool: AXbtPattern, - pub unknown: AXbtPattern, - pub unomp: AXbtPattern, - pub viabtc: AXbtPattern, - pub waterhole: AXbtPattern, - pub wayicn: AXbtPattern, - pub whitepool: AXbtPattern, - pub wk057: AXbtPattern, - pub yourbtcnet: AXbtPattern, - pub zulupool: AXbtPattern, +pub struct CatalogTree_Pools_Vecs { + pub aaopool: AaopoolPattern, + pub antpool: AaopoolPattern, + pub arkpool: AaopoolPattern, + pub asicminer: AaopoolPattern, + pub axbt: AaopoolPattern, + pub batpool: AaopoolPattern, + pub bcmonster: AaopoolPattern, + pub bcpoolio: AaopoolPattern, + pub binancepool: AaopoolPattern, + pub bitalo: AaopoolPattern, + pub bitclub: AaopoolPattern, + pub bitcoinaffiliatenetwork: AaopoolPattern, + pub bitcoincom: AaopoolPattern, + pub bitcoinindia: AaopoolPattern, + pub bitcoinrussia: AaopoolPattern, + pub bitcoinukraine: AaopoolPattern, + pub bitfarms: AaopoolPattern, + pub bitfufupool: AaopoolPattern, + pub bitfury: AaopoolPattern, + pub bitminter: AaopoolPattern, + pub bitparking: AaopoolPattern, + pub bitsolo: AaopoolPattern, + pub bixin: AaopoolPattern, + pub blockfills: AaopoolPattern, + pub braiinspool: AaopoolPattern, + pub bravomining: AaopoolPattern, + pub btcc: AaopoolPattern, + pub btccom: AaopoolPattern, + pub btcdig: AaopoolPattern, + pub btcguild: AaopoolPattern, + pub btclab: AaopoolPattern, + pub btcmp: AaopoolPattern, + pub btcnuggets: AaopoolPattern, + pub btcpoolparty: AaopoolPattern, + pub btcserv: AaopoolPattern, + pub btctop: AaopoolPattern, + pub btpool: AaopoolPattern, + 
pub bwpool: AaopoolPattern, + pub bytepool: AaopoolPattern, + pub canoe: AaopoolPattern, + pub canoepool: AaopoolPattern, + pub carbonnegative: AaopoolPattern, + pub ckpool: AaopoolPattern, + pub cloudhashing: AaopoolPattern, + pub coinlab: AaopoolPattern, + pub cointerra: AaopoolPattern, + pub connectbtc: AaopoolPattern, + pub dcex: AaopoolPattern, + pub dcexploration: AaopoolPattern, + pub digitalbtc: AaopoolPattern, + pub digitalxmintsy: AaopoolPattern, + pub dpool: AaopoolPattern, + pub eclipsemc: AaopoolPattern, + pub eightbaochi: AaopoolPattern, + pub ekanembtc: AaopoolPattern, + pub eligius: AaopoolPattern, + pub emcdpool: AaopoolPattern, + pub entrustcharitypool: AaopoolPattern, + pub eobot: AaopoolPattern, + pub exxbw: AaopoolPattern, + pub f2pool: AaopoolPattern, + pub fiftyeightcoin: AaopoolPattern, + pub foundryusa: AaopoolPattern, + pub futurebitapollosolo: AaopoolPattern, + pub gbminers: AaopoolPattern, + pub ghashio: AaopoolPattern, + pub givemecoins: AaopoolPattern, + pub gogreenlight: AaopoolPattern, + pub haominer: AaopoolPattern, + pub haozhuzhu: AaopoolPattern, + pub hashbx: AaopoolPattern, + pub hashpool: AaopoolPattern, + pub helix: AaopoolPattern, + pub hhtt: AaopoolPattern, + pub hotpool: AaopoolPattern, + pub hummerpool: AaopoolPattern, + pub huobipool: AaopoolPattern, + pub innopolistech: AaopoolPattern, + pub kanopool: AaopoolPattern, + pub kncminer: AaopoolPattern, + pub kucoinpool: AaopoolPattern, + pub lubiancom: AaopoolPattern, + pub luckypool: AaopoolPattern, + pub luxor: AaopoolPattern, + pub marapool: AaopoolPattern, + pub maxbtc: AaopoolPattern, + pub maxipool: AaopoolPattern, + pub megabigpower: AaopoolPattern, + pub minerium: AaopoolPattern, + pub miningcity: AaopoolPattern, + pub miningdutch: AaopoolPattern, + pub miningkings: AaopoolPattern, + pub miningsquared: AaopoolPattern, + pub mmpool: AaopoolPattern, + pub mtred: AaopoolPattern, + pub multicoinco: AaopoolPattern, + pub multipool: AaopoolPattern, + pub mybtccoinpool: 
AaopoolPattern, + pub neopool: AaopoolPattern, + pub nexious: AaopoolPattern, + pub nicehash: AaopoolPattern, + pub nmcbit: AaopoolPattern, + pub novablock: AaopoolPattern, + pub ocean: AaopoolPattern, + pub okexpool: AaopoolPattern, + pub okkong: AaopoolPattern, + pub okminer: AaopoolPattern, + pub okpooltop: AaopoolPattern, + pub onehash: AaopoolPattern, + pub onem1x: AaopoolPattern, + pub onethash: AaopoolPattern, + pub ozcoin: AaopoolPattern, + pub parasite: AaopoolPattern, + pub patels: AaopoolPattern, + pub pegapool: AaopoolPattern, + pub phashio: AaopoolPattern, + pub phoenix: AaopoolPattern, + pub polmine: AaopoolPattern, + pub pool175btc: AaopoolPattern, + pub pool50btc: AaopoolPattern, + pub poolin: AaopoolPattern, + pub portlandhodl: AaopoolPattern, + pub publicpool: AaopoolPattern, + pub purebtccom: AaopoolPattern, + pub rawpool: AaopoolPattern, + pub rigpool: AaopoolPattern, + pub sbicrypto: AaopoolPattern, + pub secpool: AaopoolPattern, + pub secretsuperstar: AaopoolPattern, + pub sevenpool: AaopoolPattern, + pub shawnp0wers: AaopoolPattern, + pub sigmapoolcom: AaopoolPattern, + pub simplecoinus: AaopoolPattern, + pub solock: AaopoolPattern, + pub spiderpool: AaopoolPattern, + pub stminingcorp: AaopoolPattern, + pub tangpool: AaopoolPattern, + pub tatmaspool: AaopoolPattern, + pub tbdice: AaopoolPattern, + pub telco214: AaopoolPattern, + pub terrapool: AaopoolPattern, + pub tiger: AaopoolPattern, + pub tigerpoolnet: AaopoolPattern, + pub titan: AaopoolPattern, + pub transactioncoinmining: AaopoolPattern, + pub trickysbtcpool: AaopoolPattern, + pub triplemining: AaopoolPattern, + pub twentyoneinc: AaopoolPattern, + pub ultimuspool: AaopoolPattern, + pub unknown: AaopoolPattern, + pub unomp: AaopoolPattern, + pub viabtc: AaopoolPattern, + pub waterhole: AaopoolPattern, + pub wayicn: AaopoolPattern, + pub whitepool: AaopoolPattern, + pub wk057: AaopoolPattern, + pub yourbtcnet: AaopoolPattern, + pub zulupool: AaopoolPattern, } -impl 
CatalogTree_Computed_Pools_Vecs { +impl CatalogTree_Pools_Vecs { pub fn new(client: Arc, base_path: String) -> Self { Self { - axbt: AXbtPattern::new(client.clone(), "axbt".to_string()), - aaopool: AXbtPattern::new(client.clone(), "aaopool".to_string()), - antpool: AXbtPattern::new(client.clone(), "antpool".to_string()), - arkpool: AXbtPattern::new(client.clone(), "arkpool".to_string()), - asicminer: AXbtPattern::new(client.clone(), "asicminer".to_string()), - batpool: AXbtPattern::new(client.clone(), "batpool".to_string()), - bcmonster: AXbtPattern::new(client.clone(), "bcmonster".to_string()), - bcpoolio: AXbtPattern::new(client.clone(), "bcpoolio".to_string()), - binancepool: AXbtPattern::new(client.clone(), "binancepool".to_string()), - bitclub: AXbtPattern::new(client.clone(), "bitclub".to_string()), - bitfufupool: AXbtPattern::new(client.clone(), "bitfufupool".to_string()), - bitfury: AXbtPattern::new(client.clone(), "bitfury".to_string()), - bitminter: AXbtPattern::new(client.clone(), "bitminter".to_string()), - bitalo: AXbtPattern::new(client.clone(), "bitalo".to_string()), - bitcoinaffiliatenetwork: AXbtPattern::new(client.clone(), "bitcoinaffiliatenetwork".to_string()), - bitcoincom: AXbtPattern::new(client.clone(), "bitcoincom".to_string()), - bitcoinindia: AXbtPattern::new(client.clone(), "bitcoinindia".to_string()), - bitcoinrussia: AXbtPattern::new(client.clone(), "bitcoinrussia".to_string()), - bitcoinukraine: AXbtPattern::new(client.clone(), "bitcoinukraine".to_string()), - bitfarms: AXbtPattern::new(client.clone(), "bitfarms".to_string()), - bitparking: AXbtPattern::new(client.clone(), "bitparking".to_string()), - bitsolo: AXbtPattern::new(client.clone(), "bitsolo".to_string()), - bixin: AXbtPattern::new(client.clone(), "bixin".to_string()), - blockfills: AXbtPattern::new(client.clone(), "blockfills".to_string()), - braiinspool: AXbtPattern::new(client.clone(), "braiinspool".to_string()), - bravomining: AXbtPattern::new(client.clone(), 
"bravomining".to_string()), - btpool: AXbtPattern::new(client.clone(), "btpool".to_string()), - btccom: AXbtPattern::new(client.clone(), "btccom".to_string()), - btcdig: AXbtPattern::new(client.clone(), "btcdig".to_string()), - btcguild: AXbtPattern::new(client.clone(), "btcguild".to_string()), - btclab: AXbtPattern::new(client.clone(), "btclab".to_string()), - btcmp: AXbtPattern::new(client.clone(), "btcmp".to_string()), - btcnuggets: AXbtPattern::new(client.clone(), "btcnuggets".to_string()), - btcpoolparty: AXbtPattern::new(client.clone(), "btcpoolparty".to_string()), - btcserv: AXbtPattern::new(client.clone(), "btcserv".to_string()), - btctop: AXbtPattern::new(client.clone(), "btctop".to_string()), - btcc: AXbtPattern::new(client.clone(), "btcc".to_string()), - bwpool: AXbtPattern::new(client.clone(), "bwpool".to_string()), - bytepool: AXbtPattern::new(client.clone(), "bytepool".to_string()), - canoe: AXbtPattern::new(client.clone(), "canoe".to_string()), - canoepool: AXbtPattern::new(client.clone(), "canoepool".to_string()), - carbonnegative: AXbtPattern::new(client.clone(), "carbonnegative".to_string()), - ckpool: AXbtPattern::new(client.clone(), "ckpool".to_string()), - cloudhashing: AXbtPattern::new(client.clone(), "cloudhashing".to_string()), - coinlab: AXbtPattern::new(client.clone(), "coinlab".to_string()), - cointerra: AXbtPattern::new(client.clone(), "cointerra".to_string()), - connectbtc: AXbtPattern::new(client.clone(), "connectbtc".to_string()), - dpool: AXbtPattern::new(client.clone(), "dpool".to_string()), - dcexploration: AXbtPattern::new(client.clone(), "dcexploration".to_string()), - dcex: AXbtPattern::new(client.clone(), "dcex".to_string()), - digitalbtc: AXbtPattern::new(client.clone(), "digitalbtc".to_string()), - digitalxmintsy: AXbtPattern::new(client.clone(), "digitalxmintsy".to_string()), - eclipsemc: AXbtPattern::new(client.clone(), "eclipsemc".to_string()), - eightbaochi: AXbtPattern::new(client.clone(), "eightbaochi".to_string()), - 
ekanembtc: AXbtPattern::new(client.clone(), "ekanembtc".to_string()), - eligius: AXbtPattern::new(client.clone(), "eligius".to_string()), - emcdpool: AXbtPattern::new(client.clone(), "emcdpool".to_string()), - entrustcharitypool: AXbtPattern::new(client.clone(), "entrustcharitypool".to_string()), - eobot: AXbtPattern::new(client.clone(), "eobot".to_string()), - exxbw: AXbtPattern::new(client.clone(), "exxbw".to_string()), - f2pool: AXbtPattern::new(client.clone(), "f2pool".to_string()), - fiftyeightcoin: AXbtPattern::new(client.clone(), "fiftyeightcoin".to_string()), - foundryusa: AXbtPattern::new(client.clone(), "foundryusa".to_string()), - futurebitapollosolo: AXbtPattern::new(client.clone(), "futurebitapollosolo".to_string()), - gbminers: AXbtPattern::new(client.clone(), "gbminers".to_string()), - ghashio: AXbtPattern::new(client.clone(), "ghashio".to_string()), - givemecoins: AXbtPattern::new(client.clone(), "givemecoins".to_string()), - gogreenlight: AXbtPattern::new(client.clone(), "gogreenlight".to_string()), - haozhuzhu: AXbtPattern::new(client.clone(), "haozhuzhu".to_string()), - haominer: AXbtPattern::new(client.clone(), "haominer".to_string()), - hashbx: AXbtPattern::new(client.clone(), "hashbx".to_string()), - hashpool: AXbtPattern::new(client.clone(), "hashpool".to_string()), - helix: AXbtPattern::new(client.clone(), "helix".to_string()), - hhtt: AXbtPattern::new(client.clone(), "hhtt".to_string()), - hotpool: AXbtPattern::new(client.clone(), "hotpool".to_string()), - hummerpool: AXbtPattern::new(client.clone(), "hummerpool".to_string()), - huobipool: AXbtPattern::new(client.clone(), "huobipool".to_string()), - innopolistech: AXbtPattern::new(client.clone(), "innopolistech".to_string()), - kanopool: AXbtPattern::new(client.clone(), "kanopool".to_string()), - kncminer: AXbtPattern::new(client.clone(), "kncminer".to_string()), - kucoinpool: AXbtPattern::new(client.clone(), "kucoinpool".to_string()), - lubiancom: AXbtPattern::new(client.clone(), 
"lubiancom".to_string()), - luckypool: AXbtPattern::new(client.clone(), "luckypool".to_string()), - luxor: AXbtPattern::new(client.clone(), "luxor".to_string()), - marapool: AXbtPattern::new(client.clone(), "marapool".to_string()), - maxbtc: AXbtPattern::new(client.clone(), "maxbtc".to_string()), - maxipool: AXbtPattern::new(client.clone(), "maxipool".to_string()), - megabigpower: AXbtPattern::new(client.clone(), "megabigpower".to_string()), - minerium: AXbtPattern::new(client.clone(), "minerium".to_string()), - miningcity: AXbtPattern::new(client.clone(), "miningcity".to_string()), - miningdutch: AXbtPattern::new(client.clone(), "miningdutch".to_string()), - miningkings: AXbtPattern::new(client.clone(), "miningkings".to_string()), - miningsquared: AXbtPattern::new(client.clone(), "miningsquared".to_string()), - mmpool: AXbtPattern::new(client.clone(), "mmpool".to_string()), - mtred: AXbtPattern::new(client.clone(), "mtred".to_string()), - multicoinco: AXbtPattern::new(client.clone(), "multicoinco".to_string()), - multipool: AXbtPattern::new(client.clone(), "multipool".to_string()), - mybtccoinpool: AXbtPattern::new(client.clone(), "mybtccoinpool".to_string()), - neopool: AXbtPattern::new(client.clone(), "neopool".to_string()), - nexious: AXbtPattern::new(client.clone(), "nexious".to_string()), - nicehash: AXbtPattern::new(client.clone(), "nicehash".to_string()), - nmcbit: AXbtPattern::new(client.clone(), "nmcbit".to_string()), - novablock: AXbtPattern::new(client.clone(), "novablock".to_string()), - ocean: AXbtPattern::new(client.clone(), "ocean".to_string()), - okexpool: AXbtPattern::new(client.clone(), "okexpool".to_string()), - okminer: AXbtPattern::new(client.clone(), "okminer".to_string()), - okkong: AXbtPattern::new(client.clone(), "okkong".to_string()), - okpooltop: AXbtPattern::new(client.clone(), "okpooltop".to_string()), - onehash: AXbtPattern::new(client.clone(), "onehash".to_string()), - onem1x: AXbtPattern::new(client.clone(), "onem1x".to_string()), - 
onethash: AXbtPattern::new(client.clone(), "onethash".to_string()), - ozcoin: AXbtPattern::new(client.clone(), "ozcoin".to_string()), - phashio: AXbtPattern::new(client.clone(), "phashio".to_string()), - parasite: AXbtPattern::new(client.clone(), "parasite".to_string()), - patels: AXbtPattern::new(client.clone(), "patels".to_string()), - pegapool: AXbtPattern::new(client.clone(), "pegapool".to_string()), - phoenix: AXbtPattern::new(client.clone(), "phoenix".to_string()), - polmine: AXbtPattern::new(client.clone(), "polmine".to_string()), - pool175btc: AXbtPattern::new(client.clone(), "pool175btc".to_string()), - pool50btc: AXbtPattern::new(client.clone(), "pool50btc".to_string()), - poolin: AXbtPattern::new(client.clone(), "poolin".to_string()), - portlandhodl: AXbtPattern::new(client.clone(), "portlandhodl".to_string()), - publicpool: AXbtPattern::new(client.clone(), "publicpool".to_string()), - purebtccom: AXbtPattern::new(client.clone(), "purebtccom".to_string()), - rawpool: AXbtPattern::new(client.clone(), "rawpool".to_string()), - rigpool: AXbtPattern::new(client.clone(), "rigpool".to_string()), - sbicrypto: AXbtPattern::new(client.clone(), "sbicrypto".to_string()), - secpool: AXbtPattern::new(client.clone(), "secpool".to_string()), - secretsuperstar: AXbtPattern::new(client.clone(), "secretsuperstar".to_string()), - sevenpool: AXbtPattern::new(client.clone(), "sevenpool".to_string()), - shawnp0wers: AXbtPattern::new(client.clone(), "shawnp0wers".to_string()), - sigmapoolcom: AXbtPattern::new(client.clone(), "sigmapoolcom".to_string()), - simplecoinus: AXbtPattern::new(client.clone(), "simplecoinus".to_string()), - solock: AXbtPattern::new(client.clone(), "solock".to_string()), - spiderpool: AXbtPattern::new(client.clone(), "spiderpool".to_string()), - stminingcorp: AXbtPattern::new(client.clone(), "stminingcorp".to_string()), - tangpool: AXbtPattern::new(client.clone(), "tangpool".to_string()), - tatmaspool: AXbtPattern::new(client.clone(), 
"tatmaspool".to_string()), - tbdice: AXbtPattern::new(client.clone(), "tbdice".to_string()), - telco214: AXbtPattern::new(client.clone(), "telco214".to_string()), - terrapool: AXbtPattern::new(client.clone(), "terrapool".to_string()), - tiger: AXbtPattern::new(client.clone(), "tiger".to_string()), - tigerpoolnet: AXbtPattern::new(client.clone(), "tigerpoolnet".to_string()), - titan: AXbtPattern::new(client.clone(), "titan".to_string()), - transactioncoinmining: AXbtPattern::new(client.clone(), "transactioncoinmining".to_string()), - trickysbtcpool: AXbtPattern::new(client.clone(), "trickysbtcpool".to_string()), - triplemining: AXbtPattern::new(client.clone(), "triplemining".to_string()), - twentyoneinc: AXbtPattern::new(client.clone(), "twentyoneinc".to_string()), - ultimuspool: AXbtPattern::new(client.clone(), "ultimuspool".to_string()), - unknown: AXbtPattern::new(client.clone(), "unknown".to_string()), - unomp: AXbtPattern::new(client.clone(), "unomp".to_string()), - viabtc: AXbtPattern::new(client.clone(), "viabtc".to_string()), - waterhole: AXbtPattern::new(client.clone(), "waterhole".to_string()), - wayicn: AXbtPattern::new(client.clone(), "wayicn".to_string()), - whitepool: AXbtPattern::new(client.clone(), "whitepool".to_string()), - wk057: AXbtPattern::new(client.clone(), "wk057".to_string()), - yourbtcnet: AXbtPattern::new(client.clone(), "yourbtcnet".to_string()), - zulupool: AXbtPattern::new(client.clone(), "zulupool".to_string()), + aaopool: AaopoolPattern::new(client.clone(), "aaopool".to_string()), + antpool: AaopoolPattern::new(client.clone(), "antpool".to_string()), + arkpool: AaopoolPattern::new(client.clone(), "arkpool".to_string()), + asicminer: AaopoolPattern::new(client.clone(), "asicminer".to_string()), + axbt: AaopoolPattern::new(client.clone(), "axbt".to_string()), + batpool: AaopoolPattern::new(client.clone(), "batpool".to_string()), + bcmonster: AaopoolPattern::new(client.clone(), "bcmonster".to_string()), + bcpoolio: 
AaopoolPattern::new(client.clone(), "bcpoolio".to_string()), + binancepool: AaopoolPattern::new(client.clone(), "binancepool".to_string()), + bitalo: AaopoolPattern::new(client.clone(), "bitalo".to_string()), + bitclub: AaopoolPattern::new(client.clone(), "bitclub".to_string()), + bitcoinaffiliatenetwork: AaopoolPattern::new(client.clone(), "bitcoinaffiliatenetwork".to_string()), + bitcoincom: AaopoolPattern::new(client.clone(), "bitcoincom".to_string()), + bitcoinindia: AaopoolPattern::new(client.clone(), "bitcoinindia".to_string()), + bitcoinrussia: AaopoolPattern::new(client.clone(), "bitcoinrussia".to_string()), + bitcoinukraine: AaopoolPattern::new(client.clone(), "bitcoinukraine".to_string()), + bitfarms: AaopoolPattern::new(client.clone(), "bitfarms".to_string()), + bitfufupool: AaopoolPattern::new(client.clone(), "bitfufupool".to_string()), + bitfury: AaopoolPattern::new(client.clone(), "bitfury".to_string()), + bitminter: AaopoolPattern::new(client.clone(), "bitminter".to_string()), + bitparking: AaopoolPattern::new(client.clone(), "bitparking".to_string()), + bitsolo: AaopoolPattern::new(client.clone(), "bitsolo".to_string()), + bixin: AaopoolPattern::new(client.clone(), "bixin".to_string()), + blockfills: AaopoolPattern::new(client.clone(), "blockfills".to_string()), + braiinspool: AaopoolPattern::new(client.clone(), "braiinspool".to_string()), + bravomining: AaopoolPattern::new(client.clone(), "bravomining".to_string()), + btcc: AaopoolPattern::new(client.clone(), "btcc".to_string()), + btccom: AaopoolPattern::new(client.clone(), "btccom".to_string()), + btcdig: AaopoolPattern::new(client.clone(), "btcdig".to_string()), + btcguild: AaopoolPattern::new(client.clone(), "btcguild".to_string()), + btclab: AaopoolPattern::new(client.clone(), "btclab".to_string()), + btcmp: AaopoolPattern::new(client.clone(), "btcmp".to_string()), + btcnuggets: AaopoolPattern::new(client.clone(), "btcnuggets".to_string()), + btcpoolparty: AaopoolPattern::new(client.clone(), 
"btcpoolparty".to_string()), + btcserv: AaopoolPattern::new(client.clone(), "btcserv".to_string()), + btctop: AaopoolPattern::new(client.clone(), "btctop".to_string()), + btpool: AaopoolPattern::new(client.clone(), "btpool".to_string()), + bwpool: AaopoolPattern::new(client.clone(), "bwpool".to_string()), + bytepool: AaopoolPattern::new(client.clone(), "bytepool".to_string()), + canoe: AaopoolPattern::new(client.clone(), "canoe".to_string()), + canoepool: AaopoolPattern::new(client.clone(), "canoepool".to_string()), + carbonnegative: AaopoolPattern::new(client.clone(), "carbonnegative".to_string()), + ckpool: AaopoolPattern::new(client.clone(), "ckpool".to_string()), + cloudhashing: AaopoolPattern::new(client.clone(), "cloudhashing".to_string()), + coinlab: AaopoolPattern::new(client.clone(), "coinlab".to_string()), + cointerra: AaopoolPattern::new(client.clone(), "cointerra".to_string()), + connectbtc: AaopoolPattern::new(client.clone(), "connectbtc".to_string()), + dcex: AaopoolPattern::new(client.clone(), "dcex".to_string()), + dcexploration: AaopoolPattern::new(client.clone(), "dcexploration".to_string()), + digitalbtc: AaopoolPattern::new(client.clone(), "digitalbtc".to_string()), + digitalxmintsy: AaopoolPattern::new(client.clone(), "digitalxmintsy".to_string()), + dpool: AaopoolPattern::new(client.clone(), "dpool".to_string()), + eclipsemc: AaopoolPattern::new(client.clone(), "eclipsemc".to_string()), + eightbaochi: AaopoolPattern::new(client.clone(), "eightbaochi".to_string()), + ekanembtc: AaopoolPattern::new(client.clone(), "ekanembtc".to_string()), + eligius: AaopoolPattern::new(client.clone(), "eligius".to_string()), + emcdpool: AaopoolPattern::new(client.clone(), "emcdpool".to_string()), + entrustcharitypool: AaopoolPattern::new(client.clone(), "entrustcharitypool".to_string()), + eobot: AaopoolPattern::new(client.clone(), "eobot".to_string()), + exxbw: AaopoolPattern::new(client.clone(), "exxbw".to_string()), + f2pool: 
AaopoolPattern::new(client.clone(), "f2pool".to_string()), + fiftyeightcoin: AaopoolPattern::new(client.clone(), "fiftyeightcoin".to_string()), + foundryusa: AaopoolPattern::new(client.clone(), "foundryusa".to_string()), + futurebitapollosolo: AaopoolPattern::new(client.clone(), "futurebitapollosolo".to_string()), + gbminers: AaopoolPattern::new(client.clone(), "gbminers".to_string()), + ghashio: AaopoolPattern::new(client.clone(), "ghashio".to_string()), + givemecoins: AaopoolPattern::new(client.clone(), "givemecoins".to_string()), + gogreenlight: AaopoolPattern::new(client.clone(), "gogreenlight".to_string()), + haominer: AaopoolPattern::new(client.clone(), "haominer".to_string()), + haozhuzhu: AaopoolPattern::new(client.clone(), "haozhuzhu".to_string()), + hashbx: AaopoolPattern::new(client.clone(), "hashbx".to_string()), + hashpool: AaopoolPattern::new(client.clone(), "hashpool".to_string()), + helix: AaopoolPattern::new(client.clone(), "helix".to_string()), + hhtt: AaopoolPattern::new(client.clone(), "hhtt".to_string()), + hotpool: AaopoolPattern::new(client.clone(), "hotpool".to_string()), + hummerpool: AaopoolPattern::new(client.clone(), "hummerpool".to_string()), + huobipool: AaopoolPattern::new(client.clone(), "huobipool".to_string()), + innopolistech: AaopoolPattern::new(client.clone(), "innopolistech".to_string()), + kanopool: AaopoolPattern::new(client.clone(), "kanopool".to_string()), + kncminer: AaopoolPattern::new(client.clone(), "kncminer".to_string()), + kucoinpool: AaopoolPattern::new(client.clone(), "kucoinpool".to_string()), + lubiancom: AaopoolPattern::new(client.clone(), "lubiancom".to_string()), + luckypool: AaopoolPattern::new(client.clone(), "luckypool".to_string()), + luxor: AaopoolPattern::new(client.clone(), "luxor".to_string()), + marapool: AaopoolPattern::new(client.clone(), "marapool".to_string()), + maxbtc: AaopoolPattern::new(client.clone(), "maxbtc".to_string()), + maxipool: AaopoolPattern::new(client.clone(), 
"maxipool".to_string()), + megabigpower: AaopoolPattern::new(client.clone(), "megabigpower".to_string()), + minerium: AaopoolPattern::new(client.clone(), "minerium".to_string()), + miningcity: AaopoolPattern::new(client.clone(), "miningcity".to_string()), + miningdutch: AaopoolPattern::new(client.clone(), "miningdutch".to_string()), + miningkings: AaopoolPattern::new(client.clone(), "miningkings".to_string()), + miningsquared: AaopoolPattern::new(client.clone(), "miningsquared".to_string()), + mmpool: AaopoolPattern::new(client.clone(), "mmpool".to_string()), + mtred: AaopoolPattern::new(client.clone(), "mtred".to_string()), + multicoinco: AaopoolPattern::new(client.clone(), "multicoinco".to_string()), + multipool: AaopoolPattern::new(client.clone(), "multipool".to_string()), + mybtccoinpool: AaopoolPattern::new(client.clone(), "mybtccoinpool".to_string()), + neopool: AaopoolPattern::new(client.clone(), "neopool".to_string()), + nexious: AaopoolPattern::new(client.clone(), "nexious".to_string()), + nicehash: AaopoolPattern::new(client.clone(), "nicehash".to_string()), + nmcbit: AaopoolPattern::new(client.clone(), "nmcbit".to_string()), + novablock: AaopoolPattern::new(client.clone(), "novablock".to_string()), + ocean: AaopoolPattern::new(client.clone(), "ocean".to_string()), + okexpool: AaopoolPattern::new(client.clone(), "okexpool".to_string()), + okkong: AaopoolPattern::new(client.clone(), "okkong".to_string()), + okminer: AaopoolPattern::new(client.clone(), "okminer".to_string()), + okpooltop: AaopoolPattern::new(client.clone(), "okpooltop".to_string()), + onehash: AaopoolPattern::new(client.clone(), "onehash".to_string()), + onem1x: AaopoolPattern::new(client.clone(), "onem1x".to_string()), + onethash: AaopoolPattern::new(client.clone(), "onethash".to_string()), + ozcoin: AaopoolPattern::new(client.clone(), "ozcoin".to_string()), + parasite: AaopoolPattern::new(client.clone(), "parasite".to_string()), + patels: AaopoolPattern::new(client.clone(), 
"patels".to_string()), + pegapool: AaopoolPattern::new(client.clone(), "pegapool".to_string()), + phashio: AaopoolPattern::new(client.clone(), "phashio".to_string()), + phoenix: AaopoolPattern::new(client.clone(), "phoenix".to_string()), + polmine: AaopoolPattern::new(client.clone(), "polmine".to_string()), + pool175btc: AaopoolPattern::new(client.clone(), "pool175btc".to_string()), + pool50btc: AaopoolPattern::new(client.clone(), "pool50btc".to_string()), + poolin: AaopoolPattern::new(client.clone(), "poolin".to_string()), + portlandhodl: AaopoolPattern::new(client.clone(), "portlandhodl".to_string()), + publicpool: AaopoolPattern::new(client.clone(), "publicpool".to_string()), + purebtccom: AaopoolPattern::new(client.clone(), "purebtccom".to_string()), + rawpool: AaopoolPattern::new(client.clone(), "rawpool".to_string()), + rigpool: AaopoolPattern::new(client.clone(), "rigpool".to_string()), + sbicrypto: AaopoolPattern::new(client.clone(), "sbicrypto".to_string()), + secpool: AaopoolPattern::new(client.clone(), "secpool".to_string()), + secretsuperstar: AaopoolPattern::new(client.clone(), "secretsuperstar".to_string()), + sevenpool: AaopoolPattern::new(client.clone(), "sevenpool".to_string()), + shawnp0wers: AaopoolPattern::new(client.clone(), "shawnp0wers".to_string()), + sigmapoolcom: AaopoolPattern::new(client.clone(), "sigmapoolcom".to_string()), + simplecoinus: AaopoolPattern::new(client.clone(), "simplecoinus".to_string()), + solock: AaopoolPattern::new(client.clone(), "solock".to_string()), + spiderpool: AaopoolPattern::new(client.clone(), "spiderpool".to_string()), + stminingcorp: AaopoolPattern::new(client.clone(), "stminingcorp".to_string()), + tangpool: AaopoolPattern::new(client.clone(), "tangpool".to_string()), + tatmaspool: AaopoolPattern::new(client.clone(), "tatmaspool".to_string()), + tbdice: AaopoolPattern::new(client.clone(), "tbdice".to_string()), + telco214: AaopoolPattern::new(client.clone(), "telco214".to_string()), + terrapool: 
AaopoolPattern::new(client.clone(), "terrapool".to_string()), + tiger: AaopoolPattern::new(client.clone(), "tiger".to_string()), + tigerpoolnet: AaopoolPattern::new(client.clone(), "tigerpoolnet".to_string()), + titan: AaopoolPattern::new(client.clone(), "titan".to_string()), + transactioncoinmining: AaopoolPattern::new(client.clone(), "transactioncoinmining".to_string()), + trickysbtcpool: AaopoolPattern::new(client.clone(), "trickysbtcpool".to_string()), + triplemining: AaopoolPattern::new(client.clone(), "triplemining".to_string()), + twentyoneinc: AaopoolPattern::new(client.clone(), "twentyoneinc".to_string()), + ultimuspool: AaopoolPattern::new(client.clone(), "ultimuspool".to_string()), + unknown: AaopoolPattern::new(client.clone(), "unknown".to_string()), + unomp: AaopoolPattern::new(client.clone(), "unomp".to_string()), + viabtc: AaopoolPattern::new(client.clone(), "viabtc".to_string()), + waterhole: AaopoolPattern::new(client.clone(), "waterhole".to_string()), + wayicn: AaopoolPattern::new(client.clone(), "wayicn".to_string()), + whitepool: AaopoolPattern::new(client.clone(), "whitepool".to_string()), + wk057: AaopoolPattern::new(client.clone(), "wk057".to_string()), + yourbtcnet: AaopoolPattern::new(client.clone(), "yourbtcnet".to_string()), + zulupool: AaopoolPattern::new(client.clone(), "zulupool".to_string()), } } } /// Catalog tree node. -pub struct CatalogTree_Computed_Positions { - pub position: MetricPattern17, +pub struct CatalogTree_Positions { + pub position: MetricPattern14, } -impl CatalogTree_Computed_Positions { +impl CatalogTree_Positions { pub fn new(client: Arc, base_path: String) -> Self { Self { - position: MetricPattern17::new(client.clone(), format!("{base_path}_position")), + position: MetricPattern14::new(client.clone(), format!("{base_path}_position")), } } } /// Catalog tree node. 
-pub struct CatalogTree_Computed_Price { - pub ohlc: CatalogTree_Computed_Price_Ohlc, - pub sats: CatalogTree_Computed_Price_Sats, - pub usd: CatalogTree_Computed_Price_Usd, +pub struct CatalogTree_Price { + pub cents: CatalogTree_Price_Cents, + pub sats: CatalogTree_Price_Sats, + pub usd: CatalogTree_Price_Usd, } -impl CatalogTree_Computed_Price { +impl CatalogTree_Price { pub fn new(client: Arc, base_path: String) -> Self { Self { - ohlc: CatalogTree_Computed_Price_Ohlc::new(client.clone(), format!("{base_path}_ohlc")), - sats: CatalogTree_Computed_Price_Sats::new(client.clone(), format!("{base_path}_sats")), - usd: CatalogTree_Computed_Price_Usd::new(client.clone(), format!("{base_path}_usd")), + cents: CatalogTree_Price_Cents::new(client.clone(), format!("{base_path}_cents")), + sats: CatalogTree_Price_Sats::new(client.clone(), format!("{base_path}_sats")), + usd: CatalogTree_Price_Usd::new(client.clone(), format!("{base_path}_usd")), } } } /// Catalog tree node. -pub struct CatalogTree_Computed_Price_Ohlc { - pub ohlc_in_cents: MetricPattern10, +pub struct CatalogTree_Price_Cents { + pub ohlc: MetricPattern7, } -impl CatalogTree_Computed_Price_Ohlc { +impl CatalogTree_Price_Cents { pub fn new(client: Arc, base_path: String) -> Self { Self { - ohlc_in_cents: MetricPattern10::new(client.clone(), format!("{base_path}_ohlc_in_cents")), + ohlc: MetricPattern7::new(client.clone(), format!("{base_path}_ohlc")), } } } /// Catalog tree node. 
-pub struct CatalogTree_Computed_Price_Sats { - pub price_close_in_sats: MetricPattern1, - pub price_high_in_sats: PriceHighInSatsPattern, - pub price_low_in_sats: PriceLowInSatsPattern, - pub price_ohlc_in_sats: MetricPattern1, - pub price_open_in_sats: MetricPattern1, +pub struct CatalogTree_Price_Sats { + pub price_close_sats: MetricPattern1, + pub price_high_sats: PriceHighSatsPattern, + pub price_low_sats: PriceHighSatsPattern, + pub price_ohlc_sats: MetricPattern1, + pub price_open_sats: MetricPattern1, } -impl CatalogTree_Computed_Price_Sats { +impl CatalogTree_Price_Sats { pub fn new(client: Arc, base_path: String) -> Self { Self { - price_close_in_sats: MetricPattern1::new(client.clone(), format!("{base_path}_price_close_in_sats")), - price_high_in_sats: PriceHighInSatsPattern::new(client.clone(), "price_high_in_sats".to_string()), - price_low_in_sats: PriceLowInSatsPattern::new(client.clone(), "price_low_in_sats".to_string()), - price_ohlc_in_sats: MetricPattern1::new(client.clone(), format!("{base_path}_price_ohlc_in_sats")), - price_open_in_sats: MetricPattern1::new(client.clone(), format!("{base_path}_price_open_in_sats")), + price_close_sats: MetricPattern1::new(client.clone(), format!("{base_path}_price_close_sats")), + price_high_sats: PriceHighSatsPattern::new(client.clone(), "price_high_sats".to_string()), + price_low_sats: PriceHighSatsPattern::new(client.clone(), "price_low_sats".to_string()), + price_ohlc_sats: MetricPattern1::new(client.clone(), format!("{base_path}_price_ohlc_sats")), + price_open_sats: MetricPattern1::new(client.clone(), format!("{base_path}_price_open_sats")), } } } /// Catalog tree node. 
-pub struct CatalogTree_Computed_Price_Usd { +pub struct CatalogTree_Price_Usd { pub price_close: MetricPattern1, - pub price_close_in_cents: MetricPattern10, - pub price_high: PriceHighInSatsPattern, - pub price_high_in_cents: MetricPattern10, - pub price_low: PriceLowInSatsPattern, - pub price_low_in_cents: MetricPattern10, + pub price_close_cents: MetricPattern7, + pub price_high: PriceHighSatsPattern, + pub price_high_cents: MetricPattern7, + pub price_low: PriceHighSatsPattern, + pub price_low_cents: MetricPattern7, pub price_ohlc: MetricPattern1, pub price_open: MetricPattern1, - pub price_open_in_cents: MetricPattern10, + pub price_open_cents: MetricPattern7, } -impl CatalogTree_Computed_Price_Usd { +impl CatalogTree_Price_Usd { pub fn new(client: Arc, base_path: String) -> Self { Self { price_close: MetricPattern1::new(client.clone(), format!("{base_path}_price_close")), - price_close_in_cents: MetricPattern10::new(client.clone(), format!("{base_path}_price_close_in_cents")), - price_high: PriceHighInSatsPattern::new(client.clone(), "price_high".to_string()), - price_high_in_cents: MetricPattern10::new(client.clone(), format!("{base_path}_price_high_in_cents")), - price_low: PriceLowInSatsPattern::new(client.clone(), "price_low".to_string()), - price_low_in_cents: MetricPattern10::new(client.clone(), format!("{base_path}_price_low_in_cents")), + price_close_cents: MetricPattern7::new(client.clone(), format!("{base_path}_price_close_cents")), + price_high: PriceHighSatsPattern::new(client.clone(), "price_high".to_string()), + price_high_cents: MetricPattern7::new(client.clone(), format!("{base_path}_price_high_cents")), + price_low: PriceHighSatsPattern::new(client.clone(), "price_low".to_string()), + price_low_cents: MetricPattern7::new(client.clone(), format!("{base_path}_price_low_cents")), price_ohlc: MetricPattern1::new(client.clone(), format!("{base_path}_price_ohlc")), price_open: MetricPattern1::new(client.clone(), format!("{base_path}_price_open")), 
- price_open_in_cents: MetricPattern10::new(client.clone(), format!("{base_path}_price_open_in_cents")), + price_open_cents: MetricPattern7::new(client.clone(), format!("{base_path}_price_open_cents")), } } } /// Catalog tree node. -pub struct CatalogTree_Computed_Scripts { - pub count: CatalogTree_Computed_Scripts_Count, - pub value: CatalogTree_Computed_Scripts_Value, +pub struct CatalogTree_Scripts { + pub count: CatalogTree_Scripts_Count, + pub empty_to_txindex: MetricPattern22, + pub first_emptyoutputindex: MetricPattern23, + pub first_opreturnindex: MetricPattern23, + pub first_p2msoutputindex: MetricPattern23, + pub first_unknownoutputindex: MetricPattern23, + pub opreturn_to_txindex: MetricPattern25, + pub p2ms_to_txindex: MetricPattern28, + pub unknown_to_txindex: MetricPattern37, + pub value: CatalogTree_Scripts_Value, } -impl CatalogTree_Computed_Scripts { +impl CatalogTree_Scripts { pub fn new(client: Arc, base_path: String) -> Self { Self { - count: CatalogTree_Computed_Scripts_Count::new(client.clone(), format!("{base_path}_count")), - value: CatalogTree_Computed_Scripts_Value::new(client.clone(), format!("{base_path}_value")), + count: CatalogTree_Scripts_Count::new(client.clone(), format!("{base_path}_count")), + empty_to_txindex: MetricPattern22::new(client.clone(), format!("{base_path}_empty_to_txindex")), + first_emptyoutputindex: MetricPattern23::new(client.clone(), format!("{base_path}_first_emptyoutputindex")), + first_opreturnindex: MetricPattern23::new(client.clone(), format!("{base_path}_first_opreturnindex")), + first_p2msoutputindex: MetricPattern23::new(client.clone(), format!("{base_path}_first_p2msoutputindex")), + first_unknownoutputindex: MetricPattern23::new(client.clone(), format!("{base_path}_first_unknownoutputindex")), + opreturn_to_txindex: MetricPattern25::new(client.clone(), format!("{base_path}_opreturn_to_txindex")), + p2ms_to_txindex: MetricPattern28::new(client.clone(), format!("{base_path}_p2ms_to_txindex")), + 
unknown_to_txindex: MetricPattern37::new(client.clone(), format!("{base_path}_unknown_to_txindex")), + value: CatalogTree_Scripts_Value::new(client.clone(), format!("{base_path}_value")), } } } /// Catalog tree node. -pub struct CatalogTree_Computed_Scripts_Count { - pub emptyoutput_count: DollarsPattern, - pub opreturn_count: DollarsPattern, - pub p2a_count: DollarsPattern, - pub p2ms_count: DollarsPattern, - pub p2pk33_count: DollarsPattern, - pub p2pk65_count: DollarsPattern, - pub p2pkh_count: DollarsPattern, - pub p2sh_count: DollarsPattern, - pub p2tr_count: DollarsPattern, - pub p2wpkh_count: DollarsPattern, - pub p2wsh_count: DollarsPattern, - pub segwit_adoption: BlockCountPattern, - pub segwit_count: DollarsPattern, - pub taproot_adoption: BlockCountPattern, - pub unknownoutput_count: DollarsPattern, +pub struct CatalogTree_Scripts_Count { + pub emptyoutput: BitcoinPattern, + pub opreturn: BitcoinPattern, + pub p2a: BitcoinPattern, + pub p2ms: BitcoinPattern, + pub p2pk33: BitcoinPattern, + pub p2pk65: BitcoinPattern, + pub p2pkh: BitcoinPattern, + pub p2sh: BitcoinPattern, + pub p2tr: BitcoinPattern, + pub p2wpkh: BitcoinPattern, + pub p2wsh: BitcoinPattern, + pub segwit: BitcoinPattern, + pub segwit_adoption: SatsPattern, + pub taproot_adoption: SatsPattern, + pub unknownoutput: BitcoinPattern, } -impl CatalogTree_Computed_Scripts_Count { +impl CatalogTree_Scripts_Count { pub fn new(client: Arc, base_path: String) -> Self { Self { - emptyoutput_count: DollarsPattern::new(client.clone(), "emptyoutput_count".to_string()), - opreturn_count: DollarsPattern::new(client.clone(), "opreturn_count".to_string()), - p2a_count: DollarsPattern::new(client.clone(), "p2a_count".to_string()), - p2ms_count: DollarsPattern::new(client.clone(), "p2ms_count".to_string()), - p2pk33_count: DollarsPattern::new(client.clone(), "p2pk33_count".to_string()), - p2pk65_count: DollarsPattern::new(client.clone(), "p2pk65_count".to_string()), - p2pkh_count: 
DollarsPattern::new(client.clone(), "p2pkh_count".to_string()), - p2sh_count: DollarsPattern::new(client.clone(), "p2sh_count".to_string()), - p2tr_count: DollarsPattern::new(client.clone(), "p2tr_count".to_string()), - p2wpkh_count: DollarsPattern::new(client.clone(), "p2wpkh_count".to_string()), - p2wsh_count: DollarsPattern::new(client.clone(), "p2wsh_count".to_string()), - segwit_adoption: BlockCountPattern::new(client.clone(), "segwit_adoption".to_string()), - segwit_count: DollarsPattern::new(client.clone(), "segwit_count".to_string()), - taproot_adoption: BlockCountPattern::new(client.clone(), "taproot_adoption".to_string()), - unknownoutput_count: DollarsPattern::new(client.clone(), "unknownoutput_count".to_string()), + emptyoutput: BitcoinPattern::new(client.clone(), "emptyoutput_count".to_string()), + opreturn: BitcoinPattern::new(client.clone(), "opreturn_count".to_string()), + p2a: BitcoinPattern::new(client.clone(), "p2a_count".to_string()), + p2ms: BitcoinPattern::new(client.clone(), "p2ms_count".to_string()), + p2pk33: BitcoinPattern::new(client.clone(), "p2pk33_count".to_string()), + p2pk65: BitcoinPattern::new(client.clone(), "p2pk65_count".to_string()), + p2pkh: BitcoinPattern::new(client.clone(), "p2pkh_count".to_string()), + p2sh: BitcoinPattern::new(client.clone(), "p2sh_count".to_string()), + p2tr: BitcoinPattern::new(client.clone(), "p2tr_count".to_string()), + p2wpkh: BitcoinPattern::new(client.clone(), "p2wpkh_count".to_string()), + p2wsh: BitcoinPattern::new(client.clone(), "p2wsh_count".to_string()), + segwit: BitcoinPattern::new(client.clone(), "segwit_count".to_string()), + segwit_adoption: SatsPattern::new(client.clone(), "segwit_adoption".to_string()), + taproot_adoption: SatsPattern::new(client.clone(), "taproot_adoption".to_string()), + unknownoutput: BitcoinPattern::new(client.clone(), "unknownoutput_count".to_string()), } } } /// Catalog tree node. 
-pub struct CatalogTree_Computed_Scripts_Value { - pub opreturn_value: CoinbasePattern, +pub struct CatalogTree_Scripts_Value { + pub opreturn: CoinbasePattern, } -impl CatalogTree_Computed_Scripts_Value { +impl CatalogTree_Scripts_Value { pub fn new(client: Arc, base_path: String) -> Self { Self { - opreturn_value: CoinbasePattern::new(client.clone(), "opreturn_value".to_string()), + opreturn: CoinbasePattern::new(client.clone(), "opreturn_value".to_string()), } } } /// Catalog tree node. -pub struct CatalogTree_Computed_Supply { - pub burned: CatalogTree_Computed_Supply_Burned, - pub circulating: CatalogTree_Computed_Supply_Circulating, - pub inflation: CatalogTree_Computed_Supply_Inflation, - pub market_cap: CatalogTree_Computed_Supply_MarketCap, - pub velocity: CatalogTree_Computed_Supply_Velocity, +pub struct CatalogTree_Supply { + pub burned: CatalogTree_Supply_Burned, + pub circulating: ActiveSupplyPattern, + pub inflation: MetricPattern4, + pub market_cap: MetricPattern3, + pub velocity: CatalogTree_Supply_Velocity, } -impl CatalogTree_Computed_Supply { +impl CatalogTree_Supply { pub fn new(client: Arc, base_path: String) -> Self { Self { - burned: CatalogTree_Computed_Supply_Burned::new(client.clone(), format!("{base_path}_burned")), - circulating: CatalogTree_Computed_Supply_Circulating::new(client.clone(), format!("{base_path}_circulating")), - inflation: CatalogTree_Computed_Supply_Inflation::new(client.clone(), format!("{base_path}_inflation")), - market_cap: CatalogTree_Computed_Supply_MarketCap::new(client.clone(), format!("{base_path}_market_cap")), - velocity: CatalogTree_Computed_Supply_Velocity::new(client.clone(), format!("{base_path}_velocity")), + burned: CatalogTree_Supply_Burned::new(client.clone(), format!("{base_path}_burned")), + circulating: ActiveSupplyPattern::new(client.clone(), "circulating".to_string()), + inflation: MetricPattern4::new(client.clone(), format!("{base_path}_inflation")), + market_cap: 
MetricPattern3::new(client.clone(), format!("{base_path}_market_cap")), + velocity: CatalogTree_Supply_Velocity::new(client.clone(), format!("{base_path}_velocity")), } } } /// Catalog tree node. -pub struct CatalogTree_Computed_Supply_Burned { +pub struct CatalogTree_Supply_Burned { pub opreturn: UnclaimedRewardsPattern, pub unspendable: UnclaimedRewardsPattern, } -impl CatalogTree_Computed_Supply_Burned { +impl CatalogTree_Supply_Burned { pub fn new(client: Arc, base_path: String) -> Self { Self { opreturn: UnclaimedRewardsPattern::new(client.clone(), "opreturn_supply".to_string()), @@ -6826,210 +6531,139 @@ impl CatalogTree_Computed_Supply_Burned { } /// Catalog tree node. -pub struct CatalogTree_Computed_Supply_Circulating { - pub btc: MetricPattern26, - pub indexes: ActiveSupplyPattern, - pub sats: MetricPattern26, - pub usd: MetricPattern26, +pub struct CatalogTree_Supply_Velocity { + pub btc: MetricPattern4, + pub usd: MetricPattern4, } -impl CatalogTree_Computed_Supply_Circulating { +impl CatalogTree_Supply_Velocity { pub fn new(client: Arc, base_path: String) -> Self { Self { - btc: MetricPattern26::new(client.clone(), format!("{base_path}_btc")), - indexes: ActiveSupplyPattern::new(client.clone(), "circulating".to_string()), - sats: MetricPattern26::new(client.clone(), format!("{base_path}_sats")), - usd: MetricPattern26::new(client.clone(), format!("{base_path}_usd")), + btc: MetricPattern4::new(client.clone(), format!("{base_path}_btc")), + usd: MetricPattern4::new(client.clone(), format!("{base_path}_usd")), } } } /// Catalog tree node. 
-pub struct CatalogTree_Computed_Supply_Inflation { - pub indexes: IndexesPattern2, +pub struct CatalogTree_Transactions { + pub base_size: MetricPattern36, + pub count: CatalogTree_Transactions_Count, + pub fees: CatalogTree_Transactions_Fees, + pub first_txindex: MetricPattern23, + pub first_txinindex: MetricPattern36, + pub first_txoutindex: MetricPattern36, + pub height: MetricPattern36, + pub is_explicitly_rbf: MetricPattern36, + pub rawlocktime: MetricPattern36, + pub size: CatalogTree_Transactions_Size, + pub total_size: MetricPattern36, + pub txid: MetricPattern36, + pub txversion: MetricPattern36, + pub versions: CatalogTree_Transactions_Versions, + pub volume: CatalogTree_Transactions_Volume, } -impl CatalogTree_Computed_Supply_Inflation { +impl CatalogTree_Transactions { pub fn new(client: Arc, base_path: String) -> Self { Self { - indexes: IndexesPattern2::new(client.clone(), "inflation_rate".to_string()), + base_size: MetricPattern36::new(client.clone(), format!("{base_path}_base_size")), + count: CatalogTree_Transactions_Count::new(client.clone(), format!("{base_path}_count")), + fees: CatalogTree_Transactions_Fees::new(client.clone(), format!("{base_path}_fees")), + first_txindex: MetricPattern23::new(client.clone(), format!("{base_path}_first_txindex")), + first_txinindex: MetricPattern36::new(client.clone(), format!("{base_path}_first_txinindex")), + first_txoutindex: MetricPattern36::new(client.clone(), format!("{base_path}_first_txoutindex")), + height: MetricPattern36::new(client.clone(), format!("{base_path}_height")), + is_explicitly_rbf: MetricPattern36::new(client.clone(), format!("{base_path}_is_explicitly_rbf")), + rawlocktime: MetricPattern36::new(client.clone(), format!("{base_path}_rawlocktime")), + size: CatalogTree_Transactions_Size::new(client.clone(), format!("{base_path}_size")), + total_size: MetricPattern36::new(client.clone(), format!("{base_path}_total_size")), + txid: MetricPattern36::new(client.clone(), 
format!("{base_path}_txid")), + txversion: MetricPattern36::new(client.clone(), format!("{base_path}_txversion")), + versions: CatalogTree_Transactions_Versions::new(client.clone(), format!("{base_path}_versions")), + volume: CatalogTree_Transactions_Volume::new(client.clone(), format!("{base_path}_volume")), } } } /// Catalog tree node. -pub struct CatalogTree_Computed_Supply_MarketCap { - pub height: MetricPattern26, - pub indexes: MetricPattern5, +pub struct CatalogTree_Transactions_Count { + pub is_coinbase: MetricPattern36, + pub tx_count: BitcoinPattern, } -impl CatalogTree_Computed_Supply_MarketCap { +impl CatalogTree_Transactions_Count { pub fn new(client: Arc, base_path: String) -> Self { Self { - height: MetricPattern26::new(client.clone(), format!("{base_path}_height")), - indexes: MetricPattern5::new(client.clone(), format!("{base_path}_indexes")), + is_coinbase: MetricPattern36::new(client.clone(), format!("{base_path}_is_coinbase")), + tx_count: BitcoinPattern::new(client.clone(), "tx_count".to_string()), } } } /// Catalog tree node. 
-pub struct CatalogTree_Computed_Supply_Velocity { - pub btc: IndexesPattern2, - pub usd: IndexesPattern2, +pub struct CatalogTree_Transactions_Fees { + pub fee: CatalogTree_Transactions_Fees_Fee, + pub fee_rate: IntervalPattern, + pub input_value: MetricPattern36, + pub output_value: MetricPattern36, } -impl CatalogTree_Computed_Supply_Velocity { +impl CatalogTree_Transactions_Fees { pub fn new(client: Arc, base_path: String) -> Self { Self { - btc: IndexesPattern2::new(client.clone(), "btc_velocity".to_string()), - usd: IndexesPattern2::new(client.clone(), "usd_velocity".to_string()), + fee: CatalogTree_Transactions_Fees_Fee::new(client.clone(), format!("{base_path}_fee")), + fee_rate: IntervalPattern::new(client.clone(), "fee_rate".to_string()), + input_value: MetricPattern36::new(client.clone(), format!("{base_path}_input_value")), + output_value: MetricPattern36::new(client.clone(), format!("{base_path}_output_value")), } } } /// Catalog tree node. -pub struct CatalogTree_Computed_Transactions { - pub count: CatalogTree_Computed_Transactions_Count, - pub fees: CatalogTree_Computed_Transactions_Fees, - pub size: CatalogTree_Computed_Transactions_Size, - pub versions: CatalogTree_Computed_Transactions_Versions, - pub volume: CatalogTree_Computed_Transactions_Volume, +pub struct CatalogTree_Transactions_Fees_Fee { + pub bitcoin: BlockSizePattern, + pub dollars: BlockSizePattern, + pub sats: BitcoinPattern, } -impl CatalogTree_Computed_Transactions { +impl CatalogTree_Transactions_Fees_Fee { pub fn new(client: Arc, base_path: String) -> Self { Self { - count: CatalogTree_Computed_Transactions_Count::new(client.clone(), format!("{base_path}_count")), - fees: CatalogTree_Computed_Transactions_Fees::new(client.clone(), format!("{base_path}_fees")), - size: CatalogTree_Computed_Transactions_Size::new(client.clone(), format!("{base_path}_size")), - versions: CatalogTree_Computed_Transactions_Versions::new(client.clone(), format!("{base_path}_versions")), - volume: 
CatalogTree_Computed_Transactions_Volume::new(client.clone(), format!("{base_path}_volume")), + bitcoin: BlockSizePattern::new(client.clone(), "fee_btc".to_string()), + dollars: BlockSizePattern::new(client.clone(), "fee_usd".to_string()), + sats: BitcoinPattern::new(client.clone(), "fee".to_string()), } } } /// Catalog tree node. -pub struct CatalogTree_Computed_Transactions_Count { - pub is_coinbase: MetricPattern39, - pub tx_count: DollarsPattern, +pub struct CatalogTree_Transactions_Size { + pub tx_vsize: TxVsizePattern, + pub tx_weight: TxVsizePattern, + pub vsize: MetricPattern36, + pub weight: MetricPattern36, } -impl CatalogTree_Computed_Transactions_Count { +impl CatalogTree_Transactions_Size { pub fn new(client: Arc, base_path: String) -> Self { Self { - is_coinbase: MetricPattern39::new(client.clone(), format!("{base_path}_is_coinbase")), - tx_count: DollarsPattern::new(client.clone(), "tx_count".to_string()), + tx_vsize: TxVsizePattern::new(client.clone(), "tx_vsize".to_string()), + tx_weight: TxVsizePattern::new(client.clone(), "tx_weight".to_string()), + vsize: MetricPattern36::new(client.clone(), format!("{base_path}_vsize")), + weight: MetricPattern36::new(client.clone(), format!("{base_path}_weight")), } } } /// Catalog tree node. 
-pub struct CatalogTree_Computed_Transactions_Fees { - pub fee: CatalogTree_Computed_Transactions_Fees_Fee, - pub fee_rate: CatalogTree_Computed_Transactions_Fees_FeeRate, - pub input_value: MetricPattern39, - pub output_value: MetricPattern39, -} - -impl CatalogTree_Computed_Transactions_Fees { - pub fn new(client: Arc, base_path: String) -> Self { - Self { - fee: CatalogTree_Computed_Transactions_Fees_Fee::new(client.clone(), format!("{base_path}_fee")), - fee_rate: CatalogTree_Computed_Transactions_Fees_FeeRate::new(client.clone(), format!("{base_path}_fee_rate")), - input_value: MetricPattern39::new(client.clone(), format!("{base_path}_input_value")), - output_value: MetricPattern39::new(client.clone(), format!("{base_path}_output_value")), - } - } -} - -/// Catalog tree node. -pub struct CatalogTree_Computed_Transactions_Fees_Fee { - pub base: MetricPattern39, - pub bitcoin: CatalogTree_Computed_Transactions_Fees_Fee_Bitcoin, - pub dollars: CountPattern2, - pub sats: CountPattern2, -} - -impl CatalogTree_Computed_Transactions_Fees_Fee { - pub fn new(client: Arc, base_path: String) -> Self { - Self { - base: MetricPattern39::new(client.clone(), format!("{base_path}_base")), - bitcoin: CatalogTree_Computed_Transactions_Fees_Fee_Bitcoin::new(client.clone(), format!("{base_path}_bitcoin")), - dollars: CountPattern2::new(client.clone(), "fee_usd".to_string()), - sats: CountPattern2::new(client.clone(), "fee".to_string()), - } - } -} - -/// Catalog tree node. 
-pub struct CatalogTree_Computed_Transactions_Fees_Fee_Bitcoin { - pub average: MetricPattern1, - pub cumulative: MetricPattern1, - pub max: MetricPattern1, - pub min: MetricPattern1, - pub sum: MetricPattern1, -} - -impl CatalogTree_Computed_Transactions_Fees_Fee_Bitcoin { - pub fn new(client: Arc, base_path: String) -> Self { - Self { - average: MetricPattern1::new(client.clone(), format!("{base_path}_average")), - cumulative: MetricPattern1::new(client.clone(), format!("{base_path}_cumulative")), - max: MetricPattern1::new(client.clone(), format!("{base_path}_max")), - min: MetricPattern1::new(client.clone(), format!("{base_path}_min")), - sum: MetricPattern1::new(client.clone(), format!("{base_path}_sum")), - } - } -} - -/// Catalog tree node. -pub struct CatalogTree_Computed_Transactions_Fees_FeeRate { - pub average: MetricPattern1, - pub base: MetricPattern39, - pub max: MetricPattern1, - pub min: MetricPattern1, - pub percentiles: PercentilesPattern, -} - -impl CatalogTree_Computed_Transactions_Fees_FeeRate { - pub fn new(client: Arc, base_path: String) -> Self { - Self { - average: MetricPattern1::new(client.clone(), format!("{base_path}_average")), - base: MetricPattern39::new(client.clone(), format!("{base_path}_base")), - max: MetricPattern1::new(client.clone(), format!("{base_path}_max")), - min: MetricPattern1::new(client.clone(), format!("{base_path}_min")), - percentiles: PercentilesPattern::new(client.clone(), "fee_rate".to_string()), - } - } -} - -/// Catalog tree node. 
-pub struct CatalogTree_Computed_Transactions_Size { - pub tx_vsize: BlockIntervalPattern, - pub tx_weight: BlockIntervalPattern, - pub vsize: MetricPattern39, - pub weight: MetricPattern39, -} - -impl CatalogTree_Computed_Transactions_Size { - pub fn new(client: Arc, base_path: String) -> Self { - Self { - tx_vsize: BlockIntervalPattern::new(client.clone(), "tx_vsize".to_string()), - tx_weight: BlockIntervalPattern::new(client.clone(), "tx_weight".to_string()), - vsize: MetricPattern39::new(client.clone(), format!("{base_path}_vsize")), - weight: MetricPattern39::new(client.clone(), format!("{base_path}_weight")), - } - } -} - -/// Catalog tree node. -pub struct CatalogTree_Computed_Transactions_Versions { +pub struct CatalogTree_Transactions_Versions { pub tx_v1: BlockCountPattern, pub tx_v2: BlockCountPattern, pub tx_v3: BlockCountPattern, } -impl CatalogTree_Computed_Transactions_Versions { +impl CatalogTree_Transactions_Versions { pub fn new(client: Arc, base_path: String) -> Self { Self { tx_v1: BlockCountPattern::new(client.clone(), "tx_v1".to_string()), @@ -7040,224 +6674,26 @@ impl CatalogTree_Computed_Transactions_Versions { } /// Catalog tree node. 
-pub struct CatalogTree_Computed_Transactions_Volume { - pub annualized_volume: MetricPattern5, - pub annualized_volume_btc: MetricPattern5, - pub annualized_volume_usd: MetricPattern5, - pub inputs_per_sec: MetricPattern5, - pub outputs_per_sec: MetricPattern5, - pub sent_sum: CatalogTree_Computed_Transactions_Volume_SentSum, - pub tx_per_sec: MetricPattern5, +pub struct CatalogTree_Transactions_Volume { + pub annualized_volume: MetricPattern4, + pub annualized_volume_btc: MetricPattern4, + pub annualized_volume_usd: MetricPattern4, + pub inputs_per_sec: MetricPattern4, + pub outputs_per_sec: MetricPattern4, + pub sent_sum: ActiveSupplyPattern, + pub tx_per_sec: MetricPattern4, } -impl CatalogTree_Computed_Transactions_Volume { +impl CatalogTree_Transactions_Volume { pub fn new(client: Arc, base_path: String) -> Self { Self { - annualized_volume: MetricPattern5::new(client.clone(), format!("{base_path}_annualized_volume")), - annualized_volume_btc: MetricPattern5::new(client.clone(), format!("{base_path}_annualized_volume_btc")), - annualized_volume_usd: MetricPattern5::new(client.clone(), format!("{base_path}_annualized_volume_usd")), - inputs_per_sec: MetricPattern5::new(client.clone(), format!("{base_path}_inputs_per_sec")), - outputs_per_sec: MetricPattern5::new(client.clone(), format!("{base_path}_outputs_per_sec")), - sent_sum: CatalogTree_Computed_Transactions_Volume_SentSum::new(client.clone(), format!("{base_path}_sent_sum")), - tx_per_sec: MetricPattern5::new(client.clone(), format!("{base_path}_tx_per_sec")), - } - } -} - -/// Catalog tree node. 
-pub struct CatalogTree_Computed_Transactions_Volume_SentSum { - pub bitcoin: MetricPattern1, - pub dollars: DifficultyAdjustmentPattern, - pub sats: DifficultyAdjustmentPattern, -} - -impl CatalogTree_Computed_Transactions_Volume_SentSum { - pub fn new(client: Arc, base_path: String) -> Self { - Self { - bitcoin: MetricPattern1::new(client.clone(), format!("{base_path}_bitcoin")), - dollars: DifficultyAdjustmentPattern::new(client.clone(), "sent_sum_usd".to_string()), - sats: DifficultyAdjustmentPattern::new(client.clone(), "sent_sum".to_string()), - } - } -} - -/// Catalog tree node. -pub struct CatalogTree_Indexed { - pub address: CatalogTree_Indexed_Address, - pub block: CatalogTree_Indexed_Block, - pub output: CatalogTree_Indexed_Output, - pub tx: CatalogTree_Indexed_Tx, - pub txin: CatalogTree_Indexed_Txin, - pub txout: CatalogTree_Indexed_Txout, -} - -impl CatalogTree_Indexed { - pub fn new(client: Arc, base_path: String) -> Self { - Self { - address: CatalogTree_Indexed_Address::new(client.clone(), format!("{base_path}_address")), - block: CatalogTree_Indexed_Block::new(client.clone(), format!("{base_path}_block")), - output: CatalogTree_Indexed_Output::new(client.clone(), format!("{base_path}_output")), - tx: CatalogTree_Indexed_Tx::new(client.clone(), format!("{base_path}_tx")), - txin: CatalogTree_Indexed_Txin::new(client.clone(), format!("{base_path}_txin")), - txout: CatalogTree_Indexed_Txout::new(client.clone(), format!("{base_path}_txout")), - } - } -} - -/// Catalog tree node. 
-pub struct CatalogTree_Indexed_Address { - pub first_p2aaddressindex: MetricPattern26, - pub first_p2pk33addressindex: MetricPattern26, - pub first_p2pk65addressindex: MetricPattern26, - pub first_p2pkhaddressindex: MetricPattern26, - pub first_p2shaddressindex: MetricPattern26, - pub first_p2traddressindex: MetricPattern26, - pub first_p2wpkhaddressindex: MetricPattern26, - pub first_p2wshaddressindex: MetricPattern26, - pub p2abytes: MetricPattern30, - pub p2pk33bytes: MetricPattern32, - pub p2pk65bytes: MetricPattern33, - pub p2pkhbytes: MetricPattern34, - pub p2shbytes: MetricPattern35, - pub p2trbytes: MetricPattern36, - pub p2wpkhbytes: MetricPattern37, - pub p2wshbytes: MetricPattern38, -} - -impl CatalogTree_Indexed_Address { - pub fn new(client: Arc, base_path: String) -> Self { - Self { - first_p2aaddressindex: MetricPattern26::new(client.clone(), format!("{base_path}_first_p2aaddressindex")), - first_p2pk33addressindex: MetricPattern26::new(client.clone(), format!("{base_path}_first_p2pk33addressindex")), - first_p2pk65addressindex: MetricPattern26::new(client.clone(), format!("{base_path}_first_p2pk65addressindex")), - first_p2pkhaddressindex: MetricPattern26::new(client.clone(), format!("{base_path}_first_p2pkhaddressindex")), - first_p2shaddressindex: MetricPattern26::new(client.clone(), format!("{base_path}_first_p2shaddressindex")), - first_p2traddressindex: MetricPattern26::new(client.clone(), format!("{base_path}_first_p2traddressindex")), - first_p2wpkhaddressindex: MetricPattern26::new(client.clone(), format!("{base_path}_first_p2wpkhaddressindex")), - first_p2wshaddressindex: MetricPattern26::new(client.clone(), format!("{base_path}_first_p2wshaddressindex")), - p2abytes: MetricPattern30::new(client.clone(), format!("{base_path}_p2abytes")), - p2pk33bytes: MetricPattern32::new(client.clone(), format!("{base_path}_p2pk33bytes")), - p2pk65bytes: MetricPattern33::new(client.clone(), format!("{base_path}_p2pk65bytes")), - p2pkhbytes: 
MetricPattern34::new(client.clone(), format!("{base_path}_p2pkhbytes")), - p2shbytes: MetricPattern35::new(client.clone(), format!("{base_path}_p2shbytes")), - p2trbytes: MetricPattern36::new(client.clone(), format!("{base_path}_p2trbytes")), - p2wpkhbytes: MetricPattern37::new(client.clone(), format!("{base_path}_p2wpkhbytes")), - p2wshbytes: MetricPattern38::new(client.clone(), format!("{base_path}_p2wshbytes")), - } - } -} - -/// Catalog tree node. -pub struct CatalogTree_Indexed_Block { - pub blockhash: MetricPattern26, - pub difficulty: MetricPattern26, - pub timestamp: MetricPattern26, - pub total_size: MetricPattern26, - pub weight: MetricPattern26, -} - -impl CatalogTree_Indexed_Block { - pub fn new(client: Arc, base_path: String) -> Self { - Self { - blockhash: MetricPattern26::new(client.clone(), format!("{base_path}_blockhash")), - difficulty: MetricPattern26::new(client.clone(), format!("{base_path}_difficulty")), - timestamp: MetricPattern26::new(client.clone(), format!("{base_path}_timestamp")), - total_size: MetricPattern26::new(client.clone(), format!("{base_path}_total_size")), - weight: MetricPattern26::new(client.clone(), format!("{base_path}_weight")), - } - } -} - -/// Catalog tree node. 
-pub struct CatalogTree_Indexed_Output { - pub first_emptyoutputindex: MetricPattern26, - pub first_opreturnindex: MetricPattern26, - pub first_p2msoutputindex: MetricPattern26, - pub first_unknownoutputindex: MetricPattern26, - pub txindex: MetricPattern8, -} - -impl CatalogTree_Indexed_Output { - pub fn new(client: Arc, base_path: String) -> Self { - Self { - first_emptyoutputindex: MetricPattern26::new(client.clone(), format!("{base_path}_first_emptyoutputindex")), - first_opreturnindex: MetricPattern26::new(client.clone(), format!("{base_path}_first_opreturnindex")), - first_p2msoutputindex: MetricPattern26::new(client.clone(), format!("{base_path}_first_p2msoutputindex")), - first_unknownoutputindex: MetricPattern26::new(client.clone(), format!("{base_path}_first_unknownoutputindex")), - txindex: MetricPattern8::new(client.clone(), format!("{base_path}_txindex")), - } - } -} - -/// Catalog tree node. -pub struct CatalogTree_Indexed_Tx { - pub base_size: MetricPattern39, - pub first_txindex: MetricPattern26, - pub first_txinindex: MetricPattern39, - pub first_txoutindex: MetricPattern39, - pub height: MetricPattern39, - pub is_explicitly_rbf: MetricPattern39, - pub rawlocktime: MetricPattern39, - pub total_size: MetricPattern39, - pub txid: MetricPattern39, - pub txversion: MetricPattern39, -} - -impl CatalogTree_Indexed_Tx { - pub fn new(client: Arc, base_path: String) -> Self { - Self { - base_size: MetricPattern39::new(client.clone(), format!("{base_path}_base_size")), - first_txindex: MetricPattern26::new(client.clone(), format!("{base_path}_first_txindex")), - first_txinindex: MetricPattern39::new(client.clone(), format!("{base_path}_first_txinindex")), - first_txoutindex: MetricPattern39::new(client.clone(), format!("{base_path}_first_txoutindex")), - height: MetricPattern39::new(client.clone(), format!("{base_path}_height")), - is_explicitly_rbf: MetricPattern39::new(client.clone(), format!("{base_path}_is_explicitly_rbf")), - rawlocktime: 
MetricPattern39::new(client.clone(), format!("{base_path}_rawlocktime")), - total_size: MetricPattern39::new(client.clone(), format!("{base_path}_total_size")), - txid: MetricPattern39::new(client.clone(), format!("{base_path}_txid")), - txversion: MetricPattern39::new(client.clone(), format!("{base_path}_txversion")), - } - } -} - -/// Catalog tree node. -pub struct CatalogTree_Indexed_Txin { - pub first_txinindex: MetricPattern26, - pub outpoint: MetricPattern27, - pub outputtype: MetricPattern27, - pub txindex: MetricPattern27, - pub typeindex: MetricPattern27, -} - -impl CatalogTree_Indexed_Txin { - pub fn new(client: Arc, base_path: String) -> Self { - Self { - first_txinindex: MetricPattern26::new(client.clone(), format!("{base_path}_first_txinindex")), - outpoint: MetricPattern27::new(client.clone(), format!("{base_path}_outpoint")), - outputtype: MetricPattern27::new(client.clone(), format!("{base_path}_outputtype")), - txindex: MetricPattern27::new(client.clone(), format!("{base_path}_txindex")), - typeindex: MetricPattern27::new(client.clone(), format!("{base_path}_typeindex")), - } - } -} - -/// Catalog tree node. 
-pub struct CatalogTree_Indexed_Txout { - pub first_txoutindex: MetricPattern26, - pub outputtype: MetricPattern29, - pub txindex: MetricPattern29, - pub typeindex: MetricPattern29, - pub value: MetricPattern29, -} - -impl CatalogTree_Indexed_Txout { - pub fn new(client: Arc, base_path: String) -> Self { - Self { - first_txoutindex: MetricPattern26::new(client.clone(), format!("{base_path}_first_txoutindex")), - outputtype: MetricPattern29::new(client.clone(), format!("{base_path}_outputtype")), - txindex: MetricPattern29::new(client.clone(), format!("{base_path}_txindex")), - typeindex: MetricPattern29::new(client.clone(), format!("{base_path}_typeindex")), - value: MetricPattern29::new(client.clone(), format!("{base_path}_value")), + annualized_volume: MetricPattern4::new(client.clone(), format!("{base_path}_annualized_volume")), + annualized_volume_btc: MetricPattern4::new(client.clone(), format!("{base_path}_annualized_volume_btc")), + annualized_volume_usd: MetricPattern4::new(client.clone(), format!("{base_path}_annualized_volume_usd")), + inputs_per_sec: MetricPattern4::new(client.clone(), format!("{base_path}_inputs_per_sec")), + outputs_per_sec: MetricPattern4::new(client.clone(), format!("{base_path}_outputs_per_sec")), + sent_sum: ActiveSupplyPattern::new(client.clone(), "sent_sum".to_string()), + tx_per_sec: MetricPattern4::new(client.clone(), format!("{base_path}_tx_per_sec")), } } } @@ -7421,12 +6857,12 @@ impl BrkClient { /// Get metric data /// /// Fetch data for a specific metric at the given index. Use query parameters to filter by date range and format (json/csv). 
- pub fn get_metric_by_index(&self, metric: &str, index: &str, from: Option<&str>, to: Option<&str>, count: Option<&str>, format: Option<&str>) -> Result { + pub fn get_metric_by_index(&self, index: &str, metric: &str, count: Option<&str>, format: Option<&str>, from: Option<&str>, to: Option<&str>) -> Result { let mut query = Vec::new(); - if let Some(v) = from { query.push(format!("from={}", v)); } - if let Some(v) = to { query.push(format!("to={}", v)); } if let Some(v) = count { query.push(format!("count={}", v)); } if let Some(v) = format { query.push(format!("format={}", v)); } + if let Some(v) = from { query.push(format!("from={}", v)); } + if let Some(v) = to { query.push(format!("to={}", v)); } let query_str = if query.is_empty() { String::new() } else { format!("?{}", query.join("&")) }; self.base.get(&format!("/api/metric/{metric}/{index}{}", query_str)) } @@ -7434,14 +6870,14 @@ impl BrkClient { /// Bulk metric data /// /// Fetch multiple metrics in a single request. Supports filtering by index and date range. Returns an array of MetricData objects. 
- pub fn get_metrics_bulk(&self, metrics: &str, index: &str, from: Option<&str>, to: Option<&str>, count: Option<&str>, format: Option<&str>) -> Result> { + pub fn get_metrics_bulk(&self, count: Option<&str>, format: Option<&str>, from: Option<&str>, index: &str, metrics: &str, to: Option<&str>) -> Result> { let mut query = Vec::new(); - query.push(format!("metrics={}", metrics)); - query.push(format!("index={}", index)); - if let Some(v) = from { query.push(format!("from={}", v)); } - if let Some(v) = to { query.push(format!("to={}", v)); } if let Some(v) = count { query.push(format!("count={}", v)); } if let Some(v) = format { query.push(format!("format={}", v)); } + if let Some(v) = from { query.push(format!("from={}", v)); } + query.push(format!("index={}", index)); + query.push(format!("metrics={}", metrics)); + if let Some(v) = to { query.push(format!("to={}", v)); } let query_str = if query.is_empty() { String::new() } else { format!("?{}", query.join("&")) }; self.base.get(&format!("/api/metrics/bulk{}", query_str)) } diff --git a/crates/brk_computer/Cargo.toml b/crates/brk_computer/Cargo.toml index db7821c02..899af6b81 100644 --- a/crates/brk_computer/Cargo.toml +++ b/crates/brk_computer/Cargo.toml @@ -22,7 +22,7 @@ brk_store = { workspace = true } brk_traversable = { workspace = true } brk_types = { workspace = true } derive_more = { workspace = true } -log = { workspace = true } +tracing = { workspace = true } pco = "0.4.9" rayon = { workspace = true } rustc-hash = { workspace = true } diff --git a/crates/brk_computer/examples/computer_bench.rs b/crates/brk_computer/examples/computer_bench.rs index 03e9e20ba..37cb24ae6 100644 --- a/crates/brk_computer/examples/computer_bench.rs +++ b/crates/brk_computer/examples/computer_bench.rs @@ -9,7 +9,7 @@ use brk_indexer::Indexer; use brk_iterator::Blocks; use brk_reader::Reader; use brk_rpc::{Auth, Client}; -use log::{debug, info}; +use tracing::{debug, info}; use vecdb::Exit; pub fn main() -> Result<()> { diff 
--git a/crates/brk_computer/examples/computer_read.rs b/crates/brk_computer/examples/computer_read.rs index c959a67a1..e2766882d 100644 --- a/crates/brk_computer/examples/computer_read.rs +++ b/crates/brk_computer/examples/computer_read.rs @@ -30,7 +30,7 @@ fn run() -> Result<()> { let computer = Computer::forced_import(&outputs_dir, &indexer, Some(fetcher))?; - let _a = dbg!(computer.transactions.fees.txindex_to_fee.region().meta()); + let _a = dbg!(computer.transactions.fees.fee.base.region().meta()); Ok(()) } diff --git a/crates/brk_computer/examples/full_bench.rs b/crates/brk_computer/examples/full_bench.rs index 9c04e14c9..265a193ab 100644 --- a/crates/brk_computer/examples/full_bench.rs +++ b/crates/brk_computer/examples/full_bench.rs @@ -14,7 +14,7 @@ use brk_indexer::Indexer; use brk_iterator::Blocks; use brk_reader::Reader; use brk_rpc::{Auth, Client}; -use log::{debug, info}; +use tracing::{debug, info}; use vecdb::Exit; pub fn main() -> color_eyre::Result<()> { diff --git a/crates/brk_computer/src/blocks/count/compute.rs b/crates/brk_computer/src/blocks/count/compute.rs index 7469bd5fb..ece1d52db 100644 --- a/crates/brk_computer/src/blocks/count/compute.rs +++ b/crates/brk_computer/src/blocks/count/compute.rs @@ -1,11 +1,11 @@ use brk_error::Result; use brk_indexer::Indexer; use brk_types::{Height, StoredU32}; -use vecdb::{Exit, TypedVecIterator}; +use vecdb::{EagerVec, Exit, PcoVec, TypedVecIterator}; use super::super::time; use super::Vecs; -use crate::{indexes, ComputeIndexes}; +use crate::{ComputeIndexes, indexes, internal::ComputedBlockLast}; impl Vecs { pub fn compute( @@ -16,67 +16,104 @@ impl Vecs { starting_indexes: &ComputeIndexes, exit: &Exit, ) -> Result<()> { - let mut height_to_timestamp_fixed_iter = - time.height_to_timestamp_fixed.into_iter(); - let mut prev = Height::ZERO; - self.height_to_24h_block_count.compute_transform( - starting_indexes.height, - &time.height_to_timestamp_fixed, - |(h, t, ..)| { - while 
t.difference_in_days_between(height_to_timestamp_fixed_iter.get_unwrap(prev)) - > 0 - { - prev.increment(); - if prev > h { - unreachable!() - } - } - (h, StoredU32::from(*h + 1 - *prev)) - }, - exit, - )?; - - self.indexes_to_block_count + self.block_count .compute_all(indexes, starting_indexes, exit, |v| { v.compute_range( starting_indexes.height, - &indexer.vecs.block.height_to_weight, + &indexer.vecs.blocks.weight, |h| (h, StoredU32::from(1_u32)), exit, )?; Ok(()) })?; - self.indexes_to_1w_block_count.compute_all(starting_indexes, exit, |v| { - v.compute_sum( - starting_indexes.dateindex, - self.indexes_to_block_count.dateindex.sum.inner(), - 7, - exit, - )?; - Ok(()) - })?; + // Compute rolling window starts + self.compute_rolling_start(time, starting_indexes, exit, 1, |s| &mut s._24h_start)?; + self.compute_rolling_start(time, starting_indexes, exit, 7, |s| &mut s._1w_start)?; + self.compute_rolling_start(time, starting_indexes, exit, 30, |s| &mut s._1m_start)?; + self.compute_rolling_start(time, starting_indexes, exit, 365, |s| &mut s._1y_start)?; - self.indexes_to_1m_block_count.compute_all(starting_indexes, exit, |v| { - v.compute_sum( - starting_indexes.dateindex, - self.indexes_to_block_count.dateindex.sum.inner(), - 30, - exit, - )?; - Ok(()) - })?; - - self.indexes_to_1y_block_count.compute_all(starting_indexes, exit, |v| { - v.compute_sum( - starting_indexes.dateindex, - self.indexes_to_block_count.dateindex.sum.inner(), - 365, - exit, - )?; - Ok(()) - })?; + // Compute rolling window block counts + self.compute_rolling_block_count( + indexes, + starting_indexes, + exit, + &self._24h_start.clone(), + |s| &mut s._24h_block_count, + )?; + self.compute_rolling_block_count( + indexes, + starting_indexes, + exit, + &self._1w_start.clone(), + |s| &mut s._1w_block_count, + )?; + self.compute_rolling_block_count( + indexes, + starting_indexes, + exit, + &self._1m_start.clone(), + |s| &mut s._1m_block_count, + )?; + self.compute_rolling_block_count( + indexes, 
+ starting_indexes, + exit, + &self._1y_start.clone(), + |s| &mut s._1y_block_count, + )?; Ok(()) } + + fn compute_rolling_start( + &mut self, + time: &time::Vecs, + starting_indexes: &ComputeIndexes, + exit: &Exit, + days: usize, + get_field: F, + ) -> Result<()> + where + F: FnOnce(&mut Self) -> &mut EagerVec>, + { + let mut iter = time.timestamp_fixed.into_iter(); + let mut prev = Height::ZERO; + Ok(get_field(self).compute_transform( + starting_indexes.height, + &time.timestamp_fixed, + |(h, t, ..)| { + while t.difference_in_days_between(iter.get_unwrap(prev)) >= days { + prev.increment(); + if prev > h { + unreachable!() + } + } + (h, prev) + }, + exit, + )?) + } + + fn compute_rolling_block_count( + &mut self, + indexes: &indexes::Vecs, + starting_indexes: &ComputeIndexes, + exit: &Exit, + start_height: &EagerVec>, + get_field: F, + ) -> Result<()> + where + F: FnOnce(&mut Self) -> &mut ComputedBlockLast, + { + get_field(self).compute_all(indexes, starting_indexes, exit, |v| { + v.compute_transform( + starting_indexes.height, + start_height, + |(h, start, ..)| (h, StoredU32::from(*h + 1 - *start)), + exit, + )?; + Ok(()) + }) + } } diff --git a/crates/brk_computer/src/blocks/count/import.rs b/crates/brk_computer/src/blocks/count/import.rs index 26e36385a..ca181b457 100644 --- a/crates/brk_computer/src/blocks/count/import.rs +++ b/crates/brk_computer/src/blocks/count/import.rs @@ -1,6 +1,6 @@ use brk_error::Result; use brk_types::{StoredU64, Version}; -use vecdb::{Database, EagerVec, ImportableVec, IterableCloneableVec, LazyVecFrom1}; +use vecdb::{Database, ImportableVec}; use super::Vecs; use crate::{ @@ -10,74 +10,48 @@ use crate::{ TARGET_BLOCKS_PER_YEAR, }, indexes, - internal::{ComputedBlockSumCum, ComputedDateLast}, + internal::{ComputedBlockLast, ComputedBlockSumCum, LazyPeriodVecs}, }; impl Vecs { pub fn forced_import(db: &Database, version: Version, indexes: &indexes::Vecs) -> Result { Ok(Self { - dateindex_to_block_count_target: LazyVecFrom1::init( + 
block_count_target: LazyPeriodVecs::new( "block_count_target", version, - indexes.time.dateindex_to_dateindex.boxed_clone(), + indexes, |_, _| Some(StoredU64::from(TARGET_BLOCKS_PER_DAY)), - ), - weekindex_to_block_count_target: LazyVecFrom1::init( - "block_count_target", - version, - indexes.time.weekindex_to_weekindex.boxed_clone(), |_, _| Some(StoredU64::from(TARGET_BLOCKS_PER_WEEK)), - ), - monthindex_to_block_count_target: LazyVecFrom1::init( - "block_count_target", - version, - indexes.time.monthindex_to_monthindex.boxed_clone(), |_, _| Some(StoredU64::from(TARGET_BLOCKS_PER_MONTH)), - ), - quarterindex_to_block_count_target: LazyVecFrom1::init( - "block_count_target", - version, - indexes.time.quarterindex_to_quarterindex.boxed_clone(), |_, _| Some(StoredU64::from(TARGET_BLOCKS_PER_QUARTER)), - ), - semesterindex_to_block_count_target: LazyVecFrom1::init( - "block_count_target", - version, - indexes.time.semesterindex_to_semesterindex.boxed_clone(), |_, _| Some(StoredU64::from(TARGET_BLOCKS_PER_SEMESTER)), - ), - yearindex_to_block_count_target: LazyVecFrom1::init( - "block_count_target", - version, - indexes.time.yearindex_to_yearindex.boxed_clone(), |_, _| Some(StoredU64::from(TARGET_BLOCKS_PER_YEAR)), - ), - decadeindex_to_block_count_target: LazyVecFrom1::init( - "block_count_target", - version, - indexes.time.decadeindex_to_decadeindex.boxed_clone(), |_, _| Some(StoredU64::from(TARGET_BLOCKS_PER_DECADE)), ), - height_to_24h_block_count: EagerVec::forced_import(db, "24h_block_count", version)?, - indexes_to_block_count: ComputedBlockSumCum::forced_import( + block_count: ComputedBlockSumCum::forced_import(db, "block_count", version, indexes)?, + _24h_start: ImportableVec::forced_import(db, "24h_start", version)?, + _1w_start: ImportableVec::forced_import(db, "1w_start", version)?, + _1m_start: ImportableVec::forced_import(db, "1m_start", version)?, + _1y_start: ImportableVec::forced_import(db, "1y_start", version)?, + _24h_block_count: 
ComputedBlockLast::forced_import( db, - "block_count", + "24h_block_count", version, indexes, )?, - indexes_to_1w_block_count: ComputedDateLast::forced_import( + _1w_block_count: ComputedBlockLast::forced_import( db, "1w_block_count", version, indexes, )?, - indexes_to_1m_block_count: ComputedDateLast::forced_import( + _1m_block_count: ComputedBlockLast::forced_import( db, "1m_block_count", version, indexes, )?, - indexes_to_1y_block_count: ComputedDateLast::forced_import( + _1y_block_count: ComputedBlockLast::forced_import( db, "1y_block_count", version, diff --git a/crates/brk_computer/src/blocks/count/vecs.rs b/crates/brk_computer/src/blocks/count/vecs.rs index 5d78a2fd1..31c268093 100644 --- a/crates/brk_computer/src/blocks/count/vecs.rs +++ b/crates/brk_computer/src/blocks/count/vecs.rs @@ -1,28 +1,21 @@ use brk_traversable::Traversable; -use brk_types::{ - DateIndex, DecadeIndex, MonthIndex, QuarterIndex, SemesterIndex, StoredU32, StoredU64, - WeekIndex, YearIndex, -}; -use vecdb::LazyVecFrom1; +use brk_types::{Height, StoredU32, StoredU64}; +use vecdb::{EagerVec, PcoVec}; -use crate::internal::{ComputedBlockSumCum, ComputedDateLast}; +use crate::internal::{ComputedBlockLast, ComputedBlockSumCum, LazyPeriodVecs}; #[derive(Clone, Traversable)] pub struct Vecs { - pub dateindex_to_block_count_target: LazyVecFrom1, - pub weekindex_to_block_count_target: LazyVecFrom1, - pub monthindex_to_block_count_target: - LazyVecFrom1, - pub quarterindex_to_block_count_target: - LazyVecFrom1, - pub semesterindex_to_block_count_target: - LazyVecFrom1, - pub yearindex_to_block_count_target: LazyVecFrom1, - pub decadeindex_to_block_count_target: - LazyVecFrom1, - pub height_to_24h_block_count: vecdb::EagerVec>, - pub indexes_to_block_count: ComputedBlockSumCum, - pub indexes_to_1w_block_count: ComputedDateLast, - pub indexes_to_1m_block_count: ComputedDateLast, - pub indexes_to_1y_block_count: ComputedDateLast, + pub block_count_target: LazyPeriodVecs, + pub block_count: 
ComputedBlockSumCum, + // Rolling window starts (height-indexed only, no date aggregation needed) + pub _24h_start: EagerVec>, + pub _1w_start: EagerVec>, + pub _1m_start: EagerVec>, + pub _1y_start: EagerVec>, + // Rolling window block counts + pub _24h_block_count: ComputedBlockLast, + pub _1w_block_count: ComputedBlockLast, + pub _1m_block_count: ComputedBlockLast, + pub _1y_block_count: ComputedBlockLast, } diff --git a/crates/brk_computer/src/blocks/difficulty/compute.rs b/crates/brk_computer/src/blocks/difficulty/compute.rs index e2a945385..8af2963d4 100644 --- a/crates/brk_computer/src/blocks/difficulty/compute.rs +++ b/crates/brk_computer/src/blocks/difficulty/compute.rs @@ -2,9 +2,9 @@ use brk_error::Result; use brk_types::StoredU32; use vecdb::{Exit, TypedVecIterator}; -use super::Vecs; use super::super::TARGET_BLOCKS_PER_DAY_F32; -use crate::{indexes, ComputeIndexes}; +use super::Vecs; +use crate::{ComputeIndexes, indexes}; impl Vecs { pub fn compute( @@ -14,12 +14,13 @@ impl Vecs { exit: &Exit, ) -> Result<()> { let mut height_to_difficultyepoch_iter = - indexes.block.height_to_difficultyepoch.into_iter(); - self.indexes_to_difficultyepoch.compute_all(starting_indexes, exit, |vec| { - let mut height_count_iter = indexes.time.dateindex_to_height_count.into_iter(); + indexes.height.difficultyepoch.into_iter(); + self.difficultyepoch + .compute_all(starting_indexes, exit, |vec| { + let mut height_count_iter = indexes.dateindex.height_count.into_iter(); vec.compute_transform( starting_indexes.dateindex, - &indexes.time.dateindex_to_first_height, + &indexes.dateindex.first_height, |(di, height, ..)| { ( di, @@ -32,27 +33,35 @@ impl Vecs { Ok(()) })?; - self.indexes_to_blocks_before_next_difficulty_adjustment - .compute_all(indexes, starting_indexes, exit, |v| { + self.blocks_before_next_difficulty_adjustment.compute_all( + indexes, + starting_indexes, + exit, + |v| { v.compute_transform( starting_indexes.height, - &indexes.block.height_to_height, + 
&indexes.height.identity, |(h, ..)| (h, StoredU32::from(h.left_before_next_diff_adj())), exit, )?; Ok(()) - })?; + }, + )?; - self.indexes_to_days_before_next_difficulty_adjustment - .compute_all(indexes, starting_indexes, exit, |v| { + self.days_before_next_difficulty_adjustment.compute_all( + indexes, + starting_indexes, + exit, + |v| { v.compute_transform( starting_indexes.height, - &self.indexes_to_blocks_before_next_difficulty_adjustment.height, + &self.blocks_before_next_difficulty_adjustment.height, |(h, blocks, ..)| (h, (*blocks as f32 / TARGET_BLOCKS_PER_DAY_F32).into()), exit, )?; Ok(()) - })?; + }, + )?; Ok(()) } diff --git a/crates/brk_computer/src/blocks/difficulty/import.rs b/crates/brk_computer/src/blocks/difficulty/import.rs index 034902bc3..6f811c78c 100644 --- a/crates/brk_computer/src/blocks/difficulty/import.rs +++ b/crates/brk_computer/src/blocks/difficulty/import.rs @@ -13,19 +13,19 @@ impl Vecs { let v2 = Version::TWO; Ok(Self { - indexes_to_difficultyepoch: ComputedDateLast::forced_import( + difficultyepoch: ComputedDateLast::forced_import( db, "difficultyepoch", version, indexes, )?, - indexes_to_blocks_before_next_difficulty_adjustment: ComputedBlockLast::forced_import( + blocks_before_next_difficulty_adjustment: ComputedBlockLast::forced_import( db, "blocks_before_next_difficulty_adjustment", version + v2, indexes, )?, - indexes_to_days_before_next_difficulty_adjustment: ComputedBlockLast::forced_import( + days_before_next_difficulty_adjustment: ComputedBlockLast::forced_import( db, "days_before_next_difficulty_adjustment", version + v2, diff --git a/crates/brk_computer/src/blocks/difficulty/vecs.rs b/crates/brk_computer/src/blocks/difficulty/vecs.rs index 7bfef635f..8fd8bf63f 100644 --- a/crates/brk_computer/src/blocks/difficulty/vecs.rs +++ b/crates/brk_computer/src/blocks/difficulty/vecs.rs @@ -6,7 +6,7 @@ use crate::internal::{ComputedBlockLast, ComputedDateLast}; /// Difficulty epoch metrics and countdown #[derive(Clone, 
Traversable)] pub struct Vecs { - pub indexes_to_difficultyepoch: ComputedDateLast, - pub indexes_to_blocks_before_next_difficulty_adjustment: ComputedBlockLast, - pub indexes_to_days_before_next_difficulty_adjustment: ComputedBlockLast, + pub difficultyepoch: ComputedDateLast, + pub blocks_before_next_difficulty_adjustment: ComputedBlockLast, + pub days_before_next_difficulty_adjustment: ComputedBlockLast, } diff --git a/crates/brk_computer/src/blocks/halving/compute.rs b/crates/brk_computer/src/blocks/halving/compute.rs index 48f9df86e..a60738cf1 100644 --- a/crates/brk_computer/src/blocks/halving/compute.rs +++ b/crates/brk_computer/src/blocks/halving/compute.rs @@ -2,9 +2,9 @@ use brk_error::Result; use brk_types::StoredU32; use vecdb::{Exit, TypedVecIterator}; -use super::Vecs; use super::super::TARGET_BLOCKS_PER_DAY_F32; -use crate::{indexes, ComputeIndexes}; +use super::Vecs; +use crate::{ComputeIndexes, indexes}; impl Vecs { pub fn compute( @@ -13,12 +13,13 @@ impl Vecs { starting_indexes: &ComputeIndexes, exit: &Exit, ) -> Result<()> { - let mut height_to_halvingepoch_iter = indexes.block.height_to_halvingepoch.into_iter(); - self.indexes_to_halvingepoch.compute_all(starting_indexes, exit, |vec| { - let mut height_count_iter = indexes.time.dateindex_to_height_count.into_iter(); + let mut height_to_halvingepoch_iter = indexes.height.halvingepoch.into_iter(); + self.halvingepoch + .compute_all(starting_indexes, exit, |vec| { + let mut height_count_iter = indexes.dateindex.height_count.into_iter(); vec.compute_transform( starting_indexes.dateindex, - &indexes.time.dateindex_to_first_height, + &indexes.dateindex.first_height, |(di, height, ..)| { ( di, @@ -31,35 +32,27 @@ impl Vecs { Ok(()) })?; - self.indexes_to_blocks_before_next_halving.compute_all( - indexes, - starting_indexes, - exit, - |v| { + self.blocks_before_next_halving + .compute_all(indexes, starting_indexes, exit, |v| { v.compute_transform( starting_indexes.height, - 
&indexes.block.height_to_height, + &indexes.height.identity, |(h, ..)| (h, StoredU32::from(h.left_before_next_halving())), exit, )?; Ok(()) - }, - )?; + })?; - self.indexes_to_days_before_next_halving.compute_all( - indexes, - starting_indexes, - exit, - |v| { + self.days_before_next_halving + .compute_all(indexes, starting_indexes, exit, |v| { v.compute_transform( starting_indexes.height, - &self.indexes_to_blocks_before_next_halving.height, + &self.blocks_before_next_halving.height, |(h, blocks, ..)| (h, (*blocks as f32 / TARGET_BLOCKS_PER_DAY_F32).into()), exit, )?; Ok(()) - }, - )?; + })?; Ok(()) } diff --git a/crates/brk_computer/src/blocks/halving/import.rs b/crates/brk_computer/src/blocks/halving/import.rs index 8604d2d0f..aaf37061e 100644 --- a/crates/brk_computer/src/blocks/halving/import.rs +++ b/crates/brk_computer/src/blocks/halving/import.rs @@ -13,19 +13,14 @@ impl Vecs { let v2 = Version::TWO; Ok(Self { - indexes_to_halvingepoch: ComputedDateLast::forced_import( - db, - "halvingepoch", - version, - indexes, - )?, - indexes_to_blocks_before_next_halving: ComputedBlockLast::forced_import( + halvingepoch: ComputedDateLast::forced_import(db, "halvingepoch", version, indexes)?, + blocks_before_next_halving: ComputedBlockLast::forced_import( db, "blocks_before_next_halving", version + v2, indexes, )?, - indexes_to_days_before_next_halving: ComputedBlockLast::forced_import( + days_before_next_halving: ComputedBlockLast::forced_import( db, "days_before_next_halving", version + v2, diff --git a/crates/brk_computer/src/blocks/halving/vecs.rs b/crates/brk_computer/src/blocks/halving/vecs.rs index a94bee6e6..40555af5b 100644 --- a/crates/brk_computer/src/blocks/halving/vecs.rs +++ b/crates/brk_computer/src/blocks/halving/vecs.rs @@ -6,7 +6,7 @@ use crate::internal::{ComputedBlockLast, ComputedDateLast}; /// Halving epoch metrics and countdown #[derive(Clone, Traversable)] pub struct Vecs { - pub indexes_to_halvingepoch: ComputedDateLast, - pub 
indexes_to_blocks_before_next_halving: ComputedBlockLast, - pub indexes_to_days_before_next_halving: ComputedBlockLast, + pub halvingepoch: ComputedDateLast, + pub blocks_before_next_halving: ComputedBlockLast, + pub days_before_next_halving: ComputedBlockLast, } diff --git a/crates/brk_computer/src/blocks/interval/compute.rs b/crates/brk_computer/src/blocks/interval/compute.rs index 5d0cdb732..a923a8147 100644 --- a/crates/brk_computer/src/blocks/interval/compute.rs +++ b/crates/brk_computer/src/blocks/interval/compute.rs @@ -2,7 +2,7 @@ use brk_error::Result; use vecdb::Exit; use super::Vecs; -use crate::{indexes, ComputeIndexes}; +use crate::{ComputeIndexes, indexes}; impl Vecs { pub fn compute( @@ -11,12 +11,7 @@ impl Vecs { starting_indexes: &ComputeIndexes, exit: &Exit, ) -> Result<()> { - self.indexes_to_block_interval.derive_from( - indexes, - starting_indexes, - &self.height_to_interval, - exit, - )?; + self.interval.derive_from(indexes, starting_indexes, exit)?; Ok(()) } diff --git a/crates/brk_computer/src/blocks/interval/import.rs b/crates/brk_computer/src/blocks/interval/import.rs index 40653e58a..82f195384 100644 --- a/crates/brk_computer/src/blocks/interval/import.rs +++ b/crates/brk_computer/src/blocks/interval/import.rs @@ -1,10 +1,10 @@ use brk_error::Result; use brk_indexer::Indexer; use brk_types::{CheckedSub, Height, Timestamp, Version}; -use vecdb::{Database, IterableCloneableVec, LazyVecFrom1}; +use vecdb::{Database, VecIndex}; use super::Vecs; -use crate::{indexes, internal::DerivedComputedBlockDistribution}; +use crate::{indexes, internal::LazyBlockDistribution}; impl Vecs { pub fn forced_import( @@ -13,34 +13,25 @@ impl Vecs { indexer: &Indexer, indexes: &indexes::Vecs, ) -> Result { - let height_to_interval = LazyVecFrom1::init( - "interval", + let interval = LazyBlockDistribution::forced_import_with_init( + db, + "block_interval", version, - indexer.vecs.block.height_to_timestamp.boxed_clone(), + indexer.vecs.blocks.timestamp.clone(), + 
indexes, |height: Height, timestamp_iter| { - let timestamp = timestamp_iter.get(height)?; + let timestamp = timestamp_iter.get_at(height.to_usize())?; let interval = height.decremented().map_or(Timestamp::ZERO, |prev_h| { timestamp_iter - .get(prev_h) + .get_at(prev_h.to_usize()) .map_or(Timestamp::ZERO, |prev_t| { timestamp.checked_sub(prev_t).unwrap_or(Timestamp::ZERO) }) }); Some(interval) }, - ); - - let indexes_to_block_interval = DerivedComputedBlockDistribution::forced_import( - db, - "block_interval", - height_to_interval.boxed_clone(), - version, - indexes, )?; - Ok(Self { - height_to_interval, - indexes_to_block_interval, - }) + Ok(Self { interval }) } } diff --git a/crates/brk_computer/src/blocks/interval/vecs.rs b/crates/brk_computer/src/blocks/interval/vecs.rs index 9bab6f663..6014dba17 100644 --- a/crates/brk_computer/src/blocks/interval/vecs.rs +++ b/crates/brk_computer/src/blocks/interval/vecs.rs @@ -1,11 +1,10 @@ use brk_traversable::Traversable; -use brk_types::{Height, Timestamp}; -use vecdb::LazyVecFrom1; +use brk_types::Timestamp; -use crate::internal::DerivedComputedBlockDistribution; +use crate::internal::LazyBlockDistribution; #[derive(Clone, Traversable)] pub struct Vecs { - pub height_to_interval: LazyVecFrom1, - pub indexes_to_block_interval: DerivedComputedBlockDistribution, + #[traversable(flatten)] + pub interval: LazyBlockDistribution, } diff --git a/crates/brk_computer/src/blocks/mining/compute.rs b/crates/brk_computer/src/blocks/mining/compute.rs index ffc0bea53..6e78a06ae 100644 --- a/crates/brk_computer/src/blocks/mining/compute.rs +++ b/crates/brk_computer/src/blocks/mining/compute.rs @@ -3,9 +3,9 @@ use brk_indexer::Indexer; use brk_types::{StoredF32, StoredF64}; use vecdb::Exit; +use super::super::{ONE_TERA_HASH, TARGET_BLOCKS_PER_DAY_F64, count, rewards}; use super::Vecs; -use super::super::{count, rewards, ONE_TERA_HASH, TARGET_BLOCKS_PER_DAY_F64}; -use crate::{indexes, ComputeIndexes}; +use crate::{ComputeIndexes, indexes}; 
impl Vecs { pub fn compute( @@ -17,31 +17,31 @@ impl Vecs { starting_indexes: &ComputeIndexes, exit: &Exit, ) -> Result<()> { - self.indexes_to_difficulty.derive_from( + self.difficulty.derive_from( indexes, starting_indexes, - &indexer.vecs.block.height_to_difficulty, + &indexer.vecs.blocks.difficulty, exit, )?; - self.indexes_to_difficulty_as_hash + self.difficulty_as_hash .compute_all(indexes, starting_indexes, exit, |v| { let multiplier = 2.0_f64.powi(32) / 600.0; v.compute_transform( starting_indexes.height, - &indexer.vecs.block.height_to_difficulty, + &indexer.vecs.blocks.difficulty, |(i, v, ..)| (i, StoredF32::from(*v * multiplier)), exit, )?; Ok(()) })?; - self.indexes_to_hash_rate + self.hash_rate .compute_all(indexes, starting_indexes, exit, |v| { v.compute_transform2( starting_indexes.height, - &count_vecs.height_to_24h_block_count, - &self.indexes_to_difficulty_as_hash.height, + &count_vecs._24h_block_count.height, + &self.difficulty_as_hash.height, |(i, block_count_sum, difficulty_as_hash, ..)| { ( i, @@ -56,67 +56,67 @@ impl Vecs { Ok(()) })?; - self.indexes_to_hash_rate_1w_sma.compute_all(starting_indexes, exit, |v| { + self.hash_rate_1w_sma + .compute_all(starting_indexes, exit, |v| { v.compute_sma( starting_indexes.dateindex, - self.indexes_to_hash_rate.dateindex.inner(), + self.hash_rate.dateindex.inner(), 7, exit, )?; Ok(()) })?; - self.indexes_to_hash_rate_1m_sma.compute_all(starting_indexes, exit, |v| { + self.hash_rate_1m_sma + .compute_all(starting_indexes, exit, |v| { v.compute_sma( starting_indexes.dateindex, - self.indexes_to_hash_rate.dateindex.inner(), + self.hash_rate.dateindex.inner(), 30, exit, )?; Ok(()) })?; - self.indexes_to_hash_rate_2m_sma.compute_all(starting_indexes, exit, |v| { + self.hash_rate_2m_sma + .compute_all(starting_indexes, exit, |v| { v.compute_sma( starting_indexes.dateindex, - self.indexes_to_hash_rate.dateindex.inner(), + self.hash_rate.dateindex.inner(), 2 * 30, exit, )?; Ok(()) })?; - 
self.indexes_to_hash_rate_1y_sma.compute_all(starting_indexes, exit, |v| { + self.hash_rate_1y_sma + .compute_all(starting_indexes, exit, |v| { v.compute_sma( starting_indexes.dateindex, - self.indexes_to_hash_rate.dateindex.inner(), + self.hash_rate.dateindex.inner(), 365, exit, )?; Ok(()) })?; - self.indexes_to_difficulty_adjustment.compute_all( - indexes, - starting_indexes, - exit, - |v| { + self.difficulty_adjustment + .compute_all(indexes, starting_indexes, exit, |v| { v.compute_percentage_change( starting_indexes.height, - &indexer.vecs.block.height_to_difficulty, + &indexer.vecs.blocks.difficulty, 1, exit, )?; Ok(()) - }, - )?; + })?; - self.indexes_to_hash_price_ths + self.hash_price_ths .compute_all(indexes, starting_indexes, exit, |v| { v.compute_transform2( starting_indexes.height, - &rewards_vecs.height_to_24h_coinbase_usd_sum, - &self.indexes_to_hash_rate.height, + rewards_vecs._24h_coinbase_sum.dollars.as_ref().unwrap(), + &self.hash_rate.height, |(i, coinbase_sum, hashrate, ..)| { let hashrate_ths = *hashrate / ONE_TERA_HASH; let price = if hashrate_ths == 0.0 { @@ -131,23 +131,23 @@ impl Vecs { Ok(()) })?; - self.indexes_to_hash_price_phs + self.hash_price_phs .compute_all(indexes, starting_indexes, exit, |v| { v.compute_transform( starting_indexes.height, - &self.indexes_to_hash_price_ths.height, + &self.hash_price_ths.height, |(i, price, ..)| (i, (*price * 1000.0).into()), exit, )?; Ok(()) })?; - self.indexes_to_hash_value_ths + self.hash_value_ths .compute_all(indexes, starting_indexes, exit, |v| { v.compute_transform2( starting_indexes.height, - &rewards_vecs.height_to_24h_coinbase_sum, - &self.indexes_to_hash_rate.height, + &rewards_vecs._24h_coinbase_sum.sats, + &self.hash_rate.height, |(i, coinbase_sum, hashrate, ..)| { let hashrate_ths = *hashrate / ONE_TERA_HASH; let value = if hashrate_ths == 0.0 { @@ -162,78 +162,78 @@ impl Vecs { Ok(()) })?; - self.indexes_to_hash_value_phs + self.hash_value_phs .compute_all(indexes, starting_indexes, 
exit, |v| { v.compute_transform( starting_indexes.height, - &self.indexes_to_hash_value_ths.height, + &self.hash_value_ths.height, |(i, value, ..)| (i, (*value * 1000.0).into()), exit, )?; Ok(()) })?; - self.indexes_to_hash_price_ths_min + self.hash_price_ths_min .compute_all(indexes, starting_indexes, exit, |v| { v.compute_all_time_low_( starting_indexes.height, - &self.indexes_to_hash_price_ths.height, + &self.hash_price_ths.height, exit, true, )?; Ok(()) })?; - self.indexes_to_hash_price_phs_min + self.hash_price_phs_min .compute_all(indexes, starting_indexes, exit, |v| { v.compute_all_time_low_( starting_indexes.height, - &self.indexes_to_hash_price_phs.height, + &self.hash_price_phs.height, exit, true, )?; Ok(()) })?; - self.indexes_to_hash_value_ths_min + self.hash_value_ths_min .compute_all(indexes, starting_indexes, exit, |v| { v.compute_all_time_low_( starting_indexes.height, - &self.indexes_to_hash_value_ths.height, + &self.hash_value_ths.height, exit, true, )?; Ok(()) })?; - self.indexes_to_hash_value_phs_min + self.hash_value_phs_min .compute_all(indexes, starting_indexes, exit, |v| { v.compute_all_time_low_( starting_indexes.height, - &self.indexes_to_hash_value_phs.height, + &self.hash_value_phs.height, exit, true, )?; Ok(()) })?; - self.indexes_to_hash_price_rebound + self.hash_price_rebound .compute_all(indexes, starting_indexes, exit, |v| { v.compute_percentage_difference( starting_indexes.height, - &self.indexes_to_hash_price_phs.height, - &self.indexes_to_hash_price_phs_min.height, + &self.hash_price_phs.height, + &self.hash_price_phs_min.height, exit, )?; Ok(()) })?; - self.indexes_to_hash_value_rebound + self.hash_value_rebound .compute_all(indexes, starting_indexes, exit, |v| { v.compute_percentage_difference( starting_indexes.height, - &self.indexes_to_hash_value_phs.height, - &self.indexes_to_hash_value_phs_min.height, + &self.hash_value_phs.height, + &self.hash_value_phs_min.height, exit, )?; Ok(()) diff --git 
a/crates/brk_computer/src/blocks/mining/import.rs b/crates/brk_computer/src/blocks/mining/import.rs index 40d54cd50..9cedd5c66 100644 --- a/crates/brk_computer/src/blocks/mining/import.rs +++ b/crates/brk_computer/src/blocks/mining/import.rs @@ -20,111 +20,106 @@ impl Vecs { let v5 = Version::new(5); Ok(Self { - indexes_to_hash_rate: ComputedBlockLast::forced_import( - db, - "hash_rate", - version + v5, - indexes, - )?, - indexes_to_hash_rate_1w_sma: ComputedDateLast::forced_import( + hash_rate: ComputedBlockLast::forced_import(db, "hash_rate", version + v5, indexes)?, + hash_rate_1w_sma: ComputedDateLast::forced_import( db, "hash_rate_1w_sma", version, indexes, )?, - indexes_to_hash_rate_1m_sma: ComputedDateLast::forced_import( + hash_rate_1m_sma: ComputedDateLast::forced_import( db, "hash_rate_1m_sma", version, indexes, )?, - indexes_to_hash_rate_2m_sma: ComputedDateLast::forced_import( + hash_rate_2m_sma: ComputedDateLast::forced_import( db, "hash_rate_2m_sma", version, indexes, )?, - indexes_to_hash_rate_1y_sma: ComputedDateLast::forced_import( + hash_rate_1y_sma: ComputedDateLast::forced_import( db, "hash_rate_1y_sma", version, indexes, )?, - indexes_to_hash_price_ths: ComputedBlockLast::forced_import( + hash_price_ths: ComputedBlockLast::forced_import( db, "hash_price_ths", version + v4, indexes, )?, - indexes_to_hash_price_ths_min: ComputedBlockLast::forced_import( + hash_price_ths_min: ComputedBlockLast::forced_import( db, "hash_price_ths_min", version + v4, indexes, )?, - indexes_to_hash_price_phs: ComputedBlockLast::forced_import( + hash_price_phs: ComputedBlockLast::forced_import( db, "hash_price_phs", version + v4, indexes, )?, - indexes_to_hash_price_phs_min: ComputedBlockLast::forced_import( + hash_price_phs_min: ComputedBlockLast::forced_import( db, "hash_price_phs_min", version + v4, indexes, )?, - indexes_to_hash_price_rebound: ComputedBlockLast::forced_import( + hash_price_rebound: ComputedBlockLast::forced_import( db, "hash_price_rebound", 
version + v4, indexes, )?, - indexes_to_hash_value_ths: ComputedBlockLast::forced_import( + hash_value_ths: ComputedBlockLast::forced_import( db, "hash_value_ths", version + v4, indexes, )?, - indexes_to_hash_value_ths_min: ComputedBlockLast::forced_import( + hash_value_ths_min: ComputedBlockLast::forced_import( db, "hash_value_ths_min", version + v4, indexes, )?, - indexes_to_hash_value_phs: ComputedBlockLast::forced_import( + hash_value_phs: ComputedBlockLast::forced_import( db, "hash_value_phs", version + v4, indexes, )?, - indexes_to_hash_value_phs_min: ComputedBlockLast::forced_import( + hash_value_phs_min: ComputedBlockLast::forced_import( db, "hash_value_phs_min", version + v4, indexes, )?, - indexes_to_hash_value_rebound: ComputedBlockLast::forced_import( + hash_value_rebound: ComputedBlockLast::forced_import( db, "hash_value_rebound", version + v4, indexes, )?, // Derived from external indexer data - no height storage needed - indexes_to_difficulty: DerivedComputedBlockLast::forced_import( + difficulty: DerivedComputedBlockLast::forced_import( db, "difficulty", - indexer.vecs.block.height_to_difficulty.boxed_clone(), + indexer.vecs.blocks.difficulty.boxed_clone(), version, indexes, )?, - indexes_to_difficulty_as_hash: ComputedBlockLast::forced_import( + difficulty_as_hash: ComputedBlockLast::forced_import( db, "difficulty_as_hash", version, indexes, )?, - indexes_to_difficulty_adjustment: ComputedBlockSum::forced_import( + difficulty_adjustment: ComputedBlockSum::forced_import( db, "difficulty_adjustment", version, diff --git a/crates/brk_computer/src/blocks/mining/vecs.rs b/crates/brk_computer/src/blocks/mining/vecs.rs index 6051df11f..b2caa2289 100644 --- a/crates/brk_computer/src/blocks/mining/vecs.rs +++ b/crates/brk_computer/src/blocks/mining/vecs.rs @@ -8,23 +8,23 @@ use crate::internal::{ /// Mining-related metrics: hash rate, hash price, hash value, difficulty #[derive(Clone, Traversable)] pub struct Vecs { - pub indexes_to_hash_rate: 
ComputedBlockLast, - pub indexes_to_hash_rate_1w_sma: ComputedDateLast, - pub indexes_to_hash_rate_1m_sma: ComputedDateLast, - pub indexes_to_hash_rate_2m_sma: ComputedDateLast, - pub indexes_to_hash_rate_1y_sma: ComputedDateLast, - pub indexes_to_hash_price_ths: ComputedBlockLast, - pub indexes_to_hash_price_ths_min: ComputedBlockLast, - pub indexes_to_hash_price_phs: ComputedBlockLast, - pub indexes_to_hash_price_phs_min: ComputedBlockLast, - pub indexes_to_hash_price_rebound: ComputedBlockLast, - pub indexes_to_hash_value_ths: ComputedBlockLast, - pub indexes_to_hash_value_ths_min: ComputedBlockLast, - pub indexes_to_hash_value_phs: ComputedBlockLast, - pub indexes_to_hash_value_phs_min: ComputedBlockLast, - pub indexes_to_hash_value_rebound: ComputedBlockLast, + pub hash_rate: ComputedBlockLast, + pub hash_rate_1w_sma: ComputedDateLast, + pub hash_rate_1m_sma: ComputedDateLast, + pub hash_rate_2m_sma: ComputedDateLast, + pub hash_rate_1y_sma: ComputedDateLast, + pub hash_price_ths: ComputedBlockLast, + pub hash_price_ths_min: ComputedBlockLast, + pub hash_price_phs: ComputedBlockLast, + pub hash_price_phs_min: ComputedBlockLast, + pub hash_price_rebound: ComputedBlockLast, + pub hash_value_ths: ComputedBlockLast, + pub hash_value_ths_min: ComputedBlockLast, + pub hash_value_phs: ComputedBlockLast, + pub hash_value_phs_min: ComputedBlockLast, + pub hash_value_rebound: ComputedBlockLast, /// Derived from indexer - no height storage needed - pub indexes_to_difficulty: DerivedComputedBlockLast, - pub indexes_to_difficulty_as_hash: ComputedBlockLast, - pub indexes_to_difficulty_adjustment: ComputedBlockSum, + pub difficulty: DerivedComputedBlockLast, + pub difficulty_as_hash: ComputedBlockLast, + pub difficulty_adjustment: ComputedBlockSum, } diff --git a/crates/brk_computer/src/blocks/rewards/compute.rs b/crates/brk_computer/src/blocks/rewards/compute.rs index 79ae21444..b4a13d021 100644 --- a/crates/brk_computer/src/blocks/rewards/compute.rs +++ 
b/crates/brk_computer/src/blocks/rewards/compute.rs @@ -5,7 +5,7 @@ use vecdb::{Exit, IterableVec, TypedVecIterator, VecIndex}; use super::super::count; use super::Vecs; -use crate::{indexes, price, transactions, ComputeIndexes}; +use crate::{ComputeIndexes, indexes, price, transactions}; impl Vecs { #[allow(clippy::too_many_arguments)] @@ -19,16 +19,16 @@ impl Vecs { price: Option<&price::Vecs>, exit: &Exit, ) -> Result<()> { - self.indexes_to_coinbase + self.coinbase .compute_all(indexes, price, starting_indexes, exit, |vec| { let mut txindex_to_first_txoutindex_iter = - indexer.vecs.tx.txindex_to_first_txoutindex.iter()?; + indexer.vecs.transactions.first_txoutindex.iter()?; let mut txindex_to_output_count_iter = - indexes.transaction.txindex_to_output_count.iter(); - let mut txoutindex_to_value_iter = indexer.vecs.txout.txoutindex_to_value.iter()?; + indexes.txindex.output_count.iter(); + let mut txoutindex_to_value_iter = indexer.vecs.outputs.value.iter()?; vec.compute_transform( starting_indexes.height, - &indexer.vecs.tx.height_to_first_txindex, + &indexer.vecs.transactions.first_txindex, |(height, txindex, ..)| { let first_txoutindex = txindex_to_first_txoutindex_iter .get_unwrap(txindex) @@ -48,10 +48,10 @@ impl Vecs { Ok(()) })?; - let mut height_to_coinbase_iter = self.indexes_to_coinbase.sats.height.into_iter(); - self.height_to_24h_coinbase_sum.compute_transform( + let mut height_to_coinbase_iter = self.coinbase.sats.height.into_iter(); + self._24h_coinbase_sum.sats.compute_transform( starting_indexes.height, - &count_vecs.height_to_24h_block_count, + &count_vecs._24h_block_count.height, |(h, count, ..)| { let range = *h - (*count - 1)..=*h; let sum = range @@ -64,11 +64,13 @@ impl Vecs { )?; drop(height_to_coinbase_iter); - if let Some(ref dollars) = self.indexes_to_coinbase.dollars { - let mut height_to_coinbase_iter = dollars.height.into_iter(); - self.height_to_24h_coinbase_usd_sum.compute_transform( + if let (Some(dollars_out), Some(dollars_in)) = 
+ (&mut self._24h_coinbase_sum.dollars, &self.coinbase.dollars) + { + let mut height_to_coinbase_iter = dollars_in.height.into_iter(); + dollars_out.compute_transform( starting_indexes.height, - &count_vecs.height_to_24h_block_count, + &count_vecs._24h_block_count.height, |(h, count, ..)| { let range = *h - (*count - 1)..=*h; let sum = range @@ -81,13 +83,12 @@ impl Vecs { )?; } - self.indexes_to_subsidy + self.subsidy .compute_all(indexes, price, starting_indexes, exit, |vec| { - // KISS: height.sum_cum.sum.0 is now a concrete field vec.compute_transform2( starting_indexes.height, - &self.indexes_to_coinbase.sats.height, - &transactions_fees.indexes_to_fee.sats.height.sum_cum.sum.0, + &self.coinbase.sats.height, + &transactions_fees.fee.sats.height.sum_cum.sum.0, |(height, coinbase, fees, ..)| { ( height, @@ -102,15 +103,11 @@ impl Vecs { Ok(()) })?; - self.indexes_to_unclaimed_rewards.compute_all( - indexes, - price, - starting_indexes, - exit, - |vec| { + self.unclaimed_rewards + .compute_all(indexes, price, starting_indexes, exit, |vec| { vec.compute_transform( starting_indexes.height, - &self.indexes_to_subsidy.sats.height, + &self.subsidy.sats.height, |(height, subsidy, ..)| { let halving = HalvingEpoch::from(height); let expected = Sats::FIFTY_BTC / 2_usize.pow(halving.to_usize() as u32); @@ -119,14 +116,12 @@ impl Vecs { exit, )?; Ok(()) - }, - )?; + })?; - // KISS: dateindex.sum_cum.sum.0 is now a concrete field - self.dateindex_to_fee_dominance.compute_transform2( + self.fee_dominance.compute_transform2( starting_indexes.dateindex, - &transactions_fees.indexes_to_fee.sats.dateindex.sum_cum.sum.0, - &self.indexes_to_coinbase.sats.dateindex.sum_cum.sum.0, + &transactions_fees.fee.sats.dateindex.sum_cum.sum.0, + &self.coinbase.sats.dateindex.sum_cum.sum.0, |(i, fee, coinbase, ..)| { let coinbase_f64 = u64::from(coinbase) as f64; let dominance = if coinbase_f64 == 0.0 { @@ -139,10 +134,10 @@ impl Vecs { exit, )?; - 
self.dateindex_to_subsidy_dominance.compute_transform2( + self.subsidy_dominance.compute_transform2( starting_indexes.dateindex, - &self.indexes_to_subsidy.sats.dateindex.sum_cum.sum.0, - &self.indexes_to_coinbase.sats.dateindex.sum_cum.sum.0, + &self.subsidy.sats.dateindex.sum_cum.sum.0, + &self.coinbase.sats.dateindex.sum_cum.sum.0, |(i, subsidy, coinbase, ..)| { let coinbase_f64 = u64::from(coinbase) as f64; let dominance = if coinbase_f64 == 0.0 { @@ -155,9 +150,9 @@ impl Vecs { exit, )?; - if let Some(sma) = self.indexes_to_subsidy_usd_1y_sma.as_mut() { + if let Some(sma) = self.subsidy_usd_1y_sma.as_mut() { let date_to_coinbase_usd_sum = &self - .indexes_to_coinbase + .coinbase .dollars .as_ref() .unwrap() diff --git a/crates/brk_computer/src/blocks/rewards/import.rs b/crates/brk_computer/src/blocks/rewards/import.rs index 48dbbad81..e0bb0286d 100644 --- a/crates/brk_computer/src/blocks/rewards/import.rs +++ b/crates/brk_computer/src/blocks/rewards/import.rs @@ -5,7 +5,7 @@ use vecdb::{Database, EagerVec, ImportableVec}; use super::Vecs; use crate::{ indexes, - internal::{ComputedDateLast, ValueBlockFull, ValueBlockSumCum}, + internal::{ComputedDateLast, ValueBlockFull, ValueBlockHeight, ValueBlockSumCum}, }; impl Vecs { @@ -16,40 +16,36 @@ impl Vecs { compute_dollars: bool, ) -> Result { Ok(Self { - height_to_24h_coinbase_sum: EagerVec::forced_import(db, "24h_coinbase_sum", version)?, - height_to_24h_coinbase_usd_sum: EagerVec::forced_import( + _24h_coinbase_sum: ValueBlockHeight::forced_import( db, - "24h_coinbase_usd_sum", + "24h_coinbase_sum", version, + compute_dollars, )?, - indexes_to_coinbase: ValueBlockFull::forced_import( + coinbase: ValueBlockFull::forced_import( db, "coinbase", version, indexes, compute_dollars, )?, - indexes_to_subsidy: ValueBlockFull::forced_import( + subsidy: ValueBlockFull::forced_import( db, "subsidy", version, indexes, compute_dollars, )?, - indexes_to_unclaimed_rewards: ValueBlockSumCum::forced_import( + unclaimed_rewards: 
ValueBlockSumCum::forced_import( db, "unclaimed_rewards", version, indexes, compute_dollars, )?, - dateindex_to_fee_dominance: EagerVec::forced_import(db, "fee_dominance", version)?, - dateindex_to_subsidy_dominance: EagerVec::forced_import( - db, - "subsidy_dominance", - version, - )?, - indexes_to_subsidy_usd_1y_sma: compute_dollars + fee_dominance: EagerVec::forced_import(db, "fee_dominance", version)?, + subsidy_dominance: EagerVec::forced_import(db, "subsidy_dominance", version)?, + subsidy_usd_1y_sma: compute_dollars .then(|| { ComputedDateLast::forced_import(db, "subsidy_usd_1y_sma", version, indexes) }) diff --git a/crates/brk_computer/src/blocks/rewards/vecs.rs b/crates/brk_computer/src/blocks/rewards/vecs.rs index e6f6dd3a8..9b48b6570 100644 --- a/crates/brk_computer/src/blocks/rewards/vecs.rs +++ b/crates/brk_computer/src/blocks/rewards/vecs.rs @@ -1,18 +1,17 @@ use brk_traversable::Traversable; -use brk_types::{DateIndex, Dollars, Height, Sats, StoredF32}; +use brk_types::{DateIndex, Dollars, StoredF32}; use vecdb::{EagerVec, PcoVec}; -use crate::internal::{ComputedDateLast, ValueBlockFull, ValueBlockSumCum}; +use crate::internal::{ComputedDateLast, ValueBlockFull, ValueBlockHeight, ValueBlockSumCum}; /// Coinbase/subsidy/rewards metrics #[derive(Clone, Traversable)] pub struct Vecs { - pub height_to_24h_coinbase_sum: EagerVec>, - pub height_to_24h_coinbase_usd_sum: EagerVec>, - pub indexes_to_coinbase: ValueBlockFull, - pub indexes_to_subsidy: ValueBlockFull, - pub indexes_to_unclaimed_rewards: ValueBlockSumCum, - pub dateindex_to_fee_dominance: EagerVec>, - pub dateindex_to_subsidy_dominance: EagerVec>, - pub indexes_to_subsidy_usd_1y_sma: Option>, + pub _24h_coinbase_sum: ValueBlockHeight, + pub coinbase: ValueBlockFull, + pub subsidy: ValueBlockFull, + pub unclaimed_rewards: ValueBlockSumCum, + pub fee_dominance: EagerVec>, + pub subsidy_dominance: EagerVec>, + pub subsidy_usd_1y_sma: Option>, } diff --git 
a/crates/brk_computer/src/blocks/size/compute.rs b/crates/brk_computer/src/blocks/size/compute.rs index 423c5ad14..8771780a7 100644 --- a/crates/brk_computer/src/blocks/size/compute.rs +++ b/crates/brk_computer/src/blocks/size/compute.rs @@ -3,7 +3,7 @@ use brk_indexer::Indexer; use vecdb::Exit; use super::Vecs; -use crate::{indexes, ComputeIndexes}; +use crate::{ComputeIndexes, indexes}; impl Vecs { pub fn compute( @@ -13,19 +13,14 @@ impl Vecs { starting_indexes: &ComputeIndexes, exit: &Exit, ) -> Result<()> { - self.indexes_to_block_size.derive_from( + self.size.derive_from( indexes, starting_indexes, - &indexer.vecs.block.height_to_total_size, + &indexer.vecs.blocks.total_size, exit, )?; - self.indexes_to_block_vbytes.derive_from( - indexes, - starting_indexes, - &self.height_to_vbytes, - exit, - )?; + self.vbytes.derive_from(indexes, starting_indexes, exit)?; Ok(()) } diff --git a/crates/brk_computer/src/blocks/size/import.rs b/crates/brk_computer/src/blocks/size/import.rs index 5b7d6b675..c3c6a5e1d 100644 --- a/crates/brk_computer/src/blocks/size/import.rs +++ b/crates/brk_computer/src/blocks/size/import.rs @@ -1,13 +1,10 @@ use brk_error::Result; use brk_indexer::Indexer; use brk_types::{Height, StoredU64, Version}; -use vecdb::{Database, IterableCloneableVec, LazyVecFrom1, VecIndex}; +use vecdb::{Database, IterableCloneableVec, VecIndex}; use super::Vecs; -use crate::{ - indexes, - internal::DerivedComputedBlockFull, -}; +use crate::{indexes, internal::{DerivedComputedBlockFull, LazyComputedBlockFull}}; impl Vecs { pub fn forced_import( @@ -16,33 +13,26 @@ impl Vecs { indexer: &Indexer, indexes: &indexes::Vecs, ) -> Result { - let height_to_vbytes = LazyVecFrom1::init( - "vbytes", - version, - indexer.vecs.block.height_to_weight.boxed_clone(), - |height: Height, weight_iter| { - weight_iter - .get_at(height.to_usize()) - .map(|w| StoredU64::from(w.to_vbytes_floor())) - }, - ); - Ok(Self { - indexes_to_block_size: DerivedComputedBlockFull::forced_import( - 
db, - "block_size", - indexer.vecs.block.height_to_total_size.boxed_clone(), - version, - indexes, - )?, - indexes_to_block_vbytes: DerivedComputedBlockFull::forced_import( + vbytes: LazyComputedBlockFull::forced_import_with_init( db, "block_vbytes", - height_to_vbytes.boxed_clone(), + version, + indexer.vecs.blocks.weight.clone(), + indexes, + |height: Height, weight_iter| { + weight_iter + .get_at(height.to_usize()) + .map(|w| StoredU64::from(w.to_vbytes_floor())) + }, + )?, + size: DerivedComputedBlockFull::forced_import( + db, + "block_size", + indexer.vecs.blocks.total_size.boxed_clone(), version, indexes, )?, - height_to_vbytes, }) } } diff --git a/crates/brk_computer/src/blocks/size/vecs.rs b/crates/brk_computer/src/blocks/size/vecs.rs index 40690d44f..3a33453af 100644 --- a/crates/brk_computer/src/blocks/size/vecs.rs +++ b/crates/brk_computer/src/blocks/size/vecs.rs @@ -1,12 +1,10 @@ use brk_traversable::Traversable; -use brk_types::{Height, StoredU64, Weight}; -use vecdb::LazyVecFrom1; +use brk_types::{StoredU64, Weight}; -use crate::internal::DerivedComputedBlockFull; +use crate::internal::{DerivedComputedBlockFull, LazyComputedBlockFull}; #[derive(Clone, Traversable)] pub struct Vecs { - pub height_to_vbytes: LazyVecFrom1, - pub indexes_to_block_size: DerivedComputedBlockFull, - pub indexes_to_block_vbytes: DerivedComputedBlockFull, + pub vbytes: LazyComputedBlockFull, + pub size: DerivedComputedBlockFull, } diff --git a/crates/brk_computer/src/blocks/time/compute.rs b/crates/brk_computer/src/blocks/time/compute.rs index 3fc4f4a4a..b04c9fbbf 100644 --- a/crates/brk_computer/src/blocks/time/compute.rs +++ b/crates/brk_computer/src/blocks/time/compute.rs @@ -4,7 +4,7 @@ use brk_types::Timestamp; use vecdb::{Exit, TypedVecIterator}; use super::Vecs; -use crate::{indexes, ComputeIndexes}; +use crate::{ComputeIndexes, indexes}; impl Vecs { /// Compute height-to-time fields early, before indexes are computed. 
@@ -16,9 +16,9 @@ impl Vecs { exit: &Exit, ) -> Result<()> { let mut prev_timestamp_fixed = None; - self.height_to_timestamp_fixed.compute_transform( + self.timestamp_fixed.compute_transform( starting_height, - &indexer.vecs.block.height_to_timestamp, + &indexer.vecs.blocks.timestamp, |(h, timestamp, height_to_timestamp_fixed_iter)| { if prev_timestamp_fixed.is_none() && let Some(prev_h) = h.decremented() @@ -46,16 +46,15 @@ impl Vecs { starting_indexes: &ComputeIndexes, exit: &Exit, ) -> Result<()> { - self.timeindexes_to_timestamp - .compute_all(starting_indexes, exit, |vec| { - vec.compute_transform( - starting_indexes.dateindex, - &indexes.time.dateindex_to_date, - |(di, d, ..)| (di, Timestamp::from(d)), - exit, - )?; - Ok(()) - })?; + self.timestamp.compute_all(|vec| { + vec.compute_transform( + starting_indexes.dateindex, + &indexes.dateindex.date, + |(di, d, ..)| (di, Timestamp::from(d)), + exit, + )?; + Ok(()) + })?; Ok(()) } diff --git a/crates/brk_computer/src/blocks/time/import.rs b/crates/brk_computer/src/blocks/time/import.rs index 5870f0a66..16274f727 100644 --- a/crates/brk_computer/src/blocks/time/import.rs +++ b/crates/brk_computer/src/blocks/time/import.rs @@ -1,12 +1,10 @@ use brk_error::Result; use brk_indexer::Indexer; -use brk_types::{Date, DifficultyEpoch, Height, Version}; -use vecdb::{ - Database, EagerVec, ImportableVec, IterableCloneableVec, LazyVecFrom1, LazyVecFrom2, VecIndex, -}; +use brk_types::{Date, Height, Version}; +use vecdb::{Database, EagerVec, ImportableVec, IterableCloneableVec, LazyVecFrom1, VecIndex}; use super::Vecs; -use crate::{indexes, internal::ComputedVecsDateFirst}; +use crate::{indexes, internal::DerivedComputedBlockFirst}; impl Vecs { pub fn forced_import( @@ -18,35 +16,25 @@ impl Vecs { let height_to_timestamp_fixed = EagerVec::forced_import(db, "timestamp_fixed", version)?; Ok(Self { - height_to_date: LazyVecFrom1::init( + date: LazyVecFrom1::init( "date", version, - 
indexer.vecs.block.height_to_timestamp.boxed_clone(), + indexer.vecs.blocks.timestamp.boxed_clone(), |height: Height, timestamp_iter| { timestamp_iter.get_at(height.to_usize()).map(Date::from) }, ), - height_to_date_fixed: LazyVecFrom1::init( + date_fixed: LazyVecFrom1::init( "date_fixed", version, height_to_timestamp_fixed.boxed_clone(), |height: Height, timestamp_iter| timestamp_iter.get(height).map(Date::from), ), - height_to_timestamp_fixed, - difficultyepoch_to_timestamp: LazyVecFrom2::init( - "timestamp", - version, - indexes.block.difficultyepoch_to_first_height.boxed_clone(), - indexer.vecs.block.height_to_timestamp.boxed_clone(), - |di: DifficultyEpoch, first_height_iter, timestamp_iter| { - first_height_iter - .get(di) - .and_then(|h: Height| timestamp_iter.get(h)) - }, - ), - timeindexes_to_timestamp: ComputedVecsDateFirst::forced_import( + timestamp_fixed: height_to_timestamp_fixed, + timestamp: DerivedComputedBlockFirst::forced_import( db, "timestamp", + indexer.vecs.blocks.timestamp.boxed_clone(), version, indexes, )?, diff --git a/crates/brk_computer/src/blocks/time/vecs.rs b/crates/brk_computer/src/blocks/time/vecs.rs index e886a5704..227f194e8 100644 --- a/crates/brk_computer/src/blocks/time/vecs.rs +++ b/crates/brk_computer/src/blocks/time/vecs.rs @@ -1,16 +1,14 @@ use brk_traversable::Traversable; -use brk_types::{Date, DifficultyEpoch, Height, Timestamp}; -use vecdb::{EagerVec, LazyVecFrom1, LazyVecFrom2, PcoVec}; +use brk_types::{Date, Height, Timestamp}; +use vecdb::{EagerVec, LazyVecFrom1, PcoVec}; -use crate::internal::ComputedVecsDateFirst; +use crate::internal::DerivedComputedBlockFirst; /// Timestamp and date metrics for blocks #[derive(Clone, Traversable)] pub struct Vecs { - pub height_to_date: LazyVecFrom1, - pub height_to_date_fixed: LazyVecFrom1, - pub height_to_timestamp_fixed: EagerVec>, - pub difficultyepoch_to_timestamp: - LazyVecFrom2, - pub timeindexes_to_timestamp: ComputedVecsDateFirst, + pub date: LazyVecFrom1, + pub 
date_fixed: LazyVecFrom1, + pub timestamp_fixed: EagerVec>, + pub timestamp: DerivedComputedBlockFirst, } diff --git a/crates/brk_computer/src/blocks/weight/compute.rs b/crates/brk_computer/src/blocks/weight/compute.rs index e9a05634a..7cb5ad097 100644 --- a/crates/brk_computer/src/blocks/weight/compute.rs +++ b/crates/brk_computer/src/blocks/weight/compute.rs @@ -3,7 +3,7 @@ use brk_indexer::Indexer; use vecdb::Exit; use super::Vecs; -use crate::{indexes, ComputeIndexes}; +use crate::{ComputeIndexes, indexes}; impl Vecs { pub fn compute( @@ -13,12 +13,8 @@ impl Vecs { starting_indexes: &ComputeIndexes, exit: &Exit, ) -> Result<()> { - self.indexes_to_block_weight.derive_from( - indexes, - starting_indexes, - &indexer.vecs.block.height_to_weight, - exit, - )?; + self.weight + .derive_from(indexes, starting_indexes, &indexer.vecs.blocks.weight, exit)?; Ok(()) } diff --git a/crates/brk_computer/src/blocks/weight/import.rs b/crates/brk_computer/src/blocks/weight/import.rs index 8cd819a30..fe4f39a38 100644 --- a/crates/brk_computer/src/blocks/weight/import.rs +++ b/crates/brk_computer/src/blocks/weight/import.rs @@ -6,9 +6,7 @@ use vecdb::{Database, IterableCloneableVec}; use super::Vecs; use crate::{ indexes, - internal::{ - DerivedComputedBlockFull, LazyBlockFull, WeightToFullness, - }, + internal::{DerivedComputedBlockFull, LazyBlockFull, WeightToFullness}, }; impl Vecs { @@ -18,25 +16,21 @@ impl Vecs { indexer: &Indexer, indexes: &indexes::Vecs, ) -> Result { - let indexes_to_block_weight = DerivedComputedBlockFull::forced_import( + let weight = DerivedComputedBlockFull::forced_import( db, "block_weight", - indexer.vecs.block.height_to_weight.boxed_clone(), + indexer.vecs.blocks.weight.boxed_clone(), version, indexes, )?; - let indexes_to_block_fullness = - LazyBlockFull::from_derived::( - "block_fullness", - version, - indexer.vecs.block.height_to_weight.boxed_clone(), - &indexes_to_block_weight, - ); + let fullness = LazyBlockFull::from_derived::( + 
"block_fullness", + version, + indexer.vecs.blocks.weight.boxed_clone(), + &weight, + ); - Ok(Self { - indexes_to_block_weight, - indexes_to_block_fullness, - }) + Ok(Self { weight, fullness }) } } diff --git a/crates/brk_computer/src/blocks/weight/vecs.rs b/crates/brk_computer/src/blocks/weight/vecs.rs index baf72f3fd..65af41210 100644 --- a/crates/brk_computer/src/blocks/weight/vecs.rs +++ b/crates/brk_computer/src/blocks/weight/vecs.rs @@ -5,7 +5,6 @@ use crate::internal::{DerivedComputedBlockFull, LazyBlockFull}; #[derive(Clone, Traversable)] pub struct Vecs { - pub indexes_to_block_weight: DerivedComputedBlockFull, - /// Block fullness as percentage of max block weight (0-100%) - pub indexes_to_block_fullness: LazyBlockFull, + pub weight: DerivedComputedBlockFull, + pub fullness: LazyBlockFull, } diff --git a/crates/brk_computer/src/cointime/activity/compute.rs b/crates/brk_computer/src/cointime/activity/compute.rs index f8c3065ce..1f83f2259 100644 --- a/crates/brk_computer/src/cointime/activity/compute.rs +++ b/crates/brk_computer/src/cointime/activity/compute.rs @@ -3,7 +3,7 @@ use brk_types::{Bitcoin, CheckedSub, StoredF64}; use vecdb::{Exit, TypedVecIterator}; use super::Vecs; -use crate::{distribution, indexes, ComputeIndexes}; +use crate::{ComputeIndexes, distribution, indexes}; impl Vecs { pub fn compute( @@ -13,9 +13,16 @@ impl Vecs { distribution: &distribution::Vecs, exit: &Exit, ) -> Result<()> { - let circulating_supply = &distribution.utxo_cohorts.all.metrics.supply.height_to_supply; + let circulating_supply = &distribution + .utxo_cohorts + .all + .metrics + .supply + .supply + .sats + .height; - self.indexes_to_coinblocks_created + self.coinblocks_created .compute_all(indexes, starting_indexes, exit, |vec| { vec.compute_transform( starting_indexes.height, @@ -26,21 +33,19 @@ impl Vecs { Ok(()) })?; - let indexes_to_coinblocks_destroyed = &distribution + let coinblocks_destroyed = &distribution .utxo_cohorts .all .metrics .activity - 
.indexes_to_coinblocks_destroyed; + .coinblocks_destroyed; - self.indexes_to_coinblocks_stored + self.coinblocks_stored .compute_all(indexes, starting_indexes, exit, |vec| { - let mut coinblocks_destroyed_iter = indexes_to_coinblocks_destroyed - .height - .into_iter(); + let mut coinblocks_destroyed_iter = coinblocks_destroyed.height.into_iter(); vec.compute_transform( starting_indexes.height, - &self.indexes_to_coinblocks_created.height, + &self.coinblocks_created.height, |(i, created, ..)| { let destroyed = coinblocks_destroyed_iter.get_unwrap(i); (i, created.checked_sub(destroyed).unwrap()) @@ -50,42 +55,38 @@ impl Vecs { Ok(()) })?; - self.indexes_to_liveliness + self.liveliness .compute_all(indexes, starting_indexes, exit, |vec| { vec.compute_divide( starting_indexes.height, - indexes_to_coinblocks_destroyed.height_cumulative.inner(), - self.indexes_to_coinblocks_created.height_cumulative.inner(), + coinblocks_destroyed.height_cumulative.inner(), + self.coinblocks_created.height_cumulative.inner(), exit, )?; Ok(()) })?; - self.indexes_to_vaultedness + self.vaultedness .compute_all(indexes, starting_indexes, exit, |vec| { vec.compute_transform( starting_indexes.height, - &self.indexes_to_liveliness.height, + &self.liveliness.height, |(i, v, ..)| (i, StoredF64::from(1.0).checked_sub(v).unwrap()), exit, )?; Ok(()) })?; - self.indexes_to_activity_to_vaultedness_ratio.compute_all( - indexes, - starting_indexes, - exit, - |vec| { + self.activity_to_vaultedness_ratio + .compute_all(indexes, starting_indexes, exit, |vec| { vec.compute_divide( starting_indexes.height, - &self.indexes_to_liveliness.height, - &self.indexes_to_vaultedness.height, + &self.liveliness.height, + &self.vaultedness.height, exit, )?; Ok(()) - }, - )?; + })?; Ok(()) } diff --git a/crates/brk_computer/src/cointime/activity/import.rs b/crates/brk_computer/src/cointime/activity/import.rs index 74e46b6e8..0517ebb25 100644 --- a/crates/brk_computer/src/cointime/activity/import.rs +++ 
b/crates/brk_computer/src/cointime/activity/import.rs @@ -11,31 +11,21 @@ use crate::{ impl Vecs { pub fn forced_import(db: &Database, version: Version, indexes: &indexes::Vecs) -> Result { Ok(Self { - indexes_to_coinblocks_created: ComputedBlockSumCum::forced_import( + coinblocks_created: ComputedBlockSumCum::forced_import( db, "coinblocks_created", version, indexes, )?, - indexes_to_coinblocks_stored: ComputedBlockSumCum::forced_import( + coinblocks_stored: ComputedBlockSumCum::forced_import( db, "coinblocks_stored", version, indexes, )?, - indexes_to_liveliness: ComputedBlockLast::forced_import( - db, - "liveliness", - version, - indexes, - )?, - indexes_to_vaultedness: ComputedBlockLast::forced_import( - db, - "vaultedness", - version, - indexes, - )?, - indexes_to_activity_to_vaultedness_ratio: ComputedBlockLast::forced_import( + liveliness: ComputedBlockLast::forced_import(db, "liveliness", version, indexes)?, + vaultedness: ComputedBlockLast::forced_import(db, "vaultedness", version, indexes)?, + activity_to_vaultedness_ratio: ComputedBlockLast::forced_import( db, "activity_to_vaultedness_ratio", version, diff --git a/crates/brk_computer/src/cointime/activity/vecs.rs b/crates/brk_computer/src/cointime/activity/vecs.rs index 12083d784..a3bdc7c33 100644 --- a/crates/brk_computer/src/cointime/activity/vecs.rs +++ b/crates/brk_computer/src/cointime/activity/vecs.rs @@ -5,9 +5,9 @@ use crate::internal::{ComputedBlockLast, ComputedBlockSumCum}; #[derive(Clone, Traversable)] pub struct Vecs { - pub indexes_to_coinblocks_created: ComputedBlockSumCum, - pub indexes_to_coinblocks_stored: ComputedBlockSumCum, - pub indexes_to_liveliness: ComputedBlockLast, - pub indexes_to_vaultedness: ComputedBlockLast, - pub indexes_to_activity_to_vaultedness_ratio: ComputedBlockLast, + pub coinblocks_created: ComputedBlockSumCum, + pub coinblocks_stored: ComputedBlockSumCum, + pub liveliness: ComputedBlockLast, + pub vaultedness: ComputedBlockLast, + pub 
activity_to_vaultedness_ratio: ComputedBlockLast, } diff --git a/crates/brk_computer/src/cointime/adjusted/compute.rs b/crates/brk_computer/src/cointime/adjusted/compute.rs index 5378b8f8e..674afdce0 100644 --- a/crates/brk_computer/src/cointime/adjusted/compute.rs +++ b/crates/brk_computer/src/cointime/adjusted/compute.rs @@ -1,9 +1,9 @@ use brk_error::Result; use vecdb::Exit; -use super::Vecs; use super::super::activity; -use crate::{supply, ComputeIndexes}; +use super::Vecs; +use crate::{ComputeIndexes, supply}; impl Vecs { pub fn compute( @@ -14,38 +14,39 @@ impl Vecs { has_price: bool, exit: &Exit, ) -> Result<()> { - self.indexes_to_cointime_adj_inflation_rate + self.cointime_adj_inflation_rate .compute_all(starting_indexes, exit, |v| { v.compute_multiply( starting_indexes.dateindex, - activity.indexes_to_activity_to_vaultedness_ratio.dateindex.inner(), - &supply.inflation.indexes.dateindex, + activity.activity_to_vaultedness_ratio.dateindex.inner(), + &supply.inflation.dateindex, exit, )?; Ok(()) })?; - self.indexes_to_cointime_adj_tx_btc_velocity + self.cointime_adj_tx_btc_velocity .compute_all(starting_indexes, exit, |v| { v.compute_multiply( starting_indexes.dateindex, - activity.indexes_to_activity_to_vaultedness_ratio.dateindex.inner(), - &supply.velocity.indexes_to_btc.dateindex, + activity.activity_to_vaultedness_ratio.dateindex.inner(), + &supply.velocity.btc.dateindex, exit, )?; Ok(()) })?; if has_price { - self.indexes_to_cointime_adj_tx_usd_velocity.compute_all(starting_indexes, exit, |v| { - v.compute_multiply( - starting_indexes.dateindex, - activity.indexes_to_activity_to_vaultedness_ratio.dateindex.inner(), - &supply.velocity.indexes_to_usd.as_ref().unwrap().dateindex, - exit, - )?; - Ok(()) - })?; + self.cointime_adj_tx_usd_velocity + .compute_all(starting_indexes, exit, |v| { + v.compute_multiply( + starting_indexes.dateindex, + activity.activity_to_vaultedness_ratio.dateindex.inner(), + &supply.velocity.usd.as_ref().unwrap().dateindex, + 
exit, + )?; + Ok(()) + })?; } Ok(()) diff --git a/crates/brk_computer/src/cointime/adjusted/import.rs b/crates/brk_computer/src/cointime/adjusted/import.rs index fc1c813d2..dfe97fe4c 100644 --- a/crates/brk_computer/src/cointime/adjusted/import.rs +++ b/crates/brk_computer/src/cointime/adjusted/import.rs @@ -8,19 +8,19 @@ use crate::{indexes, internal::ComputedDateLast}; impl Vecs { pub fn forced_import(db: &Database, version: Version, indexes: &indexes::Vecs) -> Result { Ok(Self { - indexes_to_cointime_adj_inflation_rate: ComputedDateLast::forced_import( + cointime_adj_inflation_rate: ComputedDateLast::forced_import( db, "cointime_adj_inflation_rate", version, indexes, )?, - indexes_to_cointime_adj_tx_btc_velocity: ComputedDateLast::forced_import( + cointime_adj_tx_btc_velocity: ComputedDateLast::forced_import( db, "cointime_adj_tx_btc_velocity", version, indexes, )?, - indexes_to_cointime_adj_tx_usd_velocity: ComputedDateLast::forced_import( + cointime_adj_tx_usd_velocity: ComputedDateLast::forced_import( db, "cointime_adj_tx_usd_velocity", version, diff --git a/crates/brk_computer/src/cointime/adjusted/vecs.rs b/crates/brk_computer/src/cointime/adjusted/vecs.rs index 7d479ee33..cbf55d774 100644 --- a/crates/brk_computer/src/cointime/adjusted/vecs.rs +++ b/crates/brk_computer/src/cointime/adjusted/vecs.rs @@ -5,7 +5,7 @@ use crate::internal::ComputedDateLast; #[derive(Clone, Traversable)] pub struct Vecs { - pub indexes_to_cointime_adj_inflation_rate: ComputedDateLast, - pub indexes_to_cointime_adj_tx_btc_velocity: ComputedDateLast, - pub indexes_to_cointime_adj_tx_usd_velocity: ComputedDateLast, + pub cointime_adj_inflation_rate: ComputedDateLast, + pub cointime_adj_tx_btc_velocity: ComputedDateLast, + pub cointime_adj_tx_usd_velocity: ComputedDateLast, } diff --git a/crates/brk_computer/src/cointime/cap/compute.rs b/crates/brk_computer/src/cointime/cap/compute.rs index 71f08e694..86b71eb27 100644 --- a/crates/brk_computer/src/cointime/cap/compute.rs +++ 
b/crates/brk_computer/src/cointime/cap/compute.rs @@ -4,7 +4,7 @@ use vecdb::Exit; use super::super::{activity, value}; use super::Vecs; -use crate::{blocks, distribution, indexes, utils::OptionExt, ComputeIndexes}; +use crate::{ComputeIndexes, blocks, distribution, indexes, utils::OptionExt}; impl Vecs { #[allow(clippy::too_many_arguments)] @@ -24,24 +24,25 @@ impl Vecs { .metrics .realized .u() - .height_to_realized_cap; + .realized_cap + .height; let circulating_supply = &distribution .utxo_cohorts .all .metrics .supply - .height_to_supply_value - .bitcoin; + .supply + .bitcoin + .height; - self.indexes_to_thermo_cap + self.thermo_cap .compute_all(indexes, starting_indexes, exit, |vec| { - // KISS: height_cumulative is now a concrete field (not Option) vec.compute_transform( starting_indexes.height, &blocks .rewards - .indexes_to_subsidy + .subsidy .dollars .as_ref() .unwrap() @@ -53,47 +54,47 @@ impl Vecs { Ok(()) })?; - self.indexes_to_investor_cap + self.investor_cap .compute_all(indexes, starting_indexes, exit, |vec| { vec.compute_subtract( starting_indexes.height, realized_cap, - &self.indexes_to_thermo_cap.height, + &self.thermo_cap.height, exit, )?; Ok(()) })?; - self.indexes_to_vaulted_cap + self.vaulted_cap .compute_all(indexes, starting_indexes, exit, |vec| { vec.compute_divide( starting_indexes.height, realized_cap, - &activity.indexes_to_vaultedness.height, + &activity.vaultedness.height, exit, )?; Ok(()) })?; - self.indexes_to_active_cap + self.active_cap .compute_all(indexes, starting_indexes, exit, |vec| { vec.compute_multiply( starting_indexes.height, realized_cap, - &activity.indexes_to_liveliness.height, + &activity.liveliness.height, exit, )?; Ok(()) })?; // cointime_cap = (cointime_value_destroyed_cumulative * circulating_supply) / coinblocks_stored_cumulative - self.indexes_to_cointime_cap + self.cointime_cap .compute_all(indexes, starting_indexes, exit, |vec| { vec.compute_transform3( starting_indexes.height, - 
value.indexes_to_cointime_value_destroyed.height_cumulative.inner(), + value.cointime_value_destroyed.height_cumulative.inner(), circulating_supply, - activity.indexes_to_coinblocks_stored.height_cumulative.inner(), + activity.coinblocks_stored.height_cumulative.inner(), |(i, destroyed, supply, stored, ..)| { let destroyed: f64 = *destroyed; let supply: f64 = supply.into(); diff --git a/crates/brk_computer/src/cointime/cap/import.rs b/crates/brk_computer/src/cointime/cap/import.rs index fbb30fdd4..4a6345072 100644 --- a/crates/brk_computer/src/cointime/cap/import.rs +++ b/crates/brk_computer/src/cointime/cap/import.rs @@ -8,36 +8,11 @@ use crate::{indexes, internal::ComputedBlockLast}; impl Vecs { pub fn forced_import(db: &Database, version: Version, indexes: &indexes::Vecs) -> Result { Ok(Self { - indexes_to_thermo_cap: ComputedBlockLast::forced_import( - db, - "thermo_cap", - version, - indexes, - )?, - indexes_to_investor_cap: ComputedBlockLast::forced_import( - db, - "investor_cap", - version, - indexes, - )?, - indexes_to_vaulted_cap: ComputedBlockLast::forced_import( - db, - "vaulted_cap", - version, - indexes, - )?, - indexes_to_active_cap: ComputedBlockLast::forced_import( - db, - "active_cap", - version, - indexes, - )?, - indexes_to_cointime_cap: ComputedBlockLast::forced_import( - db, - "cointime_cap", - version, - indexes, - )?, + thermo_cap: ComputedBlockLast::forced_import(db, "thermo_cap", version, indexes)?, + investor_cap: ComputedBlockLast::forced_import(db, "investor_cap", version, indexes)?, + vaulted_cap: ComputedBlockLast::forced_import(db, "vaulted_cap", version, indexes)?, + active_cap: ComputedBlockLast::forced_import(db, "active_cap", version, indexes)?, + cointime_cap: ComputedBlockLast::forced_import(db, "cointime_cap", version, indexes)?, }) } } diff --git a/crates/brk_computer/src/cointime/cap/vecs.rs b/crates/brk_computer/src/cointime/cap/vecs.rs index 0a8f9f5e8..00e3a7bcf 100644 --- a/crates/brk_computer/src/cointime/cap/vecs.rs +++ 
b/crates/brk_computer/src/cointime/cap/vecs.rs @@ -5,9 +5,9 @@ use crate::internal::ComputedBlockLast; #[derive(Clone, Traversable)] pub struct Vecs { - pub indexes_to_thermo_cap: ComputedBlockLast, - pub indexes_to_investor_cap: ComputedBlockLast, - pub indexes_to_vaulted_cap: ComputedBlockLast, - pub indexes_to_active_cap: ComputedBlockLast, - pub indexes_to_cointime_cap: ComputedBlockLast, + pub thermo_cap: ComputedBlockLast, + pub investor_cap: ComputedBlockLast, + pub vaulted_cap: ComputedBlockLast, + pub active_cap: ComputedBlockLast, + pub cointime_cap: ComputedBlockLast, } diff --git a/crates/brk_computer/src/cointime/pricing/compute.rs b/crates/brk_computer/src/cointime/pricing/compute.rs index cf98b1cc8..375f6c923 100644 --- a/crates/brk_computer/src/cointime/pricing/compute.rs +++ b/crates/brk_computer/src/cointime/pricing/compute.rs @@ -3,7 +3,7 @@ use vecdb::Exit; use super::super::{activity, cap, supply}; use super::Vecs; -use crate::{distribution, indexes, price, utils::OptionExt, ComputeIndexes}; +use crate::{ComputeIndexes, distribution, indexes, price, utils::OptionExt}; impl Vecs { #[allow(clippy::too_many_arguments)] @@ -18,91 +18,94 @@ impl Vecs { cap: &cap::Vecs, exit: &Exit, ) -> Result<()> { - let circulating_supply = &distribution.utxo_cohorts.all.metrics.supply.height_to_supply_value.bitcoin; + let circulating_supply = &distribution + .utxo_cohorts + .all + .metrics + .supply + .supply + .bitcoin + .height; let realized_price = &distribution .utxo_cohorts .all .metrics .realized .u() - .indexes_to_realized_price + .realized_price .height; - self.indexes_to_vaulted_price + self.vaulted_price .compute_all(indexes, starting_indexes, exit, |vec| { vec.compute_divide( starting_indexes.height, realized_price, - &activity.indexes_to_vaultedness.height, + &activity.vaultedness.height, exit, )?; Ok(()) })?; - self.indexes_to_vaulted_price_ratio.compute_rest( + self.vaulted_price_ratio.compute_rest( price, starting_indexes, exit, - 
Some(&self.indexes_to_vaulted_price.dateindex.0), + Some(&self.vaulted_price.dateindex.0), )?; - self.indexes_to_active_price + self.active_price .compute_all(indexes, starting_indexes, exit, |vec| { vec.compute_multiply( starting_indexes.height, realized_price, - &activity.indexes_to_liveliness.height, + &activity.liveliness.height, exit, )?; Ok(()) })?; - self.indexes_to_active_price_ratio.compute_rest( + self.active_price_ratio.compute_rest( price, starting_indexes, exit, - Some(&self.indexes_to_active_price.dateindex.0), + Some(&self.active_price.dateindex.0), )?; - self.indexes_to_true_market_mean.compute_all( - indexes, - starting_indexes, - exit, - |vec| { - vec.compute_divide( - starting_indexes.height, - &cap.indexes_to_investor_cap.height, - &supply.indexes_to_active_supply.bitcoin.height, - exit, - )?; - Ok(()) - }, - )?; - - self.indexes_to_true_market_mean_ratio.compute_rest( - price, - starting_indexes, - exit, - Some(&self.indexes_to_true_market_mean.dateindex.0), - )?; - - // cointime_price = cointime_cap / circulating_supply - self.indexes_to_cointime_price + self.true_market_mean .compute_all(indexes, starting_indexes, exit, |vec| { vec.compute_divide( starting_indexes.height, - &cap.indexes_to_cointime_cap.height, + &cap.investor_cap.height, + &supply.active_supply.bitcoin.height, + exit, + )?; + Ok(()) + })?; + + self.true_market_mean_ratio.compute_rest( + price, + starting_indexes, + exit, + Some(&self.true_market_mean.dateindex.0), + )?; + + // cointime_price = cointime_cap / circulating_supply + self.cointime_price + .compute_all(indexes, starting_indexes, exit, |vec| { + vec.compute_divide( + starting_indexes.height, + &cap.cointime_cap.height, circulating_supply, exit, )?; Ok(()) })?; - self.indexes_to_cointime_price_ratio.compute_rest( + self.cointime_price_ratio.compute_rest( price, starting_indexes, exit, - Some(&self.indexes_to_cointime_price.dateindex.0), + Some(&self.cointime_price.dateindex.0), )?; Ok(()) diff --git 
a/crates/brk_computer/src/cointime/pricing/import.rs b/crates/brk_computer/src/cointime/pricing/import.rs index 283f4a192..75e378eaa 100644 --- a/crates/brk_computer/src/cointime/pricing/import.rs +++ b/crates/brk_computer/src/cointime/pricing/import.rs @@ -4,8 +4,9 @@ use vecdb::Database; use super::Vecs; use crate::{ - indexes, price, - internal::{ComputedRatioVecsDate, ComputedBlockLast}, + indexes, + internal::{ComputedBlockLast, ComputedRatioVecsDate}, + price, }; impl Vecs { @@ -17,20 +18,15 @@ impl Vecs { ) -> Result { macro_rules! computed_h { ($name:expr) => { - ComputedBlockLast::forced_import( - db, - $name, - version, - indexes, - )? + ComputedBlockLast::forced_import(db, $name, version, indexes)? }; } // Extract price vecs before struct literal so they can be used as sources for ratios - let indexes_to_vaulted_price = computed_h!("vaulted_price"); - let indexes_to_active_price = computed_h!("active_price"); - let indexes_to_true_market_mean = computed_h!("true_market_mean"); - let indexes_to_cointime_price = computed_h!("cointime_price"); + let vaulted_price = computed_h!("vaulted_price"); + let active_price = computed_h!("active_price"); + let true_market_mean = computed_h!("true_market_mean"); + let cointime_price = computed_h!("cointime_price"); macro_rules! 
ratio_di { ($name:expr, $source:expr) => { @@ -47,20 +43,14 @@ impl Vecs { } Ok(Self { - indexes_to_vaulted_price_ratio: ratio_di!("vaulted_price", &indexes_to_vaulted_price), - indexes_to_vaulted_price, - indexes_to_active_price_ratio: ratio_di!("active_price", &indexes_to_active_price), - indexes_to_active_price, - indexes_to_true_market_mean_ratio: ratio_di!( - "true_market_mean", - &indexes_to_true_market_mean - ), - indexes_to_true_market_mean, - indexes_to_cointime_price_ratio: ratio_di!( - "cointime_price", - &indexes_to_cointime_price - ), - indexes_to_cointime_price, + vaulted_price_ratio: ratio_di!("vaulted_price", &vaulted_price), + vaulted_price, + active_price_ratio: ratio_di!("active_price", &active_price), + active_price, + true_market_mean_ratio: ratio_di!("true_market_mean", &true_market_mean), + true_market_mean, + cointime_price_ratio: ratio_di!("cointime_price", &cointime_price), + cointime_price, }) } } diff --git a/crates/brk_computer/src/cointime/pricing/vecs.rs b/crates/brk_computer/src/cointime/pricing/vecs.rs index b9929dd36..b1b951e74 100644 --- a/crates/brk_computer/src/cointime/pricing/vecs.rs +++ b/crates/brk_computer/src/cointime/pricing/vecs.rs @@ -1,16 +1,16 @@ use brk_traversable::Traversable; use brk_types::Dollars; -use crate::internal::{ComputedRatioVecsDate, ComputedBlockLast}; +use crate::internal::{ComputedBlockLast, ComputedRatioVecsDate}; #[derive(Clone, Traversable)] pub struct Vecs { - pub indexes_to_vaulted_price: ComputedBlockLast, - pub indexes_to_vaulted_price_ratio: ComputedRatioVecsDate, - pub indexes_to_active_price: ComputedBlockLast, - pub indexes_to_active_price_ratio: ComputedRatioVecsDate, - pub indexes_to_true_market_mean: ComputedBlockLast, - pub indexes_to_true_market_mean_ratio: ComputedRatioVecsDate, - pub indexes_to_cointime_price: ComputedBlockLast, - pub indexes_to_cointime_price_ratio: ComputedRatioVecsDate, + pub vaulted_price: ComputedBlockLast, + pub vaulted_price_ratio: ComputedRatioVecsDate, + 
pub active_price: ComputedBlockLast, + pub active_price_ratio: ComputedRatioVecsDate, + pub true_market_mean: ComputedBlockLast, + pub true_market_mean_ratio: ComputedRatioVecsDate, + pub cointime_price: ComputedBlockLast, + pub cointime_price_ratio: ComputedRatioVecsDate, } diff --git a/crates/brk_computer/src/cointime/supply/compute.rs b/crates/brk_computer/src/cointime/supply/compute.rs index 0a87f7250..53ec97d2e 100644 --- a/crates/brk_computer/src/cointime/supply/compute.rs +++ b/crates/brk_computer/src/cointime/supply/compute.rs @@ -1,9 +1,9 @@ use brk_error::Result; use vecdb::Exit; -use super::Vecs; use super::super::activity; -use crate::{distribution, indexes, price, ComputeIndexes}; +use super::Vecs; +use crate::{ComputeIndexes, distribution, indexes, price}; impl Vecs { pub fn compute( @@ -15,39 +15,36 @@ impl Vecs { activity: &activity::Vecs, exit: &Exit, ) -> Result<()> { - let circulating_supply = &distribution.utxo_cohorts.all.metrics.supply.height_to_supply; + let circulating_supply = &distribution + .utxo_cohorts + .all + .metrics + .supply + .supply + .sats + .height; - self.indexes_to_vaulted_supply.compute_all( - indexes, - price, - starting_indexes, - exit, - |vec| { + self.vaulted_supply + .compute_all(indexes, price, starting_indexes, exit, |vec| { vec.compute_multiply( starting_indexes.height, circulating_supply, - &activity.indexes_to_vaultedness.height, + &activity.vaultedness.height, exit, )?; Ok(()) - }, - )?; + })?; - self.indexes_to_active_supply.compute_all( - indexes, - price, - starting_indexes, - exit, - |vec| { + self.active_supply + .compute_all(indexes, price, starting_indexes, exit, |vec| { vec.compute_multiply( starting_indexes.height, circulating_supply, - &activity.indexes_to_liveliness.height, + &activity.liveliness.height, exit, )?; Ok(()) - }, - )?; + })?; Ok(()) } diff --git a/crates/brk_computer/src/cointime/supply/import.rs b/crates/brk_computer/src/cointime/supply/import.rs index 459ef3809..a5335380c 100644 --- 
a/crates/brk_computer/src/cointime/supply/import.rs +++ b/crates/brk_computer/src/cointime/supply/import.rs @@ -3,10 +3,7 @@ use brk_types::Version; use vecdb::Database; use super::Vecs; -use crate::{ - indexes, - internal::ValueBlockLast, -}; +use crate::{indexes, internal::ValueBlockLast}; impl Vecs { pub fn forced_import( @@ -16,14 +13,14 @@ impl Vecs { compute_dollars: bool, ) -> Result { Ok(Self { - indexes_to_vaulted_supply: ValueBlockLast::forced_import( + vaulted_supply: ValueBlockLast::forced_import( db, "vaulted_supply", version, indexes, compute_dollars, )?, - indexes_to_active_supply: ValueBlockLast::forced_import( + active_supply: ValueBlockLast::forced_import( db, "active_supply", version, diff --git a/crates/brk_computer/src/cointime/supply/vecs.rs b/crates/brk_computer/src/cointime/supply/vecs.rs index a1bc13b06..9b9f9ee49 100644 --- a/crates/brk_computer/src/cointime/supply/vecs.rs +++ b/crates/brk_computer/src/cointime/supply/vecs.rs @@ -4,6 +4,6 @@ use crate::internal::ValueBlockLast; #[derive(Clone, Traversable)] pub struct Vecs { - pub indexes_to_vaulted_supply: ValueBlockLast, - pub indexes_to_active_supply: ValueBlockLast, + pub vaulted_supply: ValueBlockLast, + pub active_supply: ValueBlockLast, } diff --git a/crates/brk_computer/src/cointime/value/compute.rs b/crates/brk_computer/src/cointime/value/compute.rs index 60fc6b264..5aa45229a 100644 --- a/crates/brk_computer/src/cointime/value/compute.rs +++ b/crates/brk_computer/src/cointime/value/compute.rs @@ -3,7 +3,7 @@ use vecdb::Exit; use super::super::activity; use super::Vecs; -use crate::{distribution, indexes, price, ComputeIndexes}; +use crate::{ComputeIndexes, distribution, indexes, price}; impl Vecs { pub fn compute( @@ -15,57 +15,45 @@ impl Vecs { activity: &activity::Vecs, exit: &Exit, ) -> Result<()> { - let indexes_to_coinblocks_destroyed = &distribution + let coinblocks_destroyed = &distribution .utxo_cohorts .all .metrics .activity - .indexes_to_coinblocks_destroyed; + 
.coinblocks_destroyed; - self.indexes_to_cointime_value_destroyed.compute_all( - indexes, - starting_indexes, - exit, - |vec| { + self.cointime_value_destroyed + .compute_all(indexes, starting_indexes, exit, |vec| { vec.compute_multiply( starting_indexes.height, - &price.usd.chainindexes_to_price_close.height, - &indexes_to_coinblocks_destroyed.height, + &price.usd.split.close.height, + &coinblocks_destroyed.height, exit, )?; Ok(()) - }, - )?; + })?; - self.indexes_to_cointime_value_created.compute_all( - indexes, - starting_indexes, - exit, - |vec| { + self.cointime_value_created + .compute_all(indexes, starting_indexes, exit, |vec| { vec.compute_multiply( starting_indexes.height, - &price.usd.chainindexes_to_price_close.height, - &activity.indexes_to_coinblocks_created.height, + &price.usd.split.close.height, + &activity.coinblocks_created.height, exit, )?; Ok(()) - }, - )?; + })?; - self.indexes_to_cointime_value_stored.compute_all( - indexes, - starting_indexes, - exit, - |vec| { + self.cointime_value_stored + .compute_all(indexes, starting_indexes, exit, |vec| { vec.compute_multiply( starting_indexes.height, - &price.usd.chainindexes_to_price_close.height, - &activity.indexes_to_coinblocks_stored.height, + &price.usd.split.close.height, + &activity.coinblocks_stored.height, exit, )?; Ok(()) - }, - )?; + })?; Ok(()) } diff --git a/crates/brk_computer/src/cointime/value/import.rs b/crates/brk_computer/src/cointime/value/import.rs index 89f7cc907..0fc99be94 100644 --- a/crates/brk_computer/src/cointime/value/import.rs +++ b/crates/brk_computer/src/cointime/value/import.rs @@ -8,19 +8,19 @@ use crate::{indexes, internal::ComputedBlockSumCum}; impl Vecs { pub fn forced_import(db: &Database, version: Version, indexes: &indexes::Vecs) -> Result { Ok(Self { - indexes_to_cointime_value_destroyed: ComputedBlockSumCum::forced_import( + cointime_value_destroyed: ComputedBlockSumCum::forced_import( db, "cointime_value_destroyed", version, indexes, )?, - 
indexes_to_cointime_value_created: ComputedBlockSumCum::forced_import( + cointime_value_created: ComputedBlockSumCum::forced_import( db, "cointime_value_created", version, indexes, )?, - indexes_to_cointime_value_stored: ComputedBlockSumCum::forced_import( + cointime_value_stored: ComputedBlockSumCum::forced_import( db, "cointime_value_stored", version, diff --git a/crates/brk_computer/src/cointime/value/vecs.rs b/crates/brk_computer/src/cointime/value/vecs.rs index 9d00e3c1d..51fba2db7 100644 --- a/crates/brk_computer/src/cointime/value/vecs.rs +++ b/crates/brk_computer/src/cointime/value/vecs.rs @@ -5,7 +5,7 @@ use crate::internal::ComputedBlockSumCum; #[derive(Clone, Traversable)] pub struct Vecs { - pub indexes_to_cointime_value_destroyed: ComputedBlockSumCum, - pub indexes_to_cointime_value_created: ComputedBlockSumCum, - pub indexes_to_cointime_value_stored: ComputedBlockSumCum, + pub cointime_value_destroyed: ComputedBlockSumCum, + pub cointime_value_created: ComputedBlockSumCum, + pub cointime_value_stored: ComputedBlockSumCum, } diff --git a/crates/brk_computer/src/distribution/address/address_count.rs b/crates/brk_computer/src/distribution/address/address_count.rs index eaa250f4e..61dc0ccf8 100644 --- a/crates/brk_computer/src/distribution/address/address_count.rs +++ b/crates/brk_computer/src/distribution/address/address_count.rs @@ -5,29 +5,70 @@ use brk_types::{Height, StoredU64, Version}; use derive_more::{Deref, DerefMut}; use rayon::prelude::*; use vecdb::{ - AnyStoredVec, AnyVec, Database, EagerVec, Exit, GenericStoredVec, ImportableVec, - IterableCloneableVec, PcoVec, TypedVecIterator, + AnyStoredVec, AnyVec, Database, EagerVec, Exit, GenericStoredVec, PcoVec, TypedVecIterator, }; -use crate::{ComputeIndexes, indexes, internal::DerivedComputedBlockLast}; +use crate::{ComputeIndexes, indexes, internal::ComputedBlockLast}; /// Address count per address type (runtime state). 
#[derive(Debug, Default, Deref, DerefMut)] pub struct AddressTypeToAddressCount(ByAddressType); -impl From<(&AddressTypeToHeightToAddressCount, Height)> for AddressTypeToAddressCount { +impl AddressTypeToAddressCount { #[inline] - fn from((groups, starting_height): (&AddressTypeToHeightToAddressCount, Height)) -> Self { + pub fn sum(&self) -> u64 { + self.0.values().sum() + } +} + +impl From<(&AddressTypeToAddrCountVecs, Height)> for AddressTypeToAddressCount { + #[inline] + fn from((groups, starting_height): (&AddressTypeToAddrCountVecs, Height)) -> Self { if let Some(prev_height) = starting_height.decremented() { Self(ByAddressType { - p2pk65: groups.p2pk65.into_iter().get_unwrap(prev_height).into(), - p2pk33: groups.p2pk33.into_iter().get_unwrap(prev_height).into(), - p2pkh: groups.p2pkh.into_iter().get_unwrap(prev_height).into(), - p2sh: groups.p2sh.into_iter().get_unwrap(prev_height).into(), - p2wpkh: groups.p2wpkh.into_iter().get_unwrap(prev_height).into(), - p2wsh: groups.p2wsh.into_iter().get_unwrap(prev_height).into(), - p2tr: groups.p2tr.into_iter().get_unwrap(prev_height).into(), - p2a: groups.p2a.into_iter().get_unwrap(prev_height).into(), + p2pk65: groups + .p2pk65 + .height + .into_iter() + .get_unwrap(prev_height) + .into(), + p2pk33: groups + .p2pk33 + .height + .into_iter() + .get_unwrap(prev_height) + .into(), + p2pkh: groups + .p2pkh + .height + .into_iter() + .get_unwrap(prev_height) + .into(), + p2sh: groups + .p2sh + .height + .into_iter() + .get_unwrap(prev_height) + .into(), + p2wpkh: groups + .p2wpkh + .height + .into_iter() + .get_unwrap(prev_height) + .into(), + p2wsh: groups + .p2wsh + .height + .into_iter() + .get_unwrap(prev_height) + .into(), + p2tr: groups + .p2tr + .height + .into_iter() + .get_unwrap(prev_height) + .into(), + p2a: groups.p2a.height.into_iter().get_unwrap(prev_height).into(), }) } else { Default::default() @@ -35,200 +76,213 @@ impl From<(&AddressTypeToHeightToAddressCount, Height)> for AddressTypeToAddress } } -/// 
Address count per address type, indexed by height. -#[derive(Debug, Clone, Deref, DerefMut, Traversable)] -pub struct AddressTypeToHeightToAddressCount(ByAddressType>>); - -impl From>>> - for AddressTypeToHeightToAddressCount -{ - #[inline] - fn from(value: ByAddressType>>) -> Self { - Self(value) - } -} - -impl AddressTypeToHeightToAddressCount { - pub fn min_len(&self) -> usize { - self.p2pk65 - .len() - .min(self.p2pk33.len()) - .min(self.p2pkh.len()) - .min(self.p2sh.len()) - .min(self.p2wpkh.len()) - .min(self.p2wsh.len()) - .min(self.p2tr.len()) - .min(self.p2a.len()) - } - - pub fn forced_import(db: &Database, name: &str, version: Version) -> Result { - Ok(Self::from(ByAddressType::new_with_name(|type_name| { - Ok(EagerVec::forced_import( - db, - &format!("{type_name}_{name}"), - version, - )?) - })?)) - } - - /// Returns a parallel iterator over all vecs for parallel writing. - pub fn par_iter_mut(&mut self) -> impl ParallelIterator { - let inner = &mut self.0; - [ - &mut inner.p2pk65 as &mut dyn AnyStoredVec, - &mut inner.p2pk33 as &mut dyn AnyStoredVec, - &mut inner.p2pkh as &mut dyn AnyStoredVec, - &mut inner.p2sh as &mut dyn AnyStoredVec, - &mut inner.p2wpkh as &mut dyn AnyStoredVec, - &mut inner.p2wsh as &mut dyn AnyStoredVec, - &mut inner.p2tr as &mut dyn AnyStoredVec, - &mut inner.p2a as &mut dyn AnyStoredVec, - ] - .into_par_iter() - } - - pub fn write(&mut self) -> Result<()> { - self.p2pk65.write()?; - self.p2pk33.write()?; - self.p2pkh.write()?; - self.p2sh.write()?; - self.p2wpkh.write()?; - self.p2wsh.write()?; - self.p2tr.write()?; - self.p2a.write()?; - Ok(()) - } - - pub fn truncate_push( - &mut self, - height: Height, - addresstype_to_usize: &AddressTypeToAddressCount, - ) -> Result<()> { - self.p2pk65 - .truncate_push(height, addresstype_to_usize.p2pk65.into())?; - self.p2pk33 - .truncate_push(height, addresstype_to_usize.p2pk33.into())?; - self.p2pkh - .truncate_push(height, addresstype_to_usize.p2pkh.into())?; - self.p2sh - 
.truncate_push(height, addresstype_to_usize.p2sh.into())?; - self.p2wpkh - .truncate_push(height, addresstype_to_usize.p2wpkh.into())?; - self.p2wsh - .truncate_push(height, addresstype_to_usize.p2wsh.into())?; - self.p2tr - .truncate_push(height, addresstype_to_usize.p2tr.into())?; - self.p2a - .truncate_push(height, addresstype_to_usize.p2a.into())?; - Ok(()) - } - - pub fn reset(&mut self) -> Result<()> { - use vecdb::GenericStoredVec; - self.p2pk65.reset()?; - self.p2pk33.reset()?; - self.p2pkh.reset()?; - self.p2sh.reset()?; - self.p2wpkh.reset()?; - self.p2wsh.reset()?; - self.p2tr.reset()?; - self.p2a.reset()?; - Ok(()) - } -} - -/// Address count per address type, indexed by various indexes (dateindex, etc.). +/// Address count per address type, with height + derived indexes. #[derive(Clone, Deref, DerefMut, Traversable)] -pub struct AddressTypeToIndexesToAddressCount(ByAddressType>); +pub struct AddressTypeToAddrCountVecs(ByAddressType>); -impl From>> - for AddressTypeToIndexesToAddressCount -{ +impl From>> for AddressTypeToAddrCountVecs { #[inline] - fn from(value: ByAddressType>) -> Self { + fn from(value: ByAddressType>) -> Self { Self(value) } } -impl AddressTypeToIndexesToAddressCount { +impl AddressTypeToAddrCountVecs { pub fn forced_import( db: &Database, name: &str, version: Version, indexes: &indexes::Vecs, - sources: &AddressTypeToHeightToAddressCount, ) -> Result { - Ok(Self::from(ByAddressType::< - DerivedComputedBlockLast, - >::try_zip_with_name( - sources, - |type_name, source| { - DerivedComputedBlockLast::forced_import( + Ok(Self::from( + ByAddressType::>::new_with_name(|type_name| { + ComputedBlockLast::forced_import( db, &format!("{type_name}_{name}"), - source.boxed_clone(), version, indexes, ) - }, - )?)) + })?, + )) } - pub fn compute( + pub fn min_len(&self) -> usize { + self.p2pk65 + .height + .len() + .min(self.p2pk33.height.len()) + .min(self.p2pkh.height.len()) + .min(self.p2sh.height.len()) + .min(self.p2wpkh.height.len()) + 
.min(self.p2wsh.height.len()) + .min(self.p2tr.height.len()) + .min(self.p2a.height.len()) + } + + pub fn par_iter_height_mut(&mut self) -> impl ParallelIterator { + let inner = &mut self.0; + [ + &mut inner.p2pk65.height as &mut dyn AnyStoredVec, + &mut inner.p2pk33.height as &mut dyn AnyStoredVec, + &mut inner.p2pkh.height as &mut dyn AnyStoredVec, + &mut inner.p2sh.height as &mut dyn AnyStoredVec, + &mut inner.p2wpkh.height as &mut dyn AnyStoredVec, + &mut inner.p2wsh.height as &mut dyn AnyStoredVec, + &mut inner.p2tr.height as &mut dyn AnyStoredVec, + &mut inner.p2a.height as &mut dyn AnyStoredVec, + ] + .into_par_iter() + } + + pub fn write_height(&mut self) -> Result<()> { + self.p2pk65.height.write()?; + self.p2pk33.height.write()?; + self.p2pkh.height.write()?; + self.p2sh.height.write()?; + self.p2wpkh.height.write()?; + self.p2wsh.height.write()?; + self.p2tr.height.write()?; + self.p2a.height.write()?; + Ok(()) + } + + pub fn truncate_push_height( + &mut self, + height: Height, + addr_counts: &AddressTypeToAddressCount, + ) -> Result<()> { + self.p2pk65 + .height + .truncate_push(height, addr_counts.p2pk65.into())?; + self.p2pk33 + .height + .truncate_push(height, addr_counts.p2pk33.into())?; + self.p2pkh + .height + .truncate_push(height, addr_counts.p2pkh.into())?; + self.p2sh + .height + .truncate_push(height, addr_counts.p2sh.into())?; + self.p2wpkh + .height + .truncate_push(height, addr_counts.p2wpkh.into())?; + self.p2wsh + .height + .truncate_push(height, addr_counts.p2wsh.into())?; + self.p2tr + .height + .truncate_push(height, addr_counts.p2tr.into())?; + self.p2a + .height + .truncate_push(height, addr_counts.p2a.into())?; + Ok(()) + } + + pub fn reset_height(&mut self) -> Result<()> { + use vecdb::GenericStoredVec; + self.p2pk65.height.reset()?; + self.p2pk33.height.reset()?; + self.p2pkh.height.reset()?; + self.p2sh.height.reset()?; + self.p2wpkh.height.reset()?; + self.p2wsh.height.reset()?; + self.p2tr.height.reset()?; + 
self.p2a.height.reset()?; + Ok(()) + } + + pub fn compute_rest( &mut self, indexes: &indexes::Vecs, starting_indexes: &ComputeIndexes, exit: &Exit, - addresstype_to_height_to_addresscount: &AddressTypeToHeightToAddressCount, ) -> Result<()> { - self.p2pk65.derive_from( - indexes, - starting_indexes, - &addresstype_to_height_to_addresscount.p2pk65, - exit, - )?; - self.p2pk33.derive_from( - indexes, - starting_indexes, - &addresstype_to_height_to_addresscount.p2pk33, - exit, - )?; - self.p2pkh.derive_from( - indexes, - starting_indexes, - &addresstype_to_height_to_addresscount.p2pkh, - exit, - )?; - self.p2sh.derive_from( - indexes, - starting_indexes, - &addresstype_to_height_to_addresscount.p2sh, - exit, - )?; - self.p2wpkh.derive_from( - indexes, - starting_indexes, - &addresstype_to_height_to_addresscount.p2wpkh, - exit, - )?; - self.p2wsh.derive_from( - indexes, - starting_indexes, - &addresstype_to_height_to_addresscount.p2wsh, - exit, - )?; - self.p2tr.derive_from( - indexes, - starting_indexes, - &addresstype_to_height_to_addresscount.p2tr, - exit, - )?; - self.p2a.derive_from( - indexes, - starting_indexes, - &addresstype_to_height_to_addresscount.p2a, - exit, - )?; + self.p2pk65.compute_rest(indexes, starting_indexes, exit)?; + self.p2pk33.compute_rest(indexes, starting_indexes, exit)?; + self.p2pkh.compute_rest(indexes, starting_indexes, exit)?; + self.p2sh.compute_rest(indexes, starting_indexes, exit)?; + self.p2wpkh.compute_rest(indexes, starting_indexes, exit)?; + self.p2wsh.compute_rest(indexes, starting_indexes, exit)?; + self.p2tr.compute_rest(indexes, starting_indexes, exit)?; + self.p2a.compute_rest(indexes, starting_indexes, exit)?; + Ok(()) + } + + pub fn by_height(&self) -> Vec<&EagerVec>> { + vec![ + &self.p2pk65.height, + &self.p2pk33.height, + &self.p2pkh.height, + &self.p2sh.height, + &self.p2wpkh.height, + &self.p2wsh.height, + &self.p2tr.height, + &self.p2a.height, + ] + } +} + +#[derive(Clone, Traversable)] +pub struct AddrCountVecs { + 
pub all: ComputedBlockLast, + pub by_addresstype: AddressTypeToAddrCountVecs, +} + +impl AddrCountVecs { + pub fn forced_import( + db: &Database, + name: &str, + version: Version, + indexes: &indexes::Vecs, + ) -> Result { + Ok(Self { + all: ComputedBlockLast::forced_import(db, name, version, indexes)?, + by_addresstype: AddressTypeToAddrCountVecs::forced_import(db, name, version, indexes)?, + }) + } + + pub fn min_len(&self) -> usize { + self.all.height.len().min(self.by_addresstype.min_len()) + } + + pub fn par_iter_height_mut(&mut self) -> impl ParallelIterator { + rayon::iter::once(&mut self.all.height as &mut dyn AnyStoredVec) + .chain(self.by_addresstype.par_iter_height_mut()) + } + + pub fn reset_height(&mut self) -> Result<()> { + self.all.height.reset()?; + self.by_addresstype.reset_height()?; + Ok(()) + } + + pub fn truncate_push_height( + &mut self, + height: Height, + total: u64, + addr_counts: &AddressTypeToAddressCount, + ) -> Result<()> { + self.all.height.truncate_push(height, total.into())?; + self.by_addresstype + .truncate_push_height(height, addr_counts)?; + Ok(()) + } + + pub fn compute_rest( + &mut self, + indexes: &indexes::Vecs, + starting_indexes: &ComputeIndexes, + exit: &Exit, + ) -> Result<()> { + self.by_addresstype + .compute_rest(indexes, starting_indexes, exit)?; + + let sources = self.by_addresstype.by_height(); + self.all + .compute_all(indexes, starting_indexes, exit, |height_vec| { + Ok(height_vec.compute_sum_of_others(starting_indexes.height, &sources, exit)?) 
+ })?; + Ok(()) } } diff --git a/crates/brk_computer/src/distribution/address/mod.rs b/crates/brk_computer/src/distribution/address/mod.rs index 163f325c3..f55205643 100644 --- a/crates/brk_computer/src/distribution/address/mod.rs +++ b/crates/brk_computer/src/distribution/address/mod.rs @@ -3,10 +3,7 @@ mod data; mod indexes; mod type_map; -pub use address_count::{ - AddressTypeToAddressCount, AddressTypeToHeightToAddressCount, - AddressTypeToIndexesToAddressCount, -}; +pub use address_count::{AddrCountVecs, AddressTypeToAddressCount}; pub use data::AddressesDataVecs; pub use indexes::AnyAddressIndexesVecs; pub use type_map::{AddressTypeToTypeIndexMap, AddressTypeToVec, HeightToAddressTypeToVec}; diff --git a/crates/brk_computer/src/distribution/cohorts/address/vecs.rs b/crates/brk_computer/src/distribution/cohorts/address/vecs.rs index f8709155e..db633f541 100644 --- a/crates/brk_computer/src/distribution/cohorts/address/vecs.rs +++ b/crates/brk_computer/src/distribution/cohorts/address/vecs.rs @@ -5,16 +5,13 @@ use brk_error::Result; use brk_traversable::Traversable; use brk_types::{DateIndex, Dollars, Height, StoredU64, Version}; use rayon::prelude::*; -use vecdb::{ - AnyStoredVec, AnyVec, Database, EagerVec, Exit, GenericStoredVec, ImportableVec, - IterableCloneableVec, IterableVec, PcoVec, -}; +use vecdb::{AnyStoredVec, AnyVec, Database, Exit, GenericStoredVec, IterableVec}; use crate::{ ComputeIndexes, distribution::state::AddressCohortState, indexes, - internal::DerivedComputedBlockLast, + internal::ComputedBlockLast, price, }; @@ -38,11 +35,7 @@ pub struct AddressCohortVecs { #[traversable(flatten)] pub metrics: CohortMetrics, - /// Address count at each height - pub height_to_addr_count: EagerVec>, - - /// Address count indexed by various dimensions - pub indexes_to_addr_count: DerivedComputedBlockLast, + pub addr_count: ComputedBlockLast, } impl AddressCohortVecs { @@ -75,9 +68,6 @@ impl AddressCohortVecs { up_to_1h_realized: None, }; - let 
height_to_addr_count = - EagerVec::forced_import(db, &cfg.name("addr_count"), version + VERSION)?; - Ok(Self { starting_height: None, @@ -86,14 +76,12 @@ impl AddressCohortVecs { metrics: CohortMetrics::forced_import(&cfg, all_supply)?, - indexes_to_addr_count: DerivedComputedBlockLast::forced_import( + addr_count: ComputedBlockLast::forced_import( db, &cfg.name("addr_count"), - height_to_addr_count.boxed_clone(), version + VERSION, indexes, )?, - height_to_addr_count, }) } @@ -114,7 +102,7 @@ impl AddressCohortVecs { /// Returns a parallel iterator over all vecs for parallel writing. pub fn par_iter_vecs_mut(&mut self) -> impl ParallelIterator { - rayon::iter::once(&mut self.height_to_addr_count as &mut dyn AnyStoredVec) + rayon::iter::once(&mut self.addr_count.height as &mut dyn AnyStoredVec) .chain(self.metrics.par_iter_mut()) } @@ -135,7 +123,8 @@ impl Filtered for AddressCohortVecs { impl DynCohortVecs for AddressCohortVecs { fn min_stateful_height_len(&self) -> usize { - self.height_to_addr_count + self.addr_count + .height .len() .min(self.metrics.min_stateful_height_len()) } @@ -166,21 +155,25 @@ impl DynCohortVecs for AddressCohortVecs { state.inner.supply.value = self .metrics .supply - .height_to_supply + .supply + .sats + .height .read_once(prev_height)?; state.inner.supply.utxo_count = *self .metrics - .supply - .height_to_utxo_count + .outputs + .utxo_count + .height .read_once(prev_height)?; - state.addr_count = *self.height_to_addr_count.read_once(prev_height)?; + state.addr_count = *self.addr_count.height.read_once(prev_height)?; // Restore realized cap if present if let Some(realized_metrics) = self.metrics.realized.as_mut() && let Some(realized_state) = state.inner.realized.as_mut() { realized_state.cap = realized_metrics - .height_to_realized_cap + .realized_cap + .height .read_once(prev_height)?; } @@ -200,7 +193,8 @@ impl DynCohortVecs for AddressCohortVecs { fn validate_computed_versions(&mut self, base_version: Version) -> Result<()> { use 
vecdb::GenericStoredVec; - self.height_to_addr_count + self.addr_count + .height .validate_computed_version_or_reset(base_version)?; self.metrics.validate_computed_versions(base_version)?; Ok(()) @@ -213,7 +207,8 @@ impl DynCohortVecs for AddressCohortVecs { // Push addr_count from state if let Some(state) = self.state.as_ref() { - self.height_to_addr_count + self.addr_count + .height .truncate_push(height, state.addr_count.into())?; self.metrics.truncate_push(height, &state.inner)?; } @@ -247,12 +242,8 @@ impl DynCohortVecs for AddressCohortVecs { starting_indexes: &ComputeIndexes, exit: &Exit, ) -> Result<()> { - self.indexes_to_addr_count.derive_from( - indexes, - starting_indexes, - &self.height_to_addr_count, - exit, - )?; + self.addr_count + .compute_rest(indexes, starting_indexes, exit)?; self.metrics .compute_rest_part1(indexes, price, starting_indexes, exit)?; Ok(()) @@ -266,11 +257,11 @@ impl CohortVecs for AddressCohortVecs { others: &[&Self], exit: &Exit, ) -> Result<()> { - self.height_to_addr_count.compute_sum_of_others( + self.addr_count.height.compute_sum_of_others( starting_indexes.height, others .iter() - .map(|v| &v.height_to_addr_count) + .map(|v| &v.addr_count.height) .collect::>() .as_slice(), exit, diff --git a/crates/brk_computer/src/distribution/cohorts/utxo/vecs.rs b/crates/brk_computer/src/distribution/cohorts/utxo/vecs.rs index 3961be19e..eaa2bc8c5 100644 --- a/crates/brk_computer/src/distribution/cohorts/utxo/vecs.rs +++ b/crates/brk_computer/src/distribution/cohorts/utxo/vecs.rs @@ -149,12 +149,15 @@ impl DynCohortVecs for UTXOCohortVecs { state.supply.value = self .metrics .supply - .height_to_supply + .supply + .sats + .height .read_once(prev_height)?; state.supply.utxo_count = *self .metrics - .supply - .height_to_utxo_count + .outputs + .utxo_count + .height .read_once(prev_height)?; // Restore realized cap if present @@ -162,7 +165,8 @@ impl DynCohortVecs for UTXOCohortVecs { && let Some(realized_state) = state.realized.as_mut() { 
realized_state.cap = realized_metrics - .height_to_realized_cap + .realized_cap + .height .read_once(prev_height)?; } diff --git a/crates/brk_computer/src/distribution/compute/aggregates.rs b/crates/brk_computer/src/distribution/compute/aggregates.rs index c50272a85..b23f1d5cf 100644 --- a/crates/brk_computer/src/distribution/compute/aggregates.rs +++ b/crates/brk_computer/src/distribution/compute/aggregates.rs @@ -1,6 +1,6 @@ use brk_error::Result; use brk_types::{DateIndex, Dollars, Height}; -use log::info; +use tracing::info; use vecdb::{Exit, IterableVec}; use crate::{ComputeIndexes, indexes, price}; diff --git a/crates/brk_computer/src/distribution/compute/block_loop.rs b/crates/brk_computer/src/distribution/compute/block_loop.rs index 85c3c0c77..4fe88d976 100644 --- a/crates/brk_computer/src/distribution/compute/block_loop.rs +++ b/crates/brk_computer/src/distribution/compute/block_loop.rs @@ -4,12 +4,12 @@ use brk_cohort::ByAddressType; use brk_error::Result; use brk_indexer::Indexer; use brk_types::{DateIndex, Height, OutputType, Sats, TxIndex, TypeIndex}; -use log::info; use rayon::prelude::*; +use tracing::info; use vecdb::{Exit, IterableVec, TypedVecIterator, VecIndex}; use crate::{ - blocks, transactions, indexes, price, + blocks, distribution::{ address::AddressTypeToAddressCount, block::{ @@ -19,7 +19,7 @@ use crate::{ compute::write::{process_address_updates, write}, state::{BlockState, Transacted}, }, - inputs, outputs, + indexes, inputs, outputs, price, transactions, }; use super::{ @@ -58,25 +58,25 @@ pub fn process_blocks( // References to vectors using correct field paths // From indexer.vecs: - let height_to_first_txindex = &indexer.vecs.tx.height_to_first_txindex; - let height_to_first_txoutindex = &indexer.vecs.txout.height_to_first_txoutindex; - let height_to_first_txinindex = &indexer.vecs.txin.height_to_first_txinindex; + let height_to_first_txindex = &indexer.vecs.transactions.first_txindex; + let height_to_first_txoutindex = 
&indexer.vecs.outputs.first_txoutindex; + let height_to_first_txinindex = &indexer.vecs.inputs.first_txinindex; // From transactions and inputs/outputs (via .height or .height.sum_cum.sum patterns): - let height_to_tx_count = &transactions.count.indexes_to_tx_count.height; - let height_to_output_count = &outputs.count.indexes_to_count.height.sum_cum.sum.0; - let height_to_input_count = &inputs.count.indexes_to_count.height.sum_cum.sum.0; + let height_to_tx_count = &transactions.count.tx_count.height; + let height_to_output_count = &outputs.count.total_count.height.sum_cum.sum.0; + let height_to_input_count = &inputs.count.height.sum_cum.sum.0; // From blocks: - let height_to_timestamp = &blocks.time.height_to_timestamp_fixed; - let height_to_date = &blocks.time.height_to_date_fixed; - let dateindex_to_first_height = &indexes.time.dateindex_to_first_height; - let dateindex_to_height_count = &indexes.time.dateindex_to_height_count; - let txindex_to_output_count = &indexes.transaction.txindex_to_output_count; - let txindex_to_input_count = &indexes.transaction.txindex_to_input_count; + let height_to_timestamp = &blocks.time.timestamp_fixed; + let height_to_date = &blocks.time.date_fixed; + let dateindex_to_first_height = &indexes.dateindex.first_height; + let dateindex_to_height_count = &indexes.dateindex.height_count; + let txindex_to_output_count = &indexes.txindex.output_count; + let txindex_to_input_count = &indexes.txindex.input_count; // From price (optional): - let height_to_price = price.map(|p| &p.usd.chainindexes_to_price_close.height); - let dateindex_to_price = price.map(|p| &p.usd.timeindexes_to_price_close.dateindex); + let height_to_price = price.map(|p| &p.usd.split.close.height); + let dateindex_to_price = price.map(|p| &p.usd.split.close.dateindex); // Access pre-computed vectors from context for thread-safe access let height_to_price_vec = &ctx.height_to_price; @@ -103,7 +103,7 @@ pub fn process_blocks( // Build txindex -> height lookup map for 
efficient prev_height computation let mut txindex_to_height: RangeMap = { let mut map = RangeMap::with_capacity(last_height.to_usize() + 1); - for first_txindex in indexer.vecs.tx.height_to_first_txindex.into_iter() { + for first_txindex in indexer.vecs.transactions.first_txindex.into_iter() { map.push(first_txindex); } map @@ -114,65 +114,30 @@ pub fn process_blocks( let mut txin_iters = TxInIterators::new(indexer, inputs, &mut txindex_to_height); // Create iterators for first address indexes per type - let mut first_p2a_iter = indexer - .vecs - .address - .height_to_first_p2aaddressindex - .into_iter(); - let mut first_p2pk33_iter = indexer - .vecs - .address - .height_to_first_p2pk33addressindex - .into_iter(); - let mut first_p2pk65_iter = indexer - .vecs - .address - .height_to_first_p2pk65addressindex - .into_iter(); - let mut first_p2pkh_iter = indexer - .vecs - .address - .height_to_first_p2pkhaddressindex - .into_iter(); - let mut first_p2sh_iter = indexer - .vecs - .address - .height_to_first_p2shaddressindex - .into_iter(); - let mut first_p2tr_iter = indexer - .vecs - .address - .height_to_first_p2traddressindex - .into_iter(); - let mut first_p2wpkh_iter = indexer - .vecs - .address - .height_to_first_p2wpkhaddressindex - .into_iter(); - let mut first_p2wsh_iter = indexer - .vecs - .address - .height_to_first_p2wshaddressindex - .into_iter(); + let mut first_p2a_iter = indexer.vecs.addresses.first_p2aaddressindex.into_iter(); + let mut first_p2pk33_iter = indexer.vecs.addresses.first_p2pk33addressindex.into_iter(); + let mut first_p2pk65_iter = indexer.vecs.addresses.first_p2pk65addressindex.into_iter(); + let mut first_p2pkh_iter = indexer.vecs.addresses.first_p2pkhaddressindex.into_iter(); + let mut first_p2sh_iter = indexer.vecs.addresses.first_p2shaddressindex.into_iter(); + let mut first_p2tr_iter = indexer.vecs.addresses.first_p2traddressindex.into_iter(); + let mut first_p2wpkh_iter = indexer.vecs.addresses.first_p2wpkhaddressindex.into_iter(); 
+ let mut first_p2wsh_iter = indexer.vecs.addresses.first_p2wshaddressindex.into_iter(); // Track running totals - recover from previous height if resuming - let (mut addresstype_to_addr_count, mut addresstype_to_empty_addr_count) = - if starting_height > Height::ZERO { - let addr_count = AddressTypeToAddressCount::from(( - &vecs.addresstype_to_height_to_addr_count, - starting_height, - )); - let empty_addr_count = AddressTypeToAddressCount::from(( - &vecs.addresstype_to_height_to_empty_addr_count, - starting_height, - )); - (addr_count, empty_addr_count) - } else { - ( - AddressTypeToAddressCount::default(), - AddressTypeToAddressCount::default(), - ) - }; + let (mut addr_counts, mut empty_addr_counts) = if starting_height > Height::ZERO { + let addr_counts = + AddressTypeToAddressCount::from((&vecs.addr_count.by_addresstype, starting_height)); + let empty_addr_counts = AddressTypeToAddressCount::from(( + &vecs.empty_addr_count.by_addresstype, + starting_height, + )); + (addr_counts, empty_addr_counts) + } else { + ( + AddressTypeToAddressCount::default(), + AddressTypeToAddressCount::default(), + ) + }; let mut cache = AddressCache::new(); @@ -333,8 +298,8 @@ pub fn process_blocks( &mut vecs.address_cohorts, &mut lookup, block_price, - &mut addresstype_to_addr_count, - &mut addresstype_to_empty_addr_count, + &mut addr_counts, + &mut empty_addr_counts, ); // Process sent inputs (addresses sending funds) @@ -344,8 +309,8 @@ pub fn process_blocks( &mut vecs.address_cohorts, &mut lookup, block_price, - &mut addresstype_to_addr_count, - &mut addresstype_to_empty_addr_count, + &mut addr_counts, + &mut empty_addr_counts, height_to_price_vec.as_deref(), height_to_timestamp_vec, height, @@ -361,10 +326,13 @@ pub fn process_blocks( }); // Push to height-indexed vectors - vecs.addresstype_to_height_to_addr_count - .truncate_push(height, &addresstype_to_addr_count)?; - vecs.addresstype_to_height_to_empty_addr_count - .truncate_push(height, &addresstype_to_empty_addr_count)?; 
+ vecs.addr_count + .truncate_push_height(height, addr_counts.sum(), &addr_counts)?; + vecs.empty_addr_count.truncate_push_height( + height, + empty_addr_counts.sum(), + &empty_addr_counts, + )?; // Get date info for unrealized state computation let date = height_to_date_iter.get_unwrap(height); diff --git a/crates/brk_computer/src/distribution/compute/context.rs b/crates/brk_computer/src/distribution/compute/context.rs index d2be94f34..704aa41c4 100644 --- a/crates/brk_computer/src/distribution/compute/context.rs +++ b/crates/brk_computer/src/distribution/compute/context.rs @@ -26,11 +26,10 @@ impl ComputeContext { blocks: &blocks::Vecs, price: Option<&price::Vecs>, ) -> Self { - let height_to_timestamp: Vec = - blocks.time.height_to_timestamp_fixed.into_iter().collect(); + let height_to_timestamp: Vec = blocks.time.timestamp_fixed.into_iter().collect(); let height_to_price: Option> = price - .map(|p| &p.usd.chainindexes_to_price_close.height) + .map(|p| &p.usd.split.close.height) .map(|v| v.into_iter().map(|d| *d).collect()); Self { diff --git a/crates/brk_computer/src/distribution/compute/readers.rs b/crates/brk_computer/src/distribution/compute/readers.rs index 397d445cb..32410de7a 100644 --- a/crates/brk_computer/src/distribution/compute/readers.rs +++ b/crates/brk_computer/src/distribution/compute/readers.rs @@ -37,9 +37,9 @@ pub struct TxOutIterators<'a> { impl<'a> TxOutIterators<'a> { pub fn new(indexer: &'a Indexer) -> Self { Self { - value_iter: indexer.vecs.txout.txoutindex_to_value.into_iter(), - outputtype_iter: indexer.vecs.txout.txoutindex_to_outputtype.into_iter(), - typeindex_iter: indexer.vecs.txout.txoutindex_to_typeindex.into_iter(), + value_iter: indexer.vecs.outputs.value.into_iter(), + outputtype_iter: indexer.vecs.outputs.outputtype.into_iter(), + typeindex_iter: indexer.vecs.outputs.typeindex.into_iter(), } } @@ -75,10 +75,10 @@ impl<'a> TxInIterators<'a> { txindex_to_height: &'a mut RangeMap, ) -> Self { Self { - value_iter: 
txins.spent.txinindex_to_value.into_iter(), - outpoint_iter: indexer.vecs.txin.txinindex_to_outpoint.into_iter(), - outputtype_iter: indexer.vecs.txin.txinindex_to_outputtype.into_iter(), - typeindex_iter: indexer.vecs.txin.txinindex_to_typeindex.into_iter(), + value_iter: txins.spent.value.into_iter(), + outpoint_iter: indexer.vecs.inputs.outpoint.into_iter(), + outputtype_iter: indexer.vecs.inputs.outputtype.into_iter(), + typeindex_iter: indexer.vecs.inputs.typeindex.into_iter(), txindex_to_height, } } diff --git a/crates/brk_computer/src/distribution/compute/recover.rs b/crates/brk_computer/src/distribution/compute/recover.rs index 5d3944479..470dcfafe 100644 --- a/crates/brk_computer/src/distribution/compute/recover.rs +++ b/crates/brk_computer/src/distribution/compute/recover.rs @@ -95,15 +95,18 @@ pub fn reset_state( } /// Check if we can resume from a checkpoint or need to start fresh. -pub fn determine_start_mode(computed_min: Height, chain_state_height: Height) -> StartMode { +/// +/// - `min_available`: minimum height we have data for across all stateful vecs +/// - `resume_target`: the height we want to resume processing from +pub fn determine_start_mode(min_available: Height, resume_target: Height) -> StartMode { // No data to resume from - if chain_state_height.is_zero() { + if resume_target.is_zero() { return StartMode::Fresh; } - match computed_min.cmp(&chain_state_height) { - Ordering::Greater => unreachable!("min height > chain state height"), - Ordering::Equal => StartMode::Resume(chain_state_height), + match min_available.cmp(&resume_target) { + Ordering::Greater => unreachable!("min_available > resume_target"), + Ordering::Equal => StartMode::Resume(resume_target), Ordering::Less => StartMode::Fresh, } } diff --git a/crates/brk_computer/src/distribution/compute/write.rs b/crates/brk_computer/src/distribution/compute/write.rs index 580bc42f1..5d7bc2d28 100644 --- a/crates/brk_computer/src/distribution/compute/write.rs +++ 
b/crates/brk_computer/src/distribution/compute/write.rs @@ -2,8 +2,8 @@ use std::time::Instant; use brk_error::Result; use brk_types::Height; -use log::info; use rayon::prelude::*; +use tracing::info; use vecdb::{AnyStoredVec, GenericStoredVec, Stamp}; use crate::distribution::{ @@ -77,11 +77,8 @@ pub fn write( vecs.any_address_indexes .par_iter_mut() .chain(vecs.addresses_data.par_iter_mut()) - .chain(vecs.addresstype_to_height_to_addr_count.par_iter_mut()) - .chain( - vecs.addresstype_to_height_to_empty_addr_count - .par_iter_mut(), - ) + .chain(vecs.addr_count.par_iter_height_mut()) + .chain(vecs.empty_addr_count.par_iter_height_mut()) .chain(rayon::iter::once( &mut vecs.chain_state as &mut dyn AnyStoredVec, )) diff --git a/crates/brk_computer/src/distribution/metrics/activity.rs b/crates/brk_computer/src/distribution/metrics/activity.rs index b6487cea7..841315558 100644 --- a/crates/brk_computer/src/distribution/metrics/activity.rs +++ b/crates/brk_computer/src/distribution/metrics/activity.rs @@ -2,14 +2,11 @@ use brk_error::Result; use brk_traversable::Traversable; use brk_types::{Bitcoin, Height, Sats, StoredF64, Version}; use rayon::prelude::*; -use vecdb::{ - AnyStoredVec, AnyVec, EagerVec, Exit, GenericStoredVec, ImportableVec, IterableCloneableVec, - PcoVec, -}; +use vecdb::{AnyStoredVec, AnyVec, EagerVec, Exit, GenericStoredVec, ImportableVec, PcoVec}; use crate::{ ComputeIndexes, indexes, - internal::{ComputedBlockSumCum, DerivedValueBlockSumCum}, + internal::{ComputedBlockSumCum, LazyComputedValueBlockSumCum}, }; use super::ImportConfig; @@ -17,63 +14,54 @@ use super::ImportConfig; /// Activity metrics for a cohort. 
#[derive(Clone, Traversable)] pub struct ActivityMetrics { - /// Total satoshis sent at each height - pub height_to_sent: EagerVec>, - - /// Sent amounts indexed by various dimensions (derives from height_to_sent) - pub indexes_to_sent: DerivedValueBlockSumCum, + /// Total satoshis sent at each height + derived indexes + pub sent: LazyComputedValueBlockSumCum, /// Satoshi-blocks destroyed (supply * blocks_old when spent) - pub height_to_satblocks_destroyed: EagerVec>, + pub satblocks_destroyed: EagerVec>, /// Satoshi-days destroyed (supply * days_old when spent) - pub height_to_satdays_destroyed: EagerVec>, + pub satdays_destroyed: EagerVec>, /// Coin-blocks destroyed (in BTC rather than sats) - pub indexes_to_coinblocks_destroyed: ComputedBlockSumCum, + pub coinblocks_destroyed: ComputedBlockSumCum, /// Coin-days destroyed (in BTC rather than sats) - pub indexes_to_coindays_destroyed: ComputedBlockSumCum, + pub coindays_destroyed: ComputedBlockSumCum, } impl ActivityMetrics { /// Import activity metrics from database. 
pub fn forced_import(cfg: &ImportConfig) -> Result { - let height_to_sent: EagerVec> = - EagerVec::forced_import(cfg.db, &cfg.name("sent"), cfg.version)?; - let indexes_to_sent = DerivedValueBlockSumCum::forced_import( - cfg.db, - &cfg.name("sent"), - cfg.version, - cfg.indexes, - height_to_sent.boxed_clone(), - cfg.price, - )?; - Ok(Self { - height_to_sent, - indexes_to_sent, + sent: LazyComputedValueBlockSumCum::forced_import( + cfg.db, + &cfg.name("sent"), + cfg.version, + cfg.indexes, + cfg.price, + )?, - height_to_satblocks_destroyed: EagerVec::forced_import( + satblocks_destroyed: EagerVec::forced_import( cfg.db, &cfg.name("satblocks_destroyed"), cfg.version, )?, - height_to_satdays_destroyed: EagerVec::forced_import( + satdays_destroyed: EagerVec::forced_import( cfg.db, &cfg.name("satdays_destroyed"), cfg.version, )?, - indexes_to_coinblocks_destroyed: ComputedBlockSumCum::forced_import( + coinblocks_destroyed: ComputedBlockSumCum::forced_import( cfg.db, &cfg.name("coinblocks_destroyed"), cfg.version, cfg.indexes, )?, - indexes_to_coindays_destroyed: ComputedBlockSumCum::forced_import( + coindays_destroyed: ComputedBlockSumCum::forced_import( cfg.db, &cfg.name("coindays_destroyed"), cfg.version, @@ -84,10 +72,12 @@ impl ActivityMetrics { /// Get minimum length across height-indexed vectors. pub fn min_len(&self) -> usize { - self.height_to_sent + self.sent + .sats + .height .len() - .min(self.height_to_satblocks_destroyed.len()) - .min(self.height_to_satdays_destroyed.len()) + .min(self.satblocks_destroyed.len()) + .min(self.satdays_destroyed.len()) } /// Push activity state values to height-indexed vectors. 
@@ -98,28 +88,28 @@ impl ActivityMetrics { satblocks_destroyed: Sats, satdays_destroyed: Sats, ) -> Result<()> { - self.height_to_sent.truncate_push(height, sent)?; - self.height_to_satblocks_destroyed + self.sent.sats.height.truncate_push(height, sent)?; + self.satblocks_destroyed .truncate_push(height, satblocks_destroyed)?; - self.height_to_satdays_destroyed + self.satdays_destroyed .truncate_push(height, satdays_destroyed)?; Ok(()) } /// Write height-indexed vectors to disk. pub fn write(&mut self) -> Result<()> { - self.height_to_sent.write()?; - self.height_to_satblocks_destroyed.write()?; - self.height_to_satdays_destroyed.write()?; + self.sent.sats.height.write()?; + self.satblocks_destroyed.write()?; + self.satdays_destroyed.write()?; Ok(()) } /// Returns a parallel iterator over all vecs for parallel writing. pub fn par_iter_mut(&mut self) -> impl ParallelIterator { vec![ - &mut self.height_to_sent as &mut dyn AnyStoredVec, - &mut self.height_to_satblocks_destroyed as &mut dyn AnyStoredVec, - &mut self.height_to_satdays_destroyed as &mut dyn AnyStoredVec, + &mut self.sent.sats.height as &mut dyn AnyStoredVec, + &mut self.satblocks_destroyed as &mut dyn AnyStoredVec, + &mut self.satdays_destroyed as &mut dyn AnyStoredVec, ] .into_par_iter() } @@ -137,24 +127,27 @@ impl ActivityMetrics { others: &[&Self], exit: &Exit, ) -> Result<()> { - self.height_to_sent.compute_sum_of_others( - starting_indexes.height, - &others.iter().map(|v| &v.height_to_sent).collect::>(), - exit, - )?; - self.height_to_satblocks_destroyed.compute_sum_of_others( + self.sent.sats.height.compute_sum_of_others( starting_indexes.height, &others .iter() - .map(|v| &v.height_to_satblocks_destroyed) + .map(|v| &v.sent.sats.height) .collect::>(), exit, )?; - self.height_to_satdays_destroyed.compute_sum_of_others( + self.satblocks_destroyed.compute_sum_of_others( starting_indexes.height, &others .iter() - .map(|v| &v.height_to_satdays_destroyed) + .map(|v| &v.satblocks_destroyed) + 
.collect::>(), + exit, + )?; + self.satdays_destroyed.compute_sum_of_others( + starting_indexes.height, + &others + .iter() + .map(|v| &v.satdays_destroyed) .collect::>(), exit, )?; @@ -168,29 +161,24 @@ impl ActivityMetrics { starting_indexes: &ComputeIndexes, exit: &Exit, ) -> Result<()> { - self.indexes_to_sent.derive_from( - indexes, - starting_indexes, - &self.height_to_sent, - exit, - )?; + self.sent.compute_rest(indexes, starting_indexes, exit)?; - self.indexes_to_coinblocks_destroyed + self.coinblocks_destroyed .compute_all(indexes, starting_indexes, exit, |v| { v.compute_transform( starting_indexes.height, - &self.height_to_satblocks_destroyed, + &self.satblocks_destroyed, |(i, v, ..)| (i, StoredF64::from(Bitcoin::from(v))), exit, )?; Ok(()) })?; - self.indexes_to_coindays_destroyed + self.coindays_destroyed .compute_all(indexes, starting_indexes, exit, |v| { v.compute_transform( starting_indexes.height, - &self.height_to_satdays_destroyed, + &self.satdays_destroyed, |(i, v, ..)| (i, StoredF64::from(Bitcoin::from(v))), exit, )?; diff --git a/crates/brk_computer/src/distribution/metrics/cost_basis.rs b/crates/brk_computer/src/distribution/metrics/cost_basis.rs index 454be7bb3..f6bdcea9a 100644 --- a/crates/brk_computer/src/distribution/metrics/cost_basis.rs +++ b/crates/brk_computer/src/distribution/metrics/cost_basis.rs @@ -2,15 +2,13 @@ use brk_error::Result; use brk_traversable::Traversable; use brk_types::{DateIndex, Dollars, Height, Version}; use rayon::prelude::*; -use vecdb::{ - AnyStoredVec, AnyVec, EagerVec, Exit, GenericStoredVec, ImportableVec, IterableCloneableVec, - PcoVec, -}; +use vecdb::{AnyStoredVec, AnyVec, Exit, GenericStoredVec}; use crate::{ ComputeIndexes, distribution::state::CohortState, - internal::{CostBasisPercentiles, DerivedComputedBlockLast}, + indexes, + internal::{ComputedBlockLast, CostBasisPercentiles}, }; use super::ImportConfig; @@ -19,12 +17,10 @@ use super::ImportConfig; #[derive(Clone, Traversable)] pub struct 
CostBasisMetrics { /// Minimum cost basis for any UTXO at this height - pub height_to_min_cost_basis: EagerVec>, - pub indexes_to_min_cost_basis: DerivedComputedBlockLast, + pub min: ComputedBlockLast, /// Maximum cost basis for any UTXO at this height - pub height_to_max_cost_basis: EagerVec>, - pub indexes_to_max_cost_basis: DerivedComputedBlockLast, + pub max: ComputedBlockLast, /// Cost basis distribution percentiles (median, quartiles, etc.) pub percentiles: Option, @@ -35,29 +31,19 @@ impl CostBasisMetrics { pub fn forced_import(cfg: &ImportConfig) -> Result { let extended = cfg.extended(); - let height_to_min_cost_basis = - EagerVec::forced_import(cfg.db, &cfg.name("min_cost_basis"), cfg.version)?; - - let height_to_max_cost_basis = - EagerVec::forced_import(cfg.db, &cfg.name("max_cost_basis"), cfg.version)?; - Ok(Self { - indexes_to_min_cost_basis: DerivedComputedBlockLast::forced_import( + min: ComputedBlockLast::forced_import( cfg.db, &cfg.name("min_cost_basis"), - height_to_min_cost_basis.boxed_clone(), cfg.version, cfg.indexes, )?, - indexes_to_max_cost_basis: DerivedComputedBlockLast::forced_import( + max: ComputedBlockLast::forced_import( cfg.db, &cfg.name("max_cost_basis"), - height_to_max_cost_basis.boxed_clone(), cfg.version, cfg.indexes, )?, - height_to_min_cost_basis, - height_to_max_cost_basis, percentiles: extended .then(|| { CostBasisPercentiles::forced_import( @@ -74,9 +60,7 @@ impl CostBasisMetrics { /// Get minimum length across height-indexed vectors written in block loop. pub fn min_stateful_height_len(&self) -> usize { - self.height_to_min_cost_basis - .len() - .min(self.height_to_max_cost_basis.len()) + self.min.height.len().min(self.max.height.len()) } /// Get minimum length across dateindex-indexed vectors written in block loop. @@ -89,14 +73,14 @@ impl CostBasisMetrics { /// Push min/max cost basis from state. 
pub fn truncate_push_minmax(&mut self, height: Height, state: &CohortState) -> Result<()> { - self.height_to_min_cost_basis.truncate_push( + self.min.height.truncate_push( height, state .price_to_amount_first_key_value() .map(|(dollars, _)| dollars) .unwrap_or(Dollars::NAN), )?; - self.height_to_max_cost_basis.truncate_push( + self.max.height.truncate_push( height, state .price_to_amount_last_key_value() @@ -122,8 +106,8 @@ impl CostBasisMetrics { /// Write height-indexed vectors to disk. pub fn write(&mut self) -> Result<()> { - self.height_to_min_cost_basis.write()?; - self.height_to_max_cost_basis.write()?; + self.min.height.write()?; + self.max.height.write()?; if let Some(percentiles) = self.percentiles.as_mut() { percentiles.write()?; } @@ -132,10 +116,7 @@ impl CostBasisMetrics { /// Returns a parallel iterator over all vecs for parallel writing. pub fn par_iter_mut(&mut self) -> impl ParallelIterator { - let mut vecs: Vec<&mut dyn AnyStoredVec> = vec![ - &mut self.height_to_min_cost_basis, - &mut self.height_to_max_cost_basis, - ]; + let mut vecs: Vec<&mut dyn AnyStoredVec> = vec![&mut self.min.height, &mut self.max.height]; if let Some(percentiles) = self.percentiles.as_mut() { vecs.extend( percentiles @@ -163,20 +144,14 @@ impl CostBasisMetrics { others: &[&Self], exit: &Exit, ) -> Result<()> { - self.height_to_min_cost_basis.compute_min_of_others( + self.min.height.compute_min_of_others( starting_indexes.height, - &others - .iter() - .map(|v| &v.height_to_min_cost_basis) - .collect::>(), + &others.iter().map(|v| &v.min.height).collect::>(), exit, )?; - self.height_to_max_cost_basis.compute_max_of_others( + self.max.height.compute_max_of_others( starting_indexes.height, - &others - .iter() - .map(|v| &v.height_to_max_cost_basis) - .collect::>(), + &others.iter().map(|v| &v.max.height).collect::>(), exit, )?; Ok(()) @@ -185,24 +160,12 @@ impl CostBasisMetrics { /// First phase of computed metrics (indexes from height). 
pub fn compute_rest_part1( &mut self, - indexes: &crate::indexes::Vecs, + indexes: &indexes::Vecs, starting_indexes: &ComputeIndexes, exit: &Exit, ) -> Result<()> { - self.indexes_to_min_cost_basis.derive_from( - indexes, - starting_indexes, - &self.height_to_min_cost_basis, - exit, - )?; - - self.indexes_to_max_cost_basis.derive_from( - indexes, - starting_indexes, - &self.height_to_max_cost_basis, - exit, - )?; - + self.min.compute_rest(indexes, starting_indexes, exit)?; + self.max.compute_rest(indexes, starting_indexes, exit)?; Ok(()) } } diff --git a/crates/brk_computer/src/distribution/metrics/mod.rs b/crates/brk_computer/src/distribution/metrics/mod.rs index 825ff1b10..fc06610fd 100644 --- a/crates/brk_computer/src/distribution/metrics/mod.rs +++ b/crates/brk_computer/src/distribution/metrics/mod.rs @@ -1,6 +1,7 @@ mod activity; mod config; mod cost_basis; +mod outputs; mod realized; mod relative; mod supply; @@ -9,6 +10,7 @@ mod unrealized; pub use activity::*; pub use config::*; pub use cost_basis::*; +pub use outputs::*; pub use realized::*; pub use relative::*; pub use supply::*; @@ -29,9 +31,12 @@ pub struct CohortMetrics { #[traversable(skip)] pub filter: Filter, - /// Supply and UTXO count (always computed) + /// Supply metrics (always computed) pub supply: SupplyMetrics, + /// Output metrics - UTXO count (always computed) + pub outputs: OutputsMetrics, + /// Transaction activity (always computed) pub activity: ActivityMetrics, @@ -58,6 +63,7 @@ impl CohortMetrics { let compute_dollars = cfg.compute_dollars(); let supply = SupplyMetrics::forced_import(cfg)?; + let outputs = OutputsMetrics::forced_import(cfg)?; let unrealized = compute_dollars .then(|| UnrealizedMetrics::forced_import(cfg)) @@ -71,6 +77,7 @@ impl CohortMetrics { Ok(Self { filter: cfg.filter.clone(), supply, + outputs, activity: ActivityMetrics::forced_import(cfg)?, realized: compute_dollars .then(|| RealizedMetrics::forced_import(cfg)) @@ -85,7 +92,7 @@ impl CohortMetrics { /// Get 
minimum length across height-indexed vectors written in block loop. pub fn min_stateful_height_len(&self) -> usize { - let mut min = self.supply.min_len().min(self.activity.min_len()); + let mut min = self.supply.min_len().min(self.outputs.min_len()).min(self.activity.min_len()); if let Some(realized) = &self.realized { min = min.min(realized.min_stateful_height_len()); @@ -116,7 +123,8 @@ impl CohortMetrics { /// Push state values to height-indexed vectors. pub fn truncate_push(&mut self, height: Height, state: &CohortState) -> Result<()> { - self.supply.truncate_push(height, &state.supply)?; + self.supply.truncate_push(height, state.supply.value)?; + self.outputs.truncate_push(height, state.supply.utxo_count)?; self.activity.truncate_push( height, state.sent, @@ -136,6 +144,7 @@ impl CohortMetrics { /// Write height-indexed vectors to disk. pub fn write(&mut self) -> Result<()> { self.supply.write()?; + self.outputs.write()?; self.activity.write()?; if let Some(realized) = self.realized.as_mut() { @@ -158,6 +167,7 @@ impl CohortMetrics { let mut vecs: Vec<&mut dyn AnyStoredVec> = Vec::new(); vecs.extend(self.supply.par_iter_mut().collect::>()); + vecs.extend(self.outputs.par_iter_mut().collect::>()); vecs.extend(self.activity.par_iter_mut().collect::>()); if let Some(realized) = self.realized.as_mut() { @@ -242,6 +252,11 @@ impl CohortMetrics { &others.iter().map(|v| &v.supply).collect::>(), exit, )?; + self.outputs.compute_from_stateful( + starting_indexes, + &others.iter().map(|v| &v.outputs).collect::>(), + exit, + )?; self.activity.compute_from_stateful( starting_indexes, &others.iter().map(|v| &v.activity).collect::>(), @@ -294,6 +309,8 @@ impl CohortMetrics { ) -> Result<()> { self.supply .compute_rest_part1(indexes, price, starting_indexes, exit)?; + self.outputs + .compute_rest(indexes, starting_indexes, exit)?; self.activity .compute_rest_part1(indexes, starting_indexes, exit)?; @@ -328,7 +345,7 @@ impl CohortMetrics { indexes, price, starting_indexes, - 
&self.supply.height_to_supply_value.bitcoin, + &self.supply.supply.bitcoin.height, height_to_market_cap, dateindex_to_market_cap, exit, diff --git a/crates/brk_computer/src/distribution/metrics/outputs.rs b/crates/brk_computer/src/distribution/metrics/outputs.rs new file mode 100644 index 000000000..c3031c018 --- /dev/null +++ b/crates/brk_computer/src/distribution/metrics/outputs.rs @@ -0,0 +1,81 @@ +use brk_error::Result; +use brk_traversable::Traversable; +use brk_types::{Height, StoredU64}; +use rayon::prelude::*; +use vecdb::{AnyStoredVec, AnyVec, Exit, GenericStoredVec}; + +use crate::{ComputeIndexes, indexes, internal::ComputedBlockLast}; + +use super::ImportConfig; + +/// Output metrics for a cohort. +#[derive(Clone, Traversable)] +pub struct OutputsMetrics { + pub utxo_count: ComputedBlockLast, +} + +impl OutputsMetrics { + /// Import output metrics from database. + pub fn forced_import(cfg: &ImportConfig) -> Result { + Ok(Self { + utxo_count: ComputedBlockLast::forced_import( + cfg.db, + &cfg.name("utxo_count"), + cfg.version, + cfg.indexes, + )?, + }) + } + + /// Get minimum length across height-indexed vectors. + pub fn min_len(&self) -> usize { + self.utxo_count.height.len() + } + + /// Push utxo count to height-indexed vector. + pub fn truncate_push(&mut self, height: Height, utxo_count: u64) -> Result<()> { + self.utxo_count + .height + .truncate_push(height, StoredU64::from(utxo_count))?; + Ok(()) + } + + /// Write height-indexed vectors to disk. + pub fn write(&mut self) -> Result<()> { + self.utxo_count.height.write()?; + Ok(()) + } + + /// Returns a parallel iterator over all vecs for parallel writing. + pub fn par_iter_mut(&mut self) -> impl ParallelIterator { + vec![&mut self.utxo_count.height as &mut dyn AnyStoredVec].into_par_iter() + } + + /// Compute aggregate values from separate cohorts. 
+ pub fn compute_from_stateful( + &mut self, + starting_indexes: &ComputeIndexes, + others: &[&Self], + exit: &Exit, + ) -> Result<()> { + self.utxo_count.height.compute_sum_of_others( + starting_indexes.height, + &others + .iter() + .map(|v| &v.utxo_count.height) + .collect::>(), + exit, + )?; + Ok(()) + } + + /// Compute derived metrics (dateindex from height). + pub fn compute_rest( + &mut self, + indexes: &indexes::Vecs, + starting_indexes: &ComputeIndexes, + exit: &Exit, + ) -> Result<()> { + self.utxo_count.compute_rest(indexes, starting_indexes, exit) + } +} diff --git a/crates/brk_computer/src/distribution/metrics/realized.rs b/crates/brk_computer/src/distribution/metrics/realized.rs index d088f86d4..487b67571 100644 --- a/crates/brk_computer/src/distribution/metrics/realized.rs +++ b/crates/brk_computer/src/distribution/metrics/realized.rs @@ -12,10 +12,9 @@ use crate::{ distribution::state::RealizedState, indexes, internal::{ - BinaryBlockSum, BinaryBlockSumCumLast, ComputedBlockLast, ComputedBlockSum, - ComputedBlockSumCum, ComputedDateLast, ComputedRatioVecsDate, DerivedComputedBlockLast, - DerivedComputedBlockSum, DerivedComputedBlockSumCum, DollarsMinus, LazyBlockSum, - LazyBlockSumCum, LazyDateLast, PercentageDollarsF32, StoredF32Identity, + BinaryBlockSum, BinaryBlockSumCum, ComputedBlockLast, ComputedBlockSum, + ComputedBlockSumCum, ComputedDateLast, ComputedRatioVecsDate, DollarsMinus, + LazyBlockSum, LazyBlockSumCum, LazyDateLast, PercentageDollarsF32, StoredF32Identity, }, price, }; @@ -26,69 +25,57 @@ use super::ImportConfig; #[derive(Clone, Traversable)] pub struct RealizedMetrics { // === Realized Cap === - pub height_to_realized_cap: EagerVec>, - pub indexes_to_realized_cap: DerivedComputedBlockLast, - pub indexes_to_realized_price: ComputedBlockLast, - pub indexes_to_realized_price_extra: ComputedRatioVecsDate, - pub indexes_to_realized_cap_rel_to_own_market_cap: Option>, - pub indexes_to_realized_cap_30d_delta: ComputedDateLast, + pub 
realized_cap: ComputedBlockLast, + pub realized_price: ComputedBlockLast, + pub realized_price_extra: ComputedRatioVecsDate, + pub realized_cap_rel_to_own_market_cap: Option>, + pub realized_cap_30d_delta: ComputedDateLast, // === MVRV (Market Value to Realized Value) === - // Proxy for indexes_to_realized_price_extra.ratio (close / realized_price = market_cap / realized_cap) - pub indexes_to_mvrv: LazyDateLast, + // Proxy for realized_price_extra.ratio (close / realized_price = market_cap / realized_cap) + pub mvrv: LazyDateLast, // === Realized Profit/Loss === - pub height_to_realized_profit: EagerVec>, - pub indexes_to_realized_profit: DerivedComputedBlockSumCum, - pub height_to_realized_loss: EagerVec>, - pub indexes_to_realized_loss: DerivedComputedBlockSumCum, - pub indexes_to_neg_realized_loss: LazyBlockSumCum, - pub indexes_to_net_realized_pnl: ComputedBlockSumCum, - pub indexes_to_realized_value: ComputedBlockSum, + pub realized_profit: ComputedBlockSumCum, + pub realized_loss: ComputedBlockSumCum, + pub neg_realized_loss: LazyBlockSumCum, + pub net_realized_pnl: ComputedBlockSumCum, + pub realized_value: ComputedBlockSum, // === Realized vs Realized Cap Ratios (lazy) === - pub indexes_to_realized_profit_rel_to_realized_cap: - BinaryBlockSumCumLast, - pub indexes_to_realized_loss_rel_to_realized_cap: - BinaryBlockSumCumLast, - pub indexes_to_net_realized_pnl_rel_to_realized_cap: - BinaryBlockSumCumLast, + pub realized_profit_rel_to_realized_cap: BinaryBlockSumCum, + pub realized_loss_rel_to_realized_cap: BinaryBlockSumCum, + pub net_realized_pnl_rel_to_realized_cap: BinaryBlockSumCum, // === Total Realized PnL === - pub indexes_to_total_realized_pnl: LazyBlockSum, - pub dateindex_to_realized_profit_to_loss_ratio: Option>>, + pub total_realized_pnl: LazyBlockSum, + pub realized_profit_to_loss_ratio: Option>>, // === Value Created/Destroyed === - pub height_to_value_created: EagerVec>, - #[traversable(rename = "value_created_sum")] - pub 
indexes_to_value_created: DerivedComputedBlockSum, - pub height_to_value_destroyed: EagerVec>, - #[traversable(rename = "value_destroyed_sum")] - pub indexes_to_value_destroyed: DerivedComputedBlockSum, + pub value_created: ComputedBlockSum, + pub value_destroyed: ComputedBlockSum, // === Adjusted Value (lazy: cohort - up_to_1h) === - pub indexes_to_adjusted_value_created: Option>, - pub indexes_to_adjusted_value_destroyed: Option>, + pub adjusted_value_created: Option>, + pub adjusted_value_destroyed: Option>, // === SOPR (Spent Output Profit Ratio) === - pub dateindex_to_sopr: EagerVec>, - pub dateindex_to_sopr_7d_ema: EagerVec>, - pub dateindex_to_sopr_30d_ema: EagerVec>, - pub dateindex_to_adjusted_sopr: Option>>, - pub dateindex_to_adjusted_sopr_7d_ema: Option>>, - pub dateindex_to_adjusted_sopr_30d_ema: Option>>, + pub sopr: EagerVec>, + pub sopr_7d_ema: EagerVec>, + pub sopr_30d_ema: EagerVec>, + pub adjusted_sopr: Option>>, + pub adjusted_sopr_7d_ema: Option>>, + pub adjusted_sopr_30d_ema: Option>>, // === Sell Side Risk === - pub dateindex_to_sell_side_risk_ratio: EagerVec>, - pub dateindex_to_sell_side_risk_ratio_7d_ema: EagerVec>, - pub dateindex_to_sell_side_risk_ratio_30d_ema: EagerVec>, + pub sell_side_risk_ratio: EagerVec>, + pub sell_side_risk_ratio_7d_ema: EagerVec>, + pub sell_side_risk_ratio_30d_ema: EagerVec>, // === Net Realized PnL Deltas === - pub indexes_to_net_realized_pnl_cumulative_30d_delta: ComputedDateLast, - pub indexes_to_net_realized_pnl_cumulative_30d_delta_rel_to_realized_cap: - ComputedDateLast, - pub indexes_to_net_realized_pnl_cumulative_30d_delta_rel_to_market_cap: - ComputedDateLast, + pub net_realized_pnl_cumulative_30d_delta: ComputedDateLast, + pub net_realized_pnl_cumulative_30d_delta_rel_to_realized_cap: ComputedDateLast, + pub net_realized_pnl_cumulative_30d_delta_rel_to_market_cap: ComputedDateLast, } impl RealizedMetrics { @@ -99,26 +86,44 @@ impl RealizedMetrics { let extended = cfg.extended(); let compute_adjusted = 
cfg.compute_adjusted(); - let height_to_realized_loss: EagerVec> = - EagerVec::forced_import(cfg.db, &cfg.name("realized_loss"), cfg.version)?; - - let indexes_to_realized_loss = DerivedComputedBlockSumCum::forced_import( + // Import combined types using forced_import which handles height + derived + let realized_cap = ComputedBlockLast::forced_import( cfg.db, - &cfg.name("realized_loss"), - height_to_realized_loss.boxed_clone(), + &cfg.name("realized_cap"), cfg.version, cfg.indexes, )?; - let indexes_to_neg_realized_loss = LazyBlockSumCum::from_derived::( + let realized_profit = ComputedBlockSumCum::forced_import( + cfg.db, + &cfg.name("realized_profit"), + cfg.version, + cfg.indexes, + )?; + + let realized_loss = ComputedBlockSumCum::forced_import( + cfg.db, + &cfg.name("realized_loss"), + cfg.version, + cfg.indexes, + )?; + + let neg_realized_loss = LazyBlockSumCum::from_computed::( &cfg.name("neg_realized_loss"), cfg.version + v1, - height_to_realized_loss.boxed_clone(), - &indexes_to_realized_loss, + realized_loss.height.boxed_clone(), + &realized_loss, ); + let net_realized_pnl = ComputedBlockSumCum::forced_import( + cfg.db, + &cfg.name("net_realized_pnl"), + cfg.version, + cfg.indexes, + )?; + // realized_value is the source for total_realized_pnl (they're identical) - let indexes_to_realized_value = ComputedBlockSum::forced_import( + let realized_value = ComputedBlockSum::forced_import( cfg.db, &cfg.name("realized_value"), cfg.version, @@ -126,132 +131,92 @@ impl RealizedMetrics { )?; // total_realized_pnl is a lazy alias to realized_value - let indexes_to_total_realized_pnl = LazyBlockSum::from_computed::( + let total_realized_pnl = LazyBlockSum::from_computed::( &cfg.name("total_realized_pnl"), cfg.version + v1, - indexes_to_realized_value.height.boxed_clone(), - &indexes_to_realized_value, + realized_value.height.boxed_clone(), + &realized_value, ); - // Extract vecs needed for lazy ratio construction - let height_to_realized_cap: EagerVec> = - 
EagerVec::forced_import(cfg.db, &cfg.name("realized_cap"), cfg.version)?; - - let indexes_to_realized_cap = DerivedComputedBlockLast::forced_import( - cfg.db, - &cfg.name("realized_cap"), - height_to_realized_cap.boxed_clone(), - cfg.version, - cfg.indexes, - )?; - - let height_to_realized_profit: EagerVec> = - EagerVec::forced_import(cfg.db, &cfg.name("realized_profit"), cfg.version)?; - - let indexes_to_realized_profit = DerivedComputedBlockSumCum::forced_import( - cfg.db, - &cfg.name("realized_profit"), - height_to_realized_profit.boxed_clone(), - cfg.version, - cfg.indexes, - )?; - - let indexes_to_net_realized_pnl = ComputedBlockSumCum::forced_import( - cfg.db, - &cfg.name("net_realized_pnl"), - cfg.version, - cfg.indexes, - )?; - - // Construct lazy ratio vecs (before struct assignment to satisfy borrow checker) - let indexes_to_realized_profit_rel_to_realized_cap = - BinaryBlockSumCumLast::from_derived::( + // Construct lazy ratio vecs + let realized_profit_rel_to_realized_cap = + BinaryBlockSumCum::from_computed_last::( &cfg.name("realized_profit_rel_to_realized_cap"), cfg.version + v1, - height_to_realized_profit.boxed_clone(), - height_to_realized_cap.boxed_clone(), - &indexes_to_realized_profit, - &indexes_to_realized_cap, + realized_profit.height.boxed_clone(), + realized_cap.height.boxed_clone(), + &realized_profit, + &realized_cap, ); - let indexes_to_realized_loss_rel_to_realized_cap = - BinaryBlockSumCumLast::from_derived::( + let realized_loss_rel_to_realized_cap = + BinaryBlockSumCum::from_computed_last::( &cfg.name("realized_loss_rel_to_realized_cap"), cfg.version + v1, - height_to_realized_loss.boxed_clone(), - height_to_realized_cap.boxed_clone(), - &indexes_to_realized_loss, - &indexes_to_realized_cap, + realized_loss.height.boxed_clone(), + realized_cap.height.boxed_clone(), + &realized_loss, + &realized_cap, ); - let indexes_to_net_realized_pnl_rel_to_realized_cap = - BinaryBlockSumCumLast::from_computed_derived::( + let 
net_realized_pnl_rel_to_realized_cap = + BinaryBlockSumCum::from_computed_last::( &cfg.name("net_realized_pnl_rel_to_realized_cap"), cfg.version + v1, - indexes_to_net_realized_pnl.height.boxed_clone(), - height_to_realized_cap.boxed_clone(), - &indexes_to_net_realized_pnl, - &indexes_to_realized_cap, + net_realized_pnl.height.boxed_clone(), + realized_cap.height.boxed_clone(), + &net_realized_pnl, + &realized_cap, ); - let indexes_to_realized_price = ComputedBlockLast::forced_import( + let realized_price = ComputedBlockLast::forced_import( cfg.db, &cfg.name("realized_price"), cfg.version + v1, cfg.indexes, )?; - let height_to_value_created = - EagerVec::forced_import(cfg.db, &cfg.name("value_created"), cfg.version)?; - let height_to_value_destroyed = - EagerVec::forced_import(cfg.db, &cfg.name("value_destroyed"), cfg.version)?; - - let indexes_to_value_created = DerivedComputedBlockSum::forced_import( + let value_created = ComputedBlockSum::forced_import( cfg.db, &cfg.name("value_created"), - height_to_value_created.boxed_clone(), cfg.version, cfg.indexes, )?; - let indexes_to_value_destroyed = DerivedComputedBlockSum::forced_import( + + let value_destroyed = ComputedBlockSum::forced_import( cfg.db, &cfg.name("value_destroyed"), - height_to_value_destroyed.boxed_clone(), cfg.version, cfg.indexes, )?; // Create lazy adjusted vecs if compute_adjusted and up_to_1h is available - let indexes_to_adjusted_value_created = + let adjusted_value_created = (compute_adjusted && cfg.up_to_1h_realized.is_some()).then(|| { let up_to_1h = cfg.up_to_1h_realized.unwrap(); - BinaryBlockSum::from_derived::( + BinaryBlockSum::from_computed::( &cfg.name("adjusted_value_created"), cfg.version, - height_to_value_created.boxed_clone(), - up_to_1h.height_to_value_created.boxed_clone(), - &indexes_to_value_created, - &up_to_1h.indexes_to_value_created, + &value_created, + &up_to_1h.value_created, ) }); - let indexes_to_adjusted_value_destroyed = + let adjusted_value_destroyed = 
(compute_adjusted && cfg.up_to_1h_realized.is_some()).then(|| { let up_to_1h = cfg.up_to_1h_realized.unwrap(); - BinaryBlockSum::from_derived::( + BinaryBlockSum::from_computed::( &cfg.name("adjusted_value_destroyed"), cfg.version, - height_to_value_destroyed.boxed_clone(), - up_to_1h.height_to_value_destroyed.boxed_clone(), - &indexes_to_value_destroyed, - &up_to_1h.indexes_to_value_destroyed, + &value_destroyed, + &up_to_1h.value_destroyed, ) }); // Create realized_price_extra first so we can reference its ratio for MVRV proxy - let indexes_to_realized_price_extra = ComputedRatioVecsDate::forced_import( + let realized_price_extra = ComputedRatioVecsDate::forced_import( cfg.db, &cfg.name("realized_price"), - Some(&indexes_to_realized_price), + Some(&realized_price), cfg.version + v1, cfg.indexes, extended, @@ -260,21 +225,18 @@ impl RealizedMetrics { // MVRV is a lazy proxy for realized_price_extra.ratio // ratio = close / realized_price = market_cap / realized_cap = MVRV - let indexes_to_mvrv = LazyDateLast::from_source::( + let mvrv = LazyDateLast::from_source::( &cfg.name("mvrv"), cfg.version, - &indexes_to_realized_price_extra.ratio, + &realized_price_extra.ratio, ); Ok(Self { // === Realized Cap === - height_to_realized_cap, - indexes_to_realized_cap, - - indexes_to_realized_price_extra, - indexes_to_realized_price, - indexes_to_mvrv, - indexes_to_realized_cap_rel_to_own_market_cap: extended + realized_cap, + realized_price, + realized_price_extra, + realized_cap_rel_to_own_market_cap: extended .then(|| { ComputedBlockLast::forced_import( cfg.db, @@ -284,30 +246,31 @@ impl RealizedMetrics { ) }) .transpose()?, - indexes_to_realized_cap_30d_delta: ComputedDateLast::forced_import( + realized_cap_30d_delta: ComputedDateLast::forced_import( cfg.db, &cfg.name("realized_cap_30d_delta"), cfg.version, cfg.indexes, )?, + // === MVRV === + mvrv, + // === Realized Profit/Loss === - height_to_realized_profit, - indexes_to_realized_profit, - height_to_realized_loss, - 
indexes_to_realized_loss, - indexes_to_neg_realized_loss, - indexes_to_net_realized_pnl, - indexes_to_realized_value, + realized_profit, + realized_loss, + neg_realized_loss, + net_realized_pnl, + realized_value, // === Realized vs Realized Cap Ratios (lazy) === - indexes_to_realized_profit_rel_to_realized_cap, - indexes_to_realized_loss_rel_to_realized_cap, - indexes_to_net_realized_pnl_rel_to_realized_cap, + realized_profit_rel_to_realized_cap, + realized_loss_rel_to_realized_cap, + net_realized_pnl_rel_to_realized_cap, // === Total Realized PnL === - indexes_to_total_realized_pnl, - dateindex_to_realized_profit_to_loss_ratio: extended + total_realized_pnl, + realized_profit_to_loss_ratio: extended .then(|| { EagerVec::forced_import( cfg.db, @@ -318,37 +281,27 @@ impl RealizedMetrics { .transpose()?, // === Value Created/Destroyed === - height_to_value_created, - indexes_to_value_created, - height_to_value_destroyed, - indexes_to_value_destroyed, + value_created, + value_destroyed, // === Adjusted Value (lazy: cohort - up_to_1h) === - indexes_to_adjusted_value_created, - indexes_to_adjusted_value_destroyed, + adjusted_value_created, + adjusted_value_destroyed, // === SOPR === - dateindex_to_sopr: EagerVec::forced_import( - cfg.db, - &cfg.name("sopr"), - cfg.version + v1, - )?, - dateindex_to_sopr_7d_ema: EagerVec::forced_import( - cfg.db, - &cfg.name("sopr_7d_ema"), - cfg.version + v1, - )?, - dateindex_to_sopr_30d_ema: EagerVec::forced_import( + sopr: EagerVec::forced_import(cfg.db, &cfg.name("sopr"), cfg.version + v1)?, + sopr_7d_ema: EagerVec::forced_import(cfg.db, &cfg.name("sopr_7d_ema"), cfg.version + v1)?, + sopr_30d_ema: EagerVec::forced_import( cfg.db, &cfg.name("sopr_30d_ema"), cfg.version + v1, )?, - dateindex_to_adjusted_sopr: compute_adjusted + adjusted_sopr: compute_adjusted .then(|| { EagerVec::forced_import(cfg.db, &cfg.name("adjusted_sopr"), cfg.version + v1) }) .transpose()?, - dateindex_to_adjusted_sopr_7d_ema: compute_adjusted + 
adjusted_sopr_7d_ema: compute_adjusted .then(|| { EagerVec::forced_import( cfg.db, @@ -357,7 +310,7 @@ impl RealizedMetrics { ) }) .transpose()?, - dateindex_to_adjusted_sopr_30d_ema: compute_adjusted + adjusted_sopr_30d_ema: compute_adjusted .then(|| { EagerVec::forced_import( cfg.db, @@ -368,37 +321,37 @@ impl RealizedMetrics { .transpose()?, // === Sell Side Risk === - dateindex_to_sell_side_risk_ratio: EagerVec::forced_import( + sell_side_risk_ratio: EagerVec::forced_import( cfg.db, &cfg.name("sell_side_risk_ratio"), cfg.version + v1, )?, - dateindex_to_sell_side_risk_ratio_7d_ema: EagerVec::forced_import( + sell_side_risk_ratio_7d_ema: EagerVec::forced_import( cfg.db, &cfg.name("sell_side_risk_ratio_7d_ema"), cfg.version + v1, )?, - dateindex_to_sell_side_risk_ratio_30d_ema: EagerVec::forced_import( + sell_side_risk_ratio_30d_ema: EagerVec::forced_import( cfg.db, &cfg.name("sell_side_risk_ratio_30d_ema"), cfg.version + v1, )?, // === Net Realized PnL Deltas === - indexes_to_net_realized_pnl_cumulative_30d_delta: ComputedDateLast::forced_import( + net_realized_pnl_cumulative_30d_delta: ComputedDateLast::forced_import( cfg.db, &cfg.name("net_realized_pnl_cumulative_30d_delta"), cfg.version + v3, cfg.indexes, )?, - indexes_to_net_realized_pnl_cumulative_30d_delta_rel_to_realized_cap: + net_realized_pnl_cumulative_30d_delta_rel_to_realized_cap: ComputedDateLast::forced_import( cfg.db, &cfg.name("net_realized_pnl_cumulative_30d_delta_rel_to_realized_cap"), cfg.version + v3, cfg.indexes, )?, - indexes_to_net_realized_pnl_cumulative_30d_delta_rel_to_market_cap: + net_realized_pnl_cumulative_30d_delta_rel_to_market_cap: ComputedDateLast::forced_import( cfg.db, &cfg.name("net_realized_pnl_cumulative_30d_delta_rel_to_market_cap"), @@ -410,25 +363,29 @@ impl RealizedMetrics { /// Get minimum length across height-indexed vectors written in block loop. 
pub fn min_stateful_height_len(&self) -> usize { - self.height_to_realized_cap + self.realized_cap + .height .len() - .min(self.height_to_realized_profit.len()) - .min(self.height_to_realized_loss.len()) - .min(self.height_to_value_created.len()) - .min(self.height_to_value_destroyed.len()) + .min(self.realized_profit.height.len()) + .min(self.realized_loss.height.len()) + .min(self.value_created.height.len()) + .min(self.value_destroyed.height.len()) } /// Push realized state values to height-indexed vectors. pub fn truncate_push(&mut self, height: Height, state: &RealizedState) -> Result<()> { - self.height_to_realized_cap - .truncate_push(height, state.cap)?; - self.height_to_realized_profit + self.realized_cap.height.truncate_push(height, state.cap)?; + self.realized_profit + .height .truncate_push(height, state.profit)?; - self.height_to_realized_loss + self.realized_loss + .height .truncate_push(height, state.loss)?; - self.height_to_value_created + self.value_created + .height .truncate_push(height, state.value_created)?; - self.height_to_value_destroyed + self.value_destroyed + .height .truncate_push(height, state.value_destroyed)?; Ok(()) @@ -436,22 +393,22 @@ impl RealizedMetrics { /// Write height-indexed vectors to disk. pub fn write(&mut self) -> Result<()> { - self.height_to_realized_cap.write()?; - self.height_to_realized_profit.write()?; - self.height_to_realized_loss.write()?; - self.height_to_value_created.write()?; - self.height_to_value_destroyed.write()?; + self.realized_cap.height.write()?; + self.realized_profit.height.write()?; + self.realized_loss.height.write()?; + self.value_created.height.write()?; + self.value_destroyed.height.write()?; Ok(()) } /// Returns a parallel iterator over all vecs for parallel writing. 
pub fn par_iter_mut(&mut self) -> impl ParallelIterator { [ - &mut self.height_to_realized_cap as &mut dyn AnyStoredVec, - &mut self.height_to_realized_profit, - &mut self.height_to_realized_loss, - &mut self.height_to_value_created, - &mut self.height_to_value_destroyed, + &mut self.realized_cap.height as &mut dyn AnyStoredVec, + &mut self.realized_profit.height, + &mut self.realized_loss.height, + &mut self.value_created.height, + &mut self.value_destroyed.height, ] .into_par_iter() } @@ -469,43 +426,43 @@ impl RealizedMetrics { others: &[&Self], exit: &Exit, ) -> Result<()> { - self.height_to_realized_cap.compute_sum_of_others( + self.realized_cap.height.compute_sum_of_others( starting_indexes.height, &others .iter() - .map(|v| &v.height_to_realized_cap) + .map(|v| &v.realized_cap.height) .collect::>(), exit, )?; - self.height_to_realized_profit.compute_sum_of_others( + self.realized_profit.height.compute_sum_of_others( starting_indexes.height, &others .iter() - .map(|v| &v.height_to_realized_profit) + .map(|v| &v.realized_profit.height) .collect::>(), exit, )?; - self.height_to_realized_loss.compute_sum_of_others( + self.realized_loss.height.compute_sum_of_others( starting_indexes.height, &others .iter() - .map(|v| &v.height_to_realized_loss) + .map(|v| &v.realized_loss.height) .collect::>(), exit, )?; - self.height_to_value_created.compute_sum_of_others( + self.value_created.height.compute_sum_of_others( starting_indexes.height, &others .iter() - .map(|v| &v.height_to_value_created) + .map(|v| &v.value_created.height) .collect::>(), exit, )?; - self.height_to_value_destroyed.compute_sum_of_others( + self.value_destroyed.height.compute_sum_of_others( starting_indexes.height, &others .iter() - .map(|v| &v.height_to_value_destroyed) + .map(|v| &v.value_destroyed.height) .collect::>(), exit, )?; @@ -520,34 +477,17 @@ impl RealizedMetrics { starting_indexes: &ComputeIndexes, exit: &Exit, ) -> Result<()> { - self.indexes_to_realized_cap.derive_from( - indexes, - 
starting_indexes, - &self.height_to_realized_cap, - exit, - )?; - - self.indexes_to_realized_profit.derive_from( - indexes, - starting_indexes, - &self.height_to_realized_profit, - exit, - )?; - - self.indexes_to_realized_loss.derive_from( - indexes, - starting_indexes, - &self.height_to_realized_loss, - exit, - )?; + self.realized_cap.compute_rest(indexes, starting_indexes, exit)?; + self.realized_profit.compute_rest(indexes, starting_indexes, exit)?; + self.realized_loss.compute_rest(indexes, starting_indexes, exit)?; // net_realized_pnl = profit - loss - self.indexes_to_net_realized_pnl + self.net_realized_pnl .compute_all(indexes, starting_indexes, exit, |vec| { vec.compute_subtract( starting_indexes.height, - &self.height_to_realized_profit, - &self.height_to_realized_loss, + &self.realized_profit.height, + &self.realized_loss.height, exit, )?; Ok(()) @@ -556,30 +496,19 @@ impl RealizedMetrics { // realized_value = profit + loss // Note: total_realized_pnl is a lazy alias to realized_value since both // compute profit + loss with sum aggregation, making them identical. 
- self.indexes_to_realized_value + self.realized_value .compute_all(indexes, starting_indexes, exit, |vec| { vec.compute_add( starting_indexes.height, - &self.height_to_realized_profit, - &self.height_to_realized_loss, + &self.realized_profit.height, + &self.realized_loss.height, exit, )?; Ok(()) })?; - self.indexes_to_value_created.derive_from( - indexes, - starting_indexes, - &self.height_to_value_created, - exit, - )?; - - self.indexes_to_value_destroyed.derive_from( - indexes, - starting_indexes, - &self.height_to_value_destroyed, - exit, - )?; + self.value_created.compute_rest(indexes, starting_indexes, exit)?; + self.value_destroyed.compute_rest(indexes, starting_indexes, exit)?; Ok(()) } @@ -597,11 +526,11 @@ impl RealizedMetrics { exit: &Exit, ) -> Result<()> { // realized_price = realized_cap / supply - self.indexes_to_realized_price + self.realized_price .compute_all(indexes, starting_indexes, exit, |vec| { vec.compute_divide( starting_indexes.height, - &self.height_to_realized_cap, + &self.realized_cap.height, height_to_supply, exit, )?; @@ -609,20 +538,20 @@ impl RealizedMetrics { })?; if let Some(price) = price { - self.indexes_to_realized_price_extra.compute_rest( + self.realized_price_extra.compute_rest( price, starting_indexes, exit, - Some(&self.indexes_to_realized_price.dateindex.0), + Some(&self.realized_price.dateindex.0), )?; } // realized_cap_30d_delta - self.indexes_to_realized_cap_30d_delta + self.realized_cap_30d_delta .compute_all(starting_indexes, exit, |vec| { vec.compute_change( starting_indexes.dateindex, - &self.indexes_to_realized_cap.dateindex.0, + &self.realized_cap.dateindex.0, 30, exit, )?; @@ -630,32 +559,24 @@ impl RealizedMetrics { })?; // SOPR = value_created / value_destroyed - self.dateindex_to_sopr.compute_divide( + self.sopr.compute_divide( starting_indexes.dateindex, - &self.indexes_to_value_created.dateindex.0, - &self.indexes_to_value_destroyed.dateindex.0, + &self.value_created.dateindex.0, + 
&self.value_destroyed.dateindex.0, exit, )?; - self.dateindex_to_sopr_7d_ema.compute_ema( - starting_indexes.dateindex, - &self.dateindex_to_sopr, - 7, - exit, - )?; + self.sopr_7d_ema + .compute_ema(starting_indexes.dateindex, &self.sopr, 7, exit)?; - self.dateindex_to_sopr_30d_ema.compute_ema( - starting_indexes.dateindex, - &self.dateindex_to_sopr, - 30, - exit, - )?; + self.sopr_30d_ema + .compute_ema(starting_indexes.dateindex, &self.sopr, 30, exit)?; // Optional: adjusted SOPR (lazy: cohort - up_to_1h) if let (Some(adjusted_sopr), Some(adj_created), Some(adj_destroyed)) = ( - self.dateindex_to_adjusted_sopr.as_mut(), - self.indexes_to_adjusted_value_created.as_ref(), - self.indexes_to_adjusted_value_destroyed.as_ref(), + self.adjusted_sopr.as_mut(), + self.adjusted_value_created.as_ref(), + self.adjusted_value_destroyed.as_ref(), ) { adjusted_sopr.compute_divide( starting_indexes.dateindex, @@ -664,19 +585,19 @@ impl RealizedMetrics { exit, )?; - if let Some(ema_7d) = self.dateindex_to_adjusted_sopr_7d_ema.as_mut() { + if let Some(ema_7d) = self.adjusted_sopr_7d_ema.as_mut() { ema_7d.compute_ema( starting_indexes.dateindex, - self.dateindex_to_adjusted_sopr.as_ref().unwrap(), + self.adjusted_sopr.as_ref().unwrap(), 7, exit, )?; } - if let Some(ema_30d) = self.dateindex_to_adjusted_sopr_30d_ema.as_mut() { + if let Some(ema_30d) = self.adjusted_sopr_30d_ema.as_mut() { ema_30d.compute_ema( starting_indexes.dateindex, - self.dateindex_to_adjusted_sopr.as_ref().unwrap(), + self.adjusted_sopr.as_ref().unwrap(), 30, exit, )?; @@ -684,33 +605,29 @@ impl RealizedMetrics { } // sell_side_risk_ratio = realized_value / realized_cap - self.dateindex_to_sell_side_risk_ratio.compute_percentage( + self.sell_side_risk_ratio.compute_percentage( starting_indexes.dateindex, - &self.indexes_to_realized_value.dateindex.0, - &self.indexes_to_realized_cap.dateindex.0, + &self.realized_value.dateindex.0, + &self.realized_cap.dateindex.0, exit, )?; - 
self.dateindex_to_sell_side_risk_ratio_7d_ema.compute_ema( - starting_indexes.dateindex, - &self.dateindex_to_sell_side_risk_ratio, - 7, - exit, - )?; + self.sell_side_risk_ratio_7d_ema + .compute_ema(starting_indexes.dateindex, &self.sell_side_risk_ratio, 7, exit)?; - self.dateindex_to_sell_side_risk_ratio_30d_ema.compute_ema( + self.sell_side_risk_ratio_30d_ema.compute_ema( starting_indexes.dateindex, - &self.dateindex_to_sell_side_risk_ratio, + &self.sell_side_risk_ratio, 30, exit, )?; // Net realized PnL cumulative 30d delta - self.indexes_to_net_realized_pnl_cumulative_30d_delta + self.net_realized_pnl_cumulative_30d_delta .compute_all(starting_indexes, exit, |vec| { vec.compute_change( starting_indexes.dateindex, - &self.indexes_to_net_realized_pnl.dateindex.cumulative.0, + &self.net_realized_pnl.dateindex.cumulative.0, 30, exit, )?; @@ -718,14 +635,12 @@ impl RealizedMetrics { })?; // Relative to realized cap - self.indexes_to_net_realized_pnl_cumulative_30d_delta_rel_to_realized_cap + self.net_realized_pnl_cumulative_30d_delta_rel_to_realized_cap .compute_all(starting_indexes, exit, |vec| { vec.compute_percentage( starting_indexes.dateindex, - &self - .indexes_to_net_realized_pnl_cumulative_30d_delta - .dateindex, - &self.indexes_to_realized_cap.dateindex.0, + &self.net_realized_pnl_cumulative_30d_delta.dateindex, + &self.realized_cap.dateindex.0, exit, )?; Ok(()) @@ -733,13 +648,11 @@ impl RealizedMetrics { // Relative to market cap if let Some(dateindex_to_market_cap) = dateindex_to_market_cap { - self.indexes_to_net_realized_pnl_cumulative_30d_delta_rel_to_market_cap + self.net_realized_pnl_cumulative_30d_delta_rel_to_market_cap .compute_all(starting_indexes, exit, |vec| { vec.compute_percentage( starting_indexes.dateindex, - &self - .indexes_to_net_realized_pnl_cumulative_30d_delta - .dateindex, + &self.net_realized_pnl_cumulative_30d_delta.dateindex, dateindex_to_market_cap, exit, )?; @@ -749,13 +662,13 @@ impl RealizedMetrics { // Optional: 
realized_cap_rel_to_own_market_cap if let (Some(rel_vec), Some(height_to_market_cap)) = ( - self.indexes_to_realized_cap_rel_to_own_market_cap.as_mut(), + self.realized_cap_rel_to_own_market_cap.as_mut(), height_to_market_cap, ) { rel_vec.compute_all(indexes, starting_indexes, exit, |vec| { vec.compute_percentage( starting_indexes.height, - &self.height_to_realized_cap, + &self.realized_cap.height, height_to_market_cap, exit, )?; @@ -764,11 +677,11 @@ impl RealizedMetrics { } // Optional: realized_profit_to_loss_ratio - if let Some(ratio) = self.dateindex_to_realized_profit_to_loss_ratio.as_mut() { + if let Some(ratio) = self.realized_profit_to_loss_ratio.as_mut() { ratio.compute_divide( starting_indexes.dateindex, - &self.indexes_to_realized_profit.dateindex.sum.0, - &self.indexes_to_realized_loss.dateindex.sum.0, + &self.realized_profit.dateindex.sum.0, + &self.realized_loss.dateindex.sum.0, exit, )?; } diff --git a/crates/brk_computer/src/distribution/metrics/relative.rs b/crates/brk_computer/src/distribution/metrics/relative.rs index 90c9b0a18..15c7b6cae 100644 --- a/crates/brk_computer/src/distribution/metrics/relative.rs +++ b/crates/brk_computer/src/distribution/metrics/relative.rs @@ -1,10 +1,10 @@ use brk_error::Result; use brk_traversable::Traversable; -use brk_types::{Bitcoin, Dollars, Height, Sats, StoredF32, StoredF64, Version}; -use vecdb::{IterableCloneableVec, LazyVecFrom2}; +use brk_types::{Dollars, Sats, StoredF32, StoredF64, Version}; +use vecdb::IterableCloneableVec; use crate::internal::{ - BinaryDateLast, NegPercentageDollarsF32, NegRatio32, PercentageBtcF64, + LazyBinaryBlockLast, LazyBinaryDateLast, NegPercentageDollarsF32, NegRatio32, PercentageDollarsF32, PercentageSatsF64, Ratio32, }; @@ -15,94 +15,49 @@ use super::{ImportConfig, SupplyMetrics, UnrealizedMetrics}; #[derive(Clone, Traversable)] pub struct RelativeMetrics { // === Supply Relative to Circulating Supply (lazy from global supply) === - // KISS: both sources are 
ComputedVecsDateLast - pub indexes_to_supply_rel_to_circulating_supply: - Option>, + pub supply_rel_to_circulating_supply: Option>, // === Supply in Profit/Loss Relative to Own Supply (lazy) === - pub height_to_supply_in_profit_rel_to_own_supply: - LazyVecFrom2, - pub height_to_supply_in_loss_rel_to_own_supply: - LazyVecFrom2, - // KISS: both unrealized and supply are now KISS types - pub indexes_to_supply_in_profit_rel_to_own_supply: - BinaryDateLast, - pub indexes_to_supply_in_loss_rel_to_own_supply: - BinaryDateLast, + pub supply_in_profit_rel_to_own_supply: LazyBinaryBlockLast, + pub supply_in_loss_rel_to_own_supply: LazyBinaryBlockLast, // === Supply in Profit/Loss Relative to Circulating Supply (lazy from global supply) === - pub height_to_supply_in_profit_rel_to_circulating_supply: - Option>, - pub height_to_supply_in_loss_rel_to_circulating_supply: - Option>, - // KISS: both unrealized and global_supply are now KISS types - pub indexes_to_supply_in_profit_rel_to_circulating_supply: - Option>, - pub indexes_to_supply_in_loss_rel_to_circulating_supply: - Option>, + pub supply_in_profit_rel_to_circulating_supply: + Option>, + pub supply_in_loss_rel_to_circulating_supply: + Option>, // === Unrealized vs Market Cap (lazy from global market cap) === - pub height_to_unrealized_profit_rel_to_market_cap: - Option>, - pub height_to_unrealized_loss_rel_to_market_cap: - Option>, - pub height_to_neg_unrealized_loss_rel_to_market_cap: - Option>, - pub height_to_net_unrealized_pnl_rel_to_market_cap: - Option>, - // KISS: DerivedDateLast + ComputedVecsDateLast - pub indexes_to_unrealized_profit_rel_to_market_cap: - Option>, - pub indexes_to_unrealized_loss_rel_to_market_cap: - Option>, - pub indexes_to_neg_unrealized_loss_rel_to_market_cap: - Option>, - // KISS: both ComputedVecsDateLast - pub indexes_to_net_unrealized_pnl_rel_to_market_cap: - Option>, + pub unrealized_profit_rel_to_market_cap: + Option>, + pub unrealized_loss_rel_to_market_cap: Option>, + pub 
neg_unrealized_loss_rel_to_market_cap: + Option>, + pub net_unrealized_pnl_rel_to_market_cap: + Option>, // === NUPL (Net Unrealized Profit/Loss) === - // KISS: both ComputedVecsDateLast - pub indexes_to_nupl: Option>, + pub nupl: Option>, // === Unrealized vs Own Market Cap (lazy) === - pub height_to_unrealized_profit_rel_to_own_market_cap: - Option>, - pub height_to_unrealized_loss_rel_to_own_market_cap: - Option>, - pub height_to_neg_unrealized_loss_rel_to_own_market_cap: - Option>, - pub height_to_net_unrealized_pnl_rel_to_own_market_cap: - Option>, - // KISS: DerivedDateLast + ComputedVecsDateLast - pub indexes_to_unrealized_profit_rel_to_own_market_cap: - Option>, - pub indexes_to_unrealized_loss_rel_to_own_market_cap: - Option>, - pub indexes_to_neg_unrealized_loss_rel_to_own_market_cap: - Option>, - // KISS: both ComputedVecsDateLast - pub indexes_to_net_unrealized_pnl_rel_to_own_market_cap: - Option>, + pub unrealized_profit_rel_to_own_market_cap: + Option>, + pub unrealized_loss_rel_to_own_market_cap: + Option>, + pub neg_unrealized_loss_rel_to_own_market_cap: + Option>, + pub net_unrealized_pnl_rel_to_own_market_cap: + Option>, // === Unrealized vs Own Total Unrealized PnL (lazy) === - pub height_to_unrealized_profit_rel_to_own_total_unrealized_pnl: - Option>, - pub height_to_unrealized_loss_rel_to_own_total_unrealized_pnl: - Option>, - pub height_to_neg_unrealized_loss_rel_to_own_total_unrealized_pnl: - Option>, - pub height_to_net_unrealized_pnl_rel_to_own_total_unrealized_pnl: - Option>, - // KISS: DerivedDateLast + DerivedDateLast - pub indexes_to_unrealized_profit_rel_to_own_total_unrealized_pnl: - Option>, - pub indexes_to_unrealized_loss_rel_to_own_total_unrealized_pnl: - Option>, - pub indexes_to_neg_unrealized_loss_rel_to_own_total_unrealized_pnl: - Option>, - pub indexes_to_net_unrealized_pnl_rel_to_own_total_unrealized_pnl: - Option>, + pub unrealized_profit_rel_to_own_total_unrealized_pnl: + Option>, + pub 
unrealized_loss_rel_to_own_total_unrealized_pnl: + Option>, + pub neg_unrealized_loss_rel_to_own_total_unrealized_pnl: + Option>, + pub net_unrealized_pnl_rel_to_own_total_unrealized_pnl: + Option>, } impl RelativeMetrics { @@ -122,300 +77,222 @@ impl RelativeMetrics { let compute_rel_to_all = cfg.compute_rel_to_all(); // Global sources from "all" cohort - let global_supply_sats = all_supply.map(|s| &s.indexes_to_supply.sats); - let global_supply_sats_dateindex = all_supply.map(|s| &s.indexes_to_supply.sats_dateindex); - let global_supply_btc = all_supply.map(|s| &s.height_to_supply_value.bitcoin); - let global_market_cap = all_supply.and_then(|s| s.indexes_to_supply.dollars.as_ref()); - let global_market_cap_height = - all_supply.and_then(|s| s.height_to_supply_value.dollars.as_ref()); + let global_supply_sats_height = all_supply.map(|s| &s.supply.sats.height); + let global_supply_sats_difficultyepoch = all_supply.map(|s| &s.supply.sats.difficultyepoch); + let global_supply_sats_dates = all_supply.map(|s| &s.supply.sats.rest.dates); + let global_supply_sats_dateindex = all_supply.map(|s| &s.supply.sats.rest.dateindex); + let global_market_cap = all_supply.and_then(|s| s.supply.dollars.as_ref()); // Own market cap source - let own_market_cap = supply.indexes_to_supply.dollars.as_ref(); - let own_market_cap_height = supply.height_to_supply_value.dollars.as_ref(); + let own_market_cap = supply.supply.dollars.as_ref(); Ok(Self { // === Supply Relative to Circulating Supply (lazy from global supply) === - indexes_to_supply_rel_to_circulating_supply: (compute_rel_to_all - && global_supply_sats.is_some()) + supply_rel_to_circulating_supply: (compute_rel_to_all + && global_supply_sats_dates.is_some()) .then(|| { - BinaryDateLast::from_both_derived_last::( + LazyBinaryDateLast::from_both_derived_last::( &cfg.name("supply_rel_to_circulating_supply"), cfg.version + v1, - supply.indexes_to_supply.sats_dateindex.boxed_clone(), - &supply.indexes_to_supply.sats, + 
supply.supply.sats.rest.dateindex.boxed_clone(), + &supply.supply.sats.rest.dates, global_supply_sats_dateindex.unwrap().boxed_clone(), - global_supply_sats.unwrap(), + global_supply_sats_dates.unwrap(), ) }), // === Supply in Profit/Loss Relative to Own Supply (lazy) === - height_to_supply_in_profit_rel_to_own_supply: LazyVecFrom2::transformed::< - PercentageBtcF64, - >( - &cfg.name("supply_in_profit_rel_to_own_supply"), - cfg.version + v1, - unrealized - .height_to_supply_in_profit_value - .bitcoin - .boxed_clone(), - supply.height_to_supply_value.bitcoin.boxed_clone(), - ), - height_to_supply_in_loss_rel_to_own_supply: LazyVecFrom2::transformed::( - &cfg.name("supply_in_loss_rel_to_own_supply"), - cfg.version + v1, - unrealized - .height_to_supply_in_loss_value - .bitcoin - .boxed_clone(), - supply.height_to_supply_value.bitcoin.boxed_clone(), - ), - indexes_to_supply_in_profit_rel_to_own_supply: - BinaryDateLast::from_both_derived_last::( + supply_in_profit_rel_to_own_supply: + LazyBinaryBlockLast::from_height_difficultyepoch_dates::( &cfg.name("supply_in_profit_rel_to_own_supply"), cfg.version + v1, - unrealized.dateindex_to_supply_in_profit.boxed_clone(), - &unrealized.indexes_to_supply_in_profit.sats, - supply.indexes_to_supply.sats_dateindex.boxed_clone(), - &supply.indexes_to_supply.sats, + unrealized.supply_in_profit.height.boxed_clone(), + supply.supply.sats.height.boxed_clone(), + unrealized.supply_in_profit.difficultyepoch.boxed_clone(), + supply.supply.sats.difficultyepoch.boxed_clone(), + unrealized + .supply_in_profit + .indexes + .sats_dateindex + .boxed_clone(), + &unrealized.supply_in_profit.indexes.sats, + supply.supply.sats.rest.dateindex.boxed_clone(), + &supply.supply.sats.rest.dates, ), - indexes_to_supply_in_loss_rel_to_own_supply: - BinaryDateLast::from_both_derived_last::( + supply_in_loss_rel_to_own_supply: + LazyBinaryBlockLast::from_height_difficultyepoch_dates::( &cfg.name("supply_in_loss_rel_to_own_supply"), cfg.version + v1, - 
unrealized.dateindex_to_supply_in_loss.boxed_clone(), - &unrealized.indexes_to_supply_in_loss.sats, - supply.indexes_to_supply.sats_dateindex.boxed_clone(), - &supply.indexes_to_supply.sats, + unrealized.supply_in_loss.height.boxed_clone(), + supply.supply.sats.height.boxed_clone(), + unrealized.supply_in_loss.difficultyepoch.boxed_clone(), + supply.supply.sats.difficultyepoch.boxed_clone(), + unrealized + .supply_in_loss + .indexes + .sats_dateindex + .boxed_clone(), + &unrealized.supply_in_loss.indexes.sats, + supply.supply.sats.rest.dateindex.boxed_clone(), + &supply.supply.sats.rest.dates, ), // === Supply in Profit/Loss Relative to Circulating Supply (lazy from global supply) === - height_to_supply_in_profit_rel_to_circulating_supply: (compute_rel_to_all - && global_supply_btc.is_some()) + supply_in_profit_rel_to_circulating_supply: (compute_rel_to_all + && global_supply_sats_height.is_some()) .then(|| { - LazyVecFrom2::transformed::( + LazyBinaryBlockLast::from_height_difficultyepoch_dates::( &cfg.name("supply_in_profit_rel_to_circulating_supply"), cfg.version + v1, + unrealized.supply_in_profit.height.boxed_clone(), + global_supply_sats_height.unwrap().boxed_clone(), + unrealized.supply_in_profit.difficultyepoch.boxed_clone(), + global_supply_sats_difficultyepoch.unwrap().boxed_clone(), unrealized - .height_to_supply_in_profit_value - .bitcoin + .supply_in_profit + .indexes + .sats_dateindex .boxed_clone(), - global_supply_btc.unwrap().boxed_clone(), + &unrealized.supply_in_profit.indexes.sats, + global_supply_sats_dateindex.unwrap().boxed_clone(), + global_supply_sats_dates.unwrap(), ) }), - height_to_supply_in_loss_rel_to_circulating_supply: (compute_rel_to_all - && global_supply_btc.is_some()) + supply_in_loss_rel_to_circulating_supply: (compute_rel_to_all + && global_supply_sats_height.is_some()) .then(|| { - LazyVecFrom2::transformed::( + LazyBinaryBlockLast::from_height_difficultyepoch_dates::( &cfg.name("supply_in_loss_rel_to_circulating_supply"), 
cfg.version + v1, + unrealized.supply_in_loss.height.boxed_clone(), + global_supply_sats_height.unwrap().boxed_clone(), + unrealized.supply_in_loss.difficultyepoch.boxed_clone(), + global_supply_sats_difficultyepoch.unwrap().boxed_clone(), unrealized - .height_to_supply_in_loss_value - .bitcoin + .supply_in_loss + .indexes + .sats_dateindex .boxed_clone(), - global_supply_btc.unwrap().boxed_clone(), - ) - }), - indexes_to_supply_in_profit_rel_to_circulating_supply: (compute_rel_to_all - && global_supply_sats.is_some()) - .then(|| { - BinaryDateLast::from_both_derived_last::( - &cfg.name("supply_in_profit_rel_to_circulating_supply"), - cfg.version + v1, - unrealized.dateindex_to_supply_in_profit.boxed_clone(), - &unrealized.indexes_to_supply_in_profit.sats, + &unrealized.supply_in_loss.indexes.sats, global_supply_sats_dateindex.unwrap().boxed_clone(), - global_supply_sats.unwrap(), - ) - }), - indexes_to_supply_in_loss_rel_to_circulating_supply: (compute_rel_to_all - && global_supply_sats.is_some()) - .then(|| { - BinaryDateLast::from_both_derived_last::( - &cfg.name("supply_in_loss_rel_to_circulating_supply"), - cfg.version + v1, - unrealized.dateindex_to_supply_in_loss.boxed_clone(), - &unrealized.indexes_to_supply_in_loss.sats, - global_supply_sats_dateindex.unwrap().boxed_clone(), - global_supply_sats.unwrap(), + global_supply_sats_dates.unwrap(), ) }), // === Unrealized vs Market Cap (lazy from global market cap) === - height_to_unrealized_profit_rel_to_market_cap: global_market_cap_height.map(|mc| { - LazyVecFrom2::transformed::( - &cfg.name("unrealized_profit_rel_to_market_cap"), - cfg.version, - unrealized.height_to_unrealized_profit.boxed_clone(), - mc.boxed_clone(), - ) - }), - height_to_unrealized_loss_rel_to_market_cap: global_market_cap_height.map(|mc| { - LazyVecFrom2::transformed::( - &cfg.name("unrealized_loss_rel_to_market_cap"), - cfg.version, - unrealized.height_to_unrealized_loss.boxed_clone(), - mc.boxed_clone(), - ) - }), - 
height_to_neg_unrealized_loss_rel_to_market_cap: global_market_cap_height.map(|mc| { - LazyVecFrom2::transformed::( - &cfg.name("neg_unrealized_loss_rel_to_market_cap"), - cfg.version, - unrealized.height_to_unrealized_loss.boxed_clone(), - mc.boxed_clone(), - ) - }), - height_to_net_unrealized_pnl_rel_to_market_cap: global_market_cap_height.map(|mc| { - LazyVecFrom2::transformed::( - &cfg.name("net_unrealized_pnl_rel_to_market_cap"), - cfg.version + v1, - unrealized.height_to_net_unrealized_pnl.boxed_clone(), - mc.boxed_clone(), - ) - }), - // KISS: market_cap is now ComputedVecsDateLast - indexes_to_unrealized_profit_rel_to_market_cap: global_market_cap.map(|mc| { - BinaryDateLast::from_derived_last_and_computed_last::( - &cfg.name("unrealized_profit_rel_to_market_cap"), - cfg.version + v2, - unrealized.dateindex_to_unrealized_profit.boxed_clone(), - &unrealized.indexes_to_unrealized_profit, - mc, - ) - }), - indexes_to_unrealized_loss_rel_to_market_cap: global_market_cap.map(|mc| { - BinaryDateLast::from_derived_last_and_computed_last::( - &cfg.name("unrealized_loss_rel_to_market_cap"), - cfg.version + v2, - unrealized.dateindex_to_unrealized_loss.boxed_clone(), - &unrealized.indexes_to_unrealized_loss, - mc, - ) - }), - indexes_to_neg_unrealized_loss_rel_to_market_cap: global_market_cap.map(|mc| { - BinaryDateLast::from_derived_last_and_computed_last::( + unrealized_profit_rel_to_market_cap: + global_market_cap.map(|mc| { + LazyBinaryBlockLast::from_computed_height_date_and_block_last::< + PercentageDollarsF32, + >( + &cfg.name("unrealized_profit_rel_to_market_cap"), + cfg.version + v2, + &unrealized.unrealized_profit, + mc, + ) + }), + unrealized_loss_rel_to_market_cap: + global_market_cap.map(|mc| { + LazyBinaryBlockLast::from_computed_height_date_and_block_last::< + PercentageDollarsF32, + >( + &cfg.name("unrealized_loss_rel_to_market_cap"), + cfg.version + v2, + &unrealized.unrealized_loss, + mc, + ) + }), + neg_unrealized_loss_rel_to_market_cap: 
global_market_cap.map(|mc| { + LazyBinaryBlockLast::from_computed_height_date_and_block_last::< + NegPercentageDollarsF32, + >( &cfg.name("neg_unrealized_loss_rel_to_market_cap"), cfg.version + v2, - unrealized.dateindex_to_unrealized_loss.boxed_clone(), - &unrealized.indexes_to_unrealized_loss, + &unrealized.unrealized_loss, mc, ) }), - indexes_to_net_unrealized_pnl_rel_to_market_cap: global_market_cap.map(|mc| { - BinaryDateLast::from_computed_both_last::( + net_unrealized_pnl_rel_to_market_cap: global_market_cap.map(|mc| { + LazyBinaryBlockLast::from_binary_block_and_computed_block_last::< + PercentageDollarsF32, + _, + _, + >( &cfg.name("net_unrealized_pnl_rel_to_market_cap"), cfg.version + v2, - &unrealized.indexes_to_net_unrealized_pnl, + &unrealized.net_unrealized_pnl, mc, ) }), // NUPL is a proxy for net_unrealized_pnl_rel_to_market_cap - indexes_to_nupl: global_market_cap.map(|mc| { - BinaryDateLast::from_computed_both_last::( + nupl: global_market_cap.map(|mc| { + LazyBinaryBlockLast::from_binary_block_and_computed_block_last::< + PercentageDollarsF32, + _, + _, + >( &cfg.name("nupl"), cfg.version + v2, - &unrealized.indexes_to_net_unrealized_pnl, + &unrealized.net_unrealized_pnl, mc, ) }), // === Unrealized vs Own Market Cap (lazy, optional) === - height_to_unrealized_profit_rel_to_own_market_cap: (extended && compute_rel_to_all) - .then(|| { - own_market_cap_height.map(|mc| { - LazyVecFrom2::transformed::( - &cfg.name("unrealized_profit_rel_to_own_market_cap"), - cfg.version + v1, - unrealized.height_to_unrealized_profit.boxed_clone(), - mc.boxed_clone(), - ) - }) - }) - .flatten(), - height_to_unrealized_loss_rel_to_own_market_cap: (extended && compute_rel_to_all) - .then(|| { - own_market_cap_height.map(|mc| { - LazyVecFrom2::transformed::( - &cfg.name("unrealized_loss_rel_to_own_market_cap"), - cfg.version + v1, - unrealized.height_to_unrealized_loss.boxed_clone(), - mc.boxed_clone(), - ) - }) - }) - .flatten(), - 
height_to_neg_unrealized_loss_rel_to_own_market_cap: (extended && compute_rel_to_all) - .then(|| { - own_market_cap_height.map(|mc| { - LazyVecFrom2::transformed::( - &cfg.name("neg_unrealized_loss_rel_to_own_market_cap"), - cfg.version + v1, - unrealized.height_to_unrealized_loss.boxed_clone(), - mc.boxed_clone(), - ) - }) - }) - .flatten(), - height_to_net_unrealized_pnl_rel_to_own_market_cap: (extended && compute_rel_to_all) - .then(|| { - own_market_cap_height.map(|mc| { - LazyVecFrom2::transformed::( - &cfg.name("net_unrealized_pnl_rel_to_own_market_cap"), - cfg.version + v2, - unrealized.height_to_net_unrealized_pnl.boxed_clone(), - mc.boxed_clone(), - ) - }) - }) - .flatten(), - // KISS: own_market_cap is now ComputedVecsDateLast - indexes_to_unrealized_profit_rel_to_own_market_cap: (extended && compute_rel_to_all) + unrealized_profit_rel_to_own_market_cap: (extended && compute_rel_to_all) .then(|| { own_market_cap.map(|mc| { - BinaryDateLast::from_derived_last_and_computed_last::( + LazyBinaryBlockLast::from_computed_height_date_and_block_last::< + PercentageDollarsF32, + >( &cfg.name("unrealized_profit_rel_to_own_market_cap"), cfg.version + v2, - unrealized.dateindex_to_unrealized_profit.boxed_clone(), - &unrealized.indexes_to_unrealized_profit, + &unrealized.unrealized_profit, mc, ) }) }) .flatten(), - indexes_to_unrealized_loss_rel_to_own_market_cap: (extended && compute_rel_to_all) + unrealized_loss_rel_to_own_market_cap: (extended && compute_rel_to_all) .then(|| { own_market_cap.map(|mc| { - BinaryDateLast::from_derived_last_and_computed_last::( + LazyBinaryBlockLast::from_computed_height_date_and_block_last::< + PercentageDollarsF32, + >( &cfg.name("unrealized_loss_rel_to_own_market_cap"), cfg.version + v2, - unrealized.dateindex_to_unrealized_loss.boxed_clone(), - &unrealized.indexes_to_unrealized_loss, + &unrealized.unrealized_loss, mc, ) }) }) .flatten(), - indexes_to_neg_unrealized_loss_rel_to_own_market_cap: (extended && compute_rel_to_all) + 
neg_unrealized_loss_rel_to_own_market_cap: (extended && compute_rel_to_all) .then(|| { own_market_cap.map(|mc| { - BinaryDateLast::from_derived_last_and_computed_last::( + LazyBinaryBlockLast::from_computed_height_date_and_block_last::< + NegPercentageDollarsF32, + >( &cfg.name("neg_unrealized_loss_rel_to_own_market_cap"), cfg.version + v2, - unrealized.dateindex_to_unrealized_loss.boxed_clone(), - &unrealized.indexes_to_unrealized_loss, + &unrealized.unrealized_loss, mc, ) }) }) .flatten(), - indexes_to_net_unrealized_pnl_rel_to_own_market_cap: (extended && compute_rel_to_all) + net_unrealized_pnl_rel_to_own_market_cap: (extended && compute_rel_to_all) .then(|| { own_market_cap.map(|mc| { - BinaryDateLast::from_computed_both_last::( + LazyBinaryBlockLast::from_binary_block_and_computed_block_last::< + PercentageDollarsF32, + _, + _, + >( &cfg.name("net_unrealized_pnl_rel_to_own_market_cap"), cfg.version + v2, - &unrealized.indexes_to_net_unrealized_pnl, + &unrealized.net_unrealized_pnl, mc, ) }) @@ -423,71 +300,36 @@ impl RelativeMetrics { .flatten(), // === Unrealized vs Own Total Unrealized PnL (lazy, optional) === - height_to_unrealized_profit_rel_to_own_total_unrealized_pnl: extended.then(|| { - LazyVecFrom2::transformed::( + unrealized_profit_rel_to_own_total_unrealized_pnl: extended.then(|| { + LazyBinaryBlockLast::from_computed_height_date_and_binary_block::( &cfg.name("unrealized_profit_rel_to_own_total_unrealized_pnl"), cfg.version, - unrealized.height_to_unrealized_profit.boxed_clone(), - unrealized.height_to_total_unrealized_pnl.boxed_clone(), + &unrealized.unrealized_profit, + &unrealized.total_unrealized_pnl, ) }), - height_to_unrealized_loss_rel_to_own_total_unrealized_pnl: extended.then(|| { - LazyVecFrom2::transformed::( + unrealized_loss_rel_to_own_total_unrealized_pnl: extended.then(|| { + LazyBinaryBlockLast::from_computed_height_date_and_binary_block::( &cfg.name("unrealized_loss_rel_to_own_total_unrealized_pnl"), cfg.version, - 
unrealized.height_to_unrealized_loss.boxed_clone(), - unrealized.height_to_total_unrealized_pnl.boxed_clone(), + &unrealized.unrealized_loss, + &unrealized.total_unrealized_pnl, ) }), - height_to_neg_unrealized_loss_rel_to_own_total_unrealized_pnl: extended.then(|| { - LazyVecFrom2::transformed::( + neg_unrealized_loss_rel_to_own_total_unrealized_pnl: extended.then(|| { + LazyBinaryBlockLast::from_computed_height_date_and_binary_block::( &cfg.name("neg_unrealized_loss_rel_to_own_total_unrealized_pnl"), cfg.version, - unrealized.height_to_unrealized_loss.boxed_clone(), - unrealized.height_to_total_unrealized_pnl.boxed_clone(), + &unrealized.unrealized_loss, + &unrealized.total_unrealized_pnl, ) }), - height_to_net_unrealized_pnl_rel_to_own_total_unrealized_pnl: extended.then(|| { - LazyVecFrom2::transformed::( + net_unrealized_pnl_rel_to_own_total_unrealized_pnl: extended.then(|| { + LazyBinaryBlockLast::from_both_binary_block::( &cfg.name("net_unrealized_pnl_rel_to_own_total_unrealized_pnl"), cfg.version + v1, - unrealized.height_to_net_unrealized_pnl.boxed_clone(), - unrealized.height_to_total_unrealized_pnl.boxed_clone(), - ) - }), - indexes_to_unrealized_profit_rel_to_own_total_unrealized_pnl: extended.then(|| { - BinaryDateLast::from_derived_last_and_computed_last::( - &cfg.name("unrealized_profit_rel_to_own_total_unrealized_pnl"), - cfg.version + v1, - unrealized.dateindex_to_unrealized_profit.boxed_clone(), - &unrealized.indexes_to_unrealized_profit, - &unrealized.indexes_to_total_unrealized_pnl, - ) - }), - indexes_to_unrealized_loss_rel_to_own_total_unrealized_pnl: extended.then(|| { - BinaryDateLast::from_derived_last_and_computed_last::( - &cfg.name("unrealized_loss_rel_to_own_total_unrealized_pnl"), - cfg.version + v1, - unrealized.dateindex_to_unrealized_loss.boxed_clone(), - &unrealized.indexes_to_unrealized_loss, - &unrealized.indexes_to_total_unrealized_pnl, - ) - }), - indexes_to_neg_unrealized_loss_rel_to_own_total_unrealized_pnl: extended.then(|| 
{ - BinaryDateLast::from_derived_last_and_computed_last::( - &cfg.name("neg_unrealized_loss_rel_to_own_total_unrealized_pnl"), - cfg.version + v1, - unrealized.dateindex_to_unrealized_loss.boxed_clone(), - &unrealized.indexes_to_unrealized_loss, - &unrealized.indexes_to_total_unrealized_pnl, - ) - }), - indexes_to_net_unrealized_pnl_rel_to_own_total_unrealized_pnl: extended.then(|| { - BinaryDateLast::from_computed_both_last::( - &cfg.name("net_unrealized_pnl_rel_to_own_total_unrealized_pnl"), - cfg.version + v1, - &unrealized.indexes_to_net_unrealized_pnl, - &unrealized.indexes_to_total_unrealized_pnl, + &unrealized.net_unrealized_pnl, + &unrealized.total_unrealized_pnl, ) }), }) diff --git a/crates/brk_computer/src/distribution/metrics/supply.rs b/crates/brk_computer/src/distribution/metrics/supply.rs index c40ff62a9..ccaba7231 100644 --- a/crates/brk_computer/src/distribution/metrics/supply.rs +++ b/crates/brk_computer/src/distribution/metrics/supply.rs @@ -1,131 +1,89 @@ use brk_error::Result; use brk_traversable::Traversable; -use brk_types::{Height, Sats, StoredU64, SupplyState, Version}; +use brk_types::{Height, Sats, Version}; + +use crate::ComputeIndexes; use rayon::prelude::*; -use vecdb::{ - AnyStoredVec, AnyVec, EagerVec, Exit, GenericStoredVec, ImportableVec, IterableCloneableVec, - PcoVec, TypedVecIterator, -}; +use vecdb::{AnyStoredVec, AnyVec, Exit, GenericStoredVec, IterableCloneableVec}; use crate::{ - ComputeIndexes, indexes, + indexes, internal::{ - DerivedComputedBlockLast, HalfClosePriceTimesSats, HalveDollars, HalveSats, - HalveSatsToBitcoin, LazyBlockValue, LazyDerivedBlockValue, LazyValueDateLast, ValueDateLast, + HalfClosePriceTimesSats, HalveDollars, HalveSats, HalveSatsToBitcoin, LazyBlockValue, + LazyValueDateLast, ValueBlockLast, }, price, }; use super::ImportConfig; -/// Supply and UTXO count metrics for a cohort. +/// Supply metrics for a cohort. 
#[derive(Clone, Traversable)] pub struct SupplyMetrics { - pub height_to_supply: EagerVec>, - pub height_to_supply_value: LazyDerivedBlockValue, - pub indexes_to_supply: ValueDateLast, - pub height_to_utxo_count: EagerVec>, - pub indexes_to_utxo_count: DerivedComputedBlockLast, - pub height_to_supply_half_value: LazyBlockValue, - pub indexes_to_supply_half: LazyValueDateLast, + pub supply: ValueBlockLast, + pub supply_half_value: LazyBlockValue, + pub supply_half: LazyValueDateLast, } impl SupplyMetrics { /// Import supply metrics from database. pub fn forced_import(cfg: &ImportConfig) -> Result { - let v1 = Version::ONE; let compute_dollars = cfg.compute_dollars(); - let height_to_supply: EagerVec> = - EagerVec::forced_import(cfg.db, &cfg.name("supply"), cfg.version)?; + let supply = ValueBlockLast::forced_import( + cfg.db, + &cfg.name("supply"), + cfg.version, + cfg.indexes, + compute_dollars, + )?; let price_source = cfg .price - .map(|p| p.usd.chainindexes_to_price_close.height.boxed_clone()); - - let height_to_supply_value = LazyDerivedBlockValue::from_source( - &cfg.name("supply"), - height_to_supply.boxed_clone(), - cfg.version, - price_source.clone(), - ); - - let indexes_to_supply = ValueDateLast::forced_import( - cfg.db, - &cfg.name("supply"), - cfg.version + v1, - compute_dollars, - cfg.indexes, - )?; + .map(|p| p.usd.split.close.height.boxed_clone()); // Create lazy supply_half from supply sources - let height_to_supply_half_value = LazyBlockValue::from_sources::< - HalveSats, - HalveSatsToBitcoin, - HalfClosePriceTimesSats, - >( - &cfg.name("supply_half"), - height_to_supply.boxed_clone(), - price_source, - cfg.version, - ); - - let indexes_to_supply_half = - LazyValueDateLast::from_source::( + let supply_half_value = + LazyBlockValue::from_sources::( &cfg.name("supply_half"), - &indexes_to_supply, + supply.sats.height.boxed_clone(), + price_source, cfg.version, ); - let height_to_utxo_count = - EagerVec::forced_import(cfg.db, &cfg.name("utxo_count"), 
cfg.version)?; + let supply_half = LazyValueDateLast::from_block_source::< + HalveSats, + HalveSatsToBitcoin, + HalveDollars, + >(&cfg.name("supply_half"), &supply, cfg.version); Ok(Self { - indexes_to_utxo_count: DerivedComputedBlockLast::forced_import( - cfg.db, - &cfg.name("utxo_count"), - height_to_utxo_count.boxed_clone(), - cfg.version, - cfg.indexes, - )?, - height_to_supply, - height_to_supply_value, - indexes_to_supply, - height_to_utxo_count, - height_to_supply_half_value, - indexes_to_supply_half, + supply, + supply_half_value, + supply_half, }) } /// Get minimum length across height-indexed vectors. pub fn min_len(&self) -> usize { - self.height_to_supply - .len() - .min(self.height_to_utxo_count.len()) + self.supply.sats.height.len() } /// Push supply state values to height-indexed vectors. - pub fn truncate_push(&mut self, height: Height, state: &SupplyState) -> Result<()> { - self.height_to_supply.truncate_push(height, state.value)?; - self.height_to_utxo_count - .truncate_push(height, StoredU64::from(state.utxo_count))?; + pub fn truncate_push(&mut self, height: Height, supply: Sats) -> Result<()> { + self.supply.sats.height.truncate_push(height, supply)?; Ok(()) } /// Write height-indexed vectors to disk. pub fn write(&mut self) -> Result<()> { - self.height_to_supply.write()?; - self.height_to_utxo_count.write()?; + self.supply.sats.height.write()?; Ok(()) } /// Returns a parallel iterator over all vecs for parallel writing. pub fn par_iter_mut(&mut self) -> impl ParallelIterator { - vec![ - &mut self.height_to_supply as &mut dyn AnyStoredVec, - &mut self.height_to_utxo_count as &mut dyn AnyStoredVec, - ] - .into_par_iter() + vec![&mut self.supply.sats.height as &mut dyn AnyStoredVec].into_par_iter() } /// Validate computed versions against base version. 
@@ -141,26 +99,18 @@ impl SupplyMetrics { others: &[&Self], exit: &Exit, ) -> Result<()> { - self.height_to_supply.compute_sum_of_others( + self.supply.sats.height.compute_sum_of_others( starting_indexes.height, &others .iter() - .map(|v| &v.height_to_supply) - .collect::>(), - exit, - )?; - self.height_to_utxo_count.compute_sum_of_others( - starting_indexes.height, - &others - .iter() - .map(|v| &v.height_to_utxo_count) + .map(|v| &v.supply.sats.height) .collect::>(), exit, )?; Ok(()) } - /// First phase of computed metrics (indexes from height). + /// Compute derived vecs from existing height data. pub fn compute_rest_part1( &mut self, indexes: &indexes::Vecs, @@ -168,34 +118,7 @@ impl SupplyMetrics { starting_indexes: &ComputeIndexes, exit: &Exit, ) -> Result<()> { - self.indexes_to_supply - .compute_all(price, starting_indexes, exit, |v| { - let mut dateindex_to_height_count_iter = - indexes.time.dateindex_to_height_count.into_iter(); - let mut height_to_supply_iter = self.height_to_supply.into_iter(); - v.compute_transform( - starting_indexes.dateindex, - &indexes.time.dateindex_to_first_height, - |(i, height, ..)| { - let count = dateindex_to_height_count_iter.get_unwrap(i); - if count == StoredU64::default() { - unreachable!() - } - let supply = height_to_supply_iter.get_unwrap(height + (*count - 1)); - (i, supply) - }, - exit, - )?; - Ok(()) - })?; - - self.indexes_to_utxo_count.derive_from( - indexes, - starting_indexes, - &self.height_to_utxo_count, - exit, - )?; - - Ok(()) + self.supply + .compute_rest(indexes, price, starting_indexes, exit) } } diff --git a/crates/brk_computer/src/distribution/metrics/unrealized.rs b/crates/brk_computer/src/distribution/metrics/unrealized.rs index 090cf02bf..b772795c2 100644 --- a/crates/brk_computer/src/distribution/metrics/unrealized.rs +++ b/crates/brk_computer/src/distribution/metrics/unrealized.rs @@ -1,18 +1,15 @@ use brk_error::Result; use brk_traversable::Traversable; -use brk_types::{DateIndex, Dollars, Height, 
Sats}; +use brk_types::{DateIndex, Dollars, Height}; use rayon::prelude::*; -use vecdb::{ - AnyStoredVec, AnyVec, EagerVec, Exit, GenericStoredVec, ImportableVec, IterableCloneableVec, - LazyVecFrom1, LazyVecFrom2, Negate, PcoVec, -}; +use vecdb::{AnyStoredVec, AnyVec, Exit, GenericStoredVec, Negate}; use crate::{ ComputeIndexes, distribution::state::UnrealizedState, internal::{ - ComputedDateLast, DerivedDateLast, DollarsMinus, DollarsPlus, LazyDateLast, - LazyDerivedBlockValue, ValueDerivedDateLast, + ComputedHeightDateLast, DollarsMinus, DollarsPlus, LazyBinaryBlockLast, LazyBlockLast, + ValueBlockDateLast, }, }; @@ -22,36 +19,19 @@ use super::ImportConfig; #[derive(Clone, Traversable)] pub struct UnrealizedMetrics { // === Supply in Profit/Loss === - pub height_to_supply_in_profit: EagerVec>, - pub indexes_to_supply_in_profit: ValueDerivedDateLast, - pub height_to_supply_in_loss: EagerVec>, - pub indexes_to_supply_in_loss: ValueDerivedDateLast, - pub dateindex_to_supply_in_profit: EagerVec>, - pub dateindex_to_supply_in_loss: EagerVec>, - pub height_to_supply_in_profit_value: LazyDerivedBlockValue, - pub height_to_supply_in_loss_value: LazyDerivedBlockValue, + pub supply_in_profit: ValueBlockDateLast, + pub supply_in_loss: ValueBlockDateLast, // === Unrealized Profit/Loss === - pub height_to_unrealized_profit: EagerVec>, - pub indexes_to_unrealized_profit: DerivedDateLast, - pub height_to_unrealized_loss: EagerVec>, - pub indexes_to_unrealized_loss: DerivedDateLast, - pub dateindex_to_unrealized_profit: EagerVec>, - pub dateindex_to_unrealized_loss: EagerVec>, + pub unrealized_profit: ComputedHeightDateLast, + pub unrealized_loss: ComputedHeightDateLast, - // === Negated and Net === - pub height_to_neg_unrealized_loss: LazyVecFrom1, - pub indexes_to_neg_unrealized_loss: LazyDateLast, + // === Negated === + pub neg_unrealized_loss: LazyBlockLast, - // net = profit - loss (height is lazy, indexes computed) - pub height_to_net_unrealized_pnl: - LazyVecFrom2, - pub 
indexes_to_net_unrealized_pnl: ComputedDateLast, - - // total = profit + loss (height is lazy, indexes computed) - pub height_to_total_unrealized_pnl: - LazyVecFrom2, - pub indexes_to_total_unrealized_pnl: ComputedDateLast, + // === Net and Total === + pub net_unrealized_pnl: LazyBinaryBlockLast, + pub total_unrealized_pnl: LazyBinaryBlockLast, } impl UnrealizedMetrics { @@ -59,154 +39,89 @@ impl UnrealizedMetrics { pub fn forced_import(cfg: &ImportConfig) -> Result { let compute_dollars = cfg.compute_dollars(); - let dateindex_to_supply_in_profit = - EagerVec::forced_import(cfg.db, &cfg.name("supply_in_profit"), cfg.version)?; - let dateindex_to_supply_in_loss = - EagerVec::forced_import(cfg.db, &cfg.name("supply_in_loss"), cfg.version)?; - let dateindex_to_unrealized_profit = - EagerVec::forced_import(cfg.db, &cfg.name("unrealized_profit"), cfg.version)?; - let dateindex_to_unrealized_loss = - EagerVec::forced_import(cfg.db, &cfg.name("unrealized_loss"), cfg.version)?; - let height_to_unrealized_loss: EagerVec> = - EagerVec::forced_import(cfg.db, &cfg.name("unrealized_loss"), cfg.version)?; - let height_to_neg_unrealized_loss = LazyVecFrom1::transformed::( - &cfg.name("neg_unrealized_loss"), + // === Supply in Profit/Loss === + let supply_in_profit = ValueBlockDateLast::forced_import( + cfg.db, + &cfg.name("supply_in_profit"), cfg.version, - height_to_unrealized_loss.boxed_clone(), - ); - - let indexes_to_unrealized_loss = DerivedDateLast::from_source( - &cfg.name("unrealized_loss"), - cfg.version, - dateindex_to_unrealized_loss.boxed_clone(), + compute_dollars, cfg.indexes, - ); - - let indexes_to_neg_unrealized_loss = LazyDateLast::from_derived::( - &cfg.name("neg_unrealized_loss"), + cfg.price, + )?; + let supply_in_loss = ValueBlockDateLast::forced_import( + cfg.db, + &cfg.name("supply_in_loss"), cfg.version, - dateindex_to_unrealized_loss.boxed_clone(), - &indexes_to_unrealized_loss, - ); + compute_dollars, + cfg.indexes, + cfg.price, + )?; - // Extract 
profit sources for lazy net/total vecs - let height_to_unrealized_profit: EagerVec> = - EagerVec::forced_import(cfg.db, &cfg.name("unrealized_profit"), cfg.version)?; - let indexes_to_unrealized_profit = DerivedDateLast::from_source( + // === Unrealized Profit/Loss === + let unrealized_profit = ComputedHeightDateLast::forced_import( + cfg.db, &cfg.name("unrealized_profit"), cfg.version, - dateindex_to_unrealized_profit.boxed_clone(), - cfg.indexes, - ); - - // Create lazy height vecs from profit/loss sources - let height_to_net_unrealized_pnl = LazyVecFrom2::transformed::( - &cfg.name("net_unrealized_pnl"), - cfg.version, - height_to_unrealized_profit.boxed_clone(), - height_to_unrealized_loss.boxed_clone(), - ); - let height_to_total_unrealized_pnl = LazyVecFrom2::transformed::( - &cfg.name("total_unrealized_pnl"), - cfg.version, - height_to_unrealized_profit.boxed_clone(), - height_to_unrealized_loss.boxed_clone(), - ); - - // indexes_to_net/total remain computed (needed by relative.rs) - let indexes_to_net_unrealized_pnl = ComputedDateLast::forced_import( - cfg.db, - &cfg.name("net_unrealized_pnl"), - cfg.version, cfg.indexes, )?; - let indexes_to_total_unrealized_pnl = ComputedDateLast::forced_import( + let unrealized_loss = ComputedHeightDateLast::forced_import( cfg.db, - &cfg.name("total_unrealized_pnl"), + &cfg.name("unrealized_loss"), cfg.version, cfg.indexes, )?; - let height_to_supply_in_profit: EagerVec> = - EagerVec::forced_import(cfg.db, &cfg.name("supply_in_profit"), cfg.version)?; - let height_to_supply_in_loss: EagerVec> = - EagerVec::forced_import(cfg.db, &cfg.name("supply_in_loss"), cfg.version)?; - - let price_source = cfg - .price - .map(|p| p.usd.chainindexes_to_price_close.height.boxed_clone()); - - let height_to_supply_in_profit_value = LazyDerivedBlockValue::from_source( - &cfg.name("supply_in_profit"), - height_to_supply_in_profit.boxed_clone(), + // === Negated === + let neg_unrealized_loss = LazyBlockLast::from_computed_height_date::( + 
&cfg.name("neg_unrealized_loss"), cfg.version, - price_source.clone(), + &unrealized_loss, ); - let height_to_supply_in_loss_value = LazyDerivedBlockValue::from_source( - &cfg.name("supply_in_loss"), - height_to_supply_in_loss.boxed_clone(), + + // === Net and Total === + let net_unrealized_pnl = LazyBinaryBlockLast::from_computed_height_date_last::( + &cfg.name("net_unrealized_pnl"), cfg.version, - price_source, + &unrealized_profit, + &unrealized_loss, + ); + let total_unrealized_pnl = LazyBinaryBlockLast::from_computed_height_date_last::( + &cfg.name("total_unrealized_pnl"), + cfg.version, + &unrealized_profit, + &unrealized_loss, ); Ok(Self { - // === Supply in Profit/Loss === - height_to_supply_in_profit, - indexes_to_supply_in_profit: ValueDerivedDateLast::from_source( - cfg.db, - &cfg.name("supply_in_profit"), - dateindex_to_supply_in_profit.boxed_clone(), - cfg.version, - compute_dollars, - cfg.indexes, - )?, - height_to_supply_in_loss, - indexes_to_supply_in_loss: ValueDerivedDateLast::from_source( - cfg.db, - &cfg.name("supply_in_loss"), - dateindex_to_supply_in_loss.boxed_clone(), - cfg.version, - compute_dollars, - cfg.indexes, - )?, - dateindex_to_supply_in_profit, - dateindex_to_supply_in_loss, - height_to_supply_in_profit_value, - height_to_supply_in_loss_value, - - // === Unrealized Profit/Loss === - height_to_unrealized_profit, - indexes_to_unrealized_profit, - height_to_unrealized_loss, - indexes_to_unrealized_loss, - dateindex_to_unrealized_profit, - dateindex_to_unrealized_loss, - - height_to_neg_unrealized_loss, - indexes_to_neg_unrealized_loss, - height_to_net_unrealized_pnl, - indexes_to_net_unrealized_pnl, - height_to_total_unrealized_pnl, - indexes_to_total_unrealized_pnl, + supply_in_profit, + supply_in_loss, + unrealized_profit, + unrealized_loss, + neg_unrealized_loss, + net_unrealized_pnl, + total_unrealized_pnl, }) } /// Get minimum length across height-indexed vectors written in block loop. 
pub fn min_stateful_height_len(&self) -> usize { - self.height_to_supply_in_profit + self.supply_in_profit + .height .len() - .min(self.height_to_supply_in_loss.len()) - .min(self.height_to_unrealized_profit.len()) - .min(self.height_to_unrealized_loss.len()) + .min(self.supply_in_loss.height.len()) + .min(self.unrealized_profit.height.len()) + .min(self.unrealized_loss.height.len()) } /// Get minimum length across dateindex-indexed vectors written in block loop. pub fn min_stateful_dateindex_len(&self) -> usize { - self.dateindex_to_supply_in_profit + self.supply_in_profit + .indexes + .sats_dateindex .len() - .min(self.dateindex_to_supply_in_loss.len()) - .min(self.dateindex_to_unrealized_profit.len()) - .min(self.dateindex_to_unrealized_loss.len()) + .min(self.supply_in_loss.indexes.sats_dateindex.len()) + .min(self.unrealized_profit.dateindex.len()) + .min(self.unrealized_loss.dateindex.len()) } /// Push unrealized state values to height-indexed vectors. @@ -217,23 +132,33 @@ impl UnrealizedMetrics { height_state: &UnrealizedState, date_state: Option<&UnrealizedState>, ) -> Result<()> { - self.height_to_supply_in_profit + self.supply_in_profit + .height .truncate_push(height, height_state.supply_in_profit)?; - self.height_to_supply_in_loss + self.supply_in_loss + .height .truncate_push(height, height_state.supply_in_loss)?; - self.height_to_unrealized_profit + self.unrealized_profit + .height .truncate_push(height, height_state.unrealized_profit)?; - self.height_to_unrealized_loss + self.unrealized_loss + .height .truncate_push(height, height_state.unrealized_loss)?; if let (Some(dateindex), Some(date_state)) = (dateindex, date_state) { - self.dateindex_to_supply_in_profit + self.supply_in_profit + .indexes + .sats_dateindex .truncate_push(dateindex, date_state.supply_in_profit)?; - self.dateindex_to_supply_in_loss + self.supply_in_loss + .indexes + .sats_dateindex .truncate_push(dateindex, date_state.supply_in_loss)?; - self.dateindex_to_unrealized_profit + 
self.unrealized_profit + .dateindex .truncate_push(dateindex, date_state.unrealized_profit)?; - self.dateindex_to_unrealized_loss + self.unrealized_loss + .dateindex .truncate_push(dateindex, date_state.unrealized_loss)?; } @@ -242,28 +167,28 @@ impl UnrealizedMetrics { /// Write height-indexed vectors to disk. pub fn write(&mut self) -> Result<()> { - self.height_to_supply_in_profit.write()?; - self.height_to_supply_in_loss.write()?; - self.height_to_unrealized_profit.write()?; - self.height_to_unrealized_loss.write()?; - self.dateindex_to_supply_in_profit.write()?; - self.dateindex_to_supply_in_loss.write()?; - self.dateindex_to_unrealized_profit.write()?; - self.dateindex_to_unrealized_loss.write()?; + self.supply_in_profit.height.write()?; + self.supply_in_loss.height.write()?; + self.unrealized_profit.height.write()?; + self.unrealized_loss.height.write()?; + self.supply_in_profit.indexes.sats_dateindex.write()?; + self.supply_in_loss.indexes.sats_dateindex.write()?; + self.unrealized_profit.dateindex.write()?; + self.unrealized_loss.dateindex.write()?; Ok(()) } /// Returns a parallel iterator over all vecs for parallel writing. 
pub fn par_iter_mut(&mut self) -> impl ParallelIterator { vec![ - &mut self.height_to_supply_in_profit as &mut dyn AnyStoredVec, - &mut self.height_to_supply_in_loss as &mut dyn AnyStoredVec, - &mut self.height_to_unrealized_profit as &mut dyn AnyStoredVec, - &mut self.height_to_unrealized_loss as &mut dyn AnyStoredVec, - &mut self.dateindex_to_supply_in_profit as &mut dyn AnyStoredVec, - &mut self.dateindex_to_supply_in_loss as &mut dyn AnyStoredVec, - &mut self.dateindex_to_unrealized_profit as &mut dyn AnyStoredVec, - &mut self.dateindex_to_unrealized_loss as &mut dyn AnyStoredVec, + &mut self.supply_in_profit.height as &mut dyn AnyStoredVec, + &mut self.supply_in_loss.height as &mut dyn AnyStoredVec, + &mut self.unrealized_profit.height as &mut dyn AnyStoredVec, + &mut self.unrealized_loss.height as &mut dyn AnyStoredVec, + &mut self.supply_in_profit.indexes.sats_dateindex as &mut dyn AnyStoredVec, + &mut self.supply_in_loss.indexes.sats_dateindex as &mut dyn AnyStoredVec, + &mut self.unrealized_profit.rest.dateindex as &mut dyn AnyStoredVec, + &mut self.unrealized_loss.rest.dateindex as &mut dyn AnyStoredVec, ] .into_par_iter() } @@ -275,67 +200,73 @@ impl UnrealizedMetrics { others: &[&Self], exit: &Exit, ) -> Result<()> { - self.height_to_supply_in_profit.compute_sum_of_others( + self.supply_in_profit.height.compute_sum_of_others( starting_indexes.height, &others .iter() - .map(|v| &v.height_to_supply_in_profit) + .map(|v| &v.supply_in_profit.height) .collect::>(), exit, )?; - self.height_to_supply_in_loss.compute_sum_of_others( + self.supply_in_loss.height.compute_sum_of_others( starting_indexes.height, &others .iter() - .map(|v| &v.height_to_supply_in_loss) + .map(|v| &v.supply_in_loss.height) .collect::>(), exit, )?; - self.height_to_unrealized_profit.compute_sum_of_others( + self.unrealized_profit.height.compute_sum_of_others( starting_indexes.height, &others .iter() - .map(|v| &v.height_to_unrealized_profit) + .map(|v| &v.unrealized_profit.height) 
.collect::>(), exit, )?; - self.height_to_unrealized_loss.compute_sum_of_others( + self.unrealized_loss.height.compute_sum_of_others( starting_indexes.height, &others .iter() - .map(|v| &v.height_to_unrealized_loss) + .map(|v| &v.unrealized_loss.height) .collect::>(), exit, )?; - self.dateindex_to_supply_in_profit.compute_sum_of_others( + self.supply_in_profit + .indexes + .sats_dateindex + .compute_sum_of_others( + starting_indexes.dateindex, + &others + .iter() + .map(|v| &v.supply_in_profit.indexes.sats_dateindex) + .collect::>(), + exit, + )?; + self.supply_in_loss + .indexes + .sats_dateindex + .compute_sum_of_others( + starting_indexes.dateindex, + &others + .iter() + .map(|v| &v.supply_in_loss.indexes.sats_dateindex) + .collect::>(), + exit, + )?; + self.unrealized_profit.dateindex.compute_sum_of_others( starting_indexes.dateindex, &others .iter() - .map(|v| &v.dateindex_to_supply_in_profit) + .map(|v| &v.unrealized_profit.dateindex) .collect::>(), exit, )?; - self.dateindex_to_supply_in_loss.compute_sum_of_others( + self.unrealized_loss.dateindex.compute_sum_of_others( starting_indexes.dateindex, &others .iter() - .map(|v| &v.dateindex_to_supply_in_loss) - .collect::>(), - exit, - )?; - self.dateindex_to_unrealized_profit.compute_sum_of_others( - starting_indexes.dateindex, - &others - .iter() - .map(|v| &v.dateindex_to_unrealized_profit) - .collect::>(), - exit, - )?; - self.dateindex_to_unrealized_loss.compute_sum_of_others( - starting_indexes.dateindex, - &others - .iter() - .map(|v| &v.dateindex_to_unrealized_loss) + .map(|v| &v.unrealized_loss.dateindex) .collect::>(), exit, )?; @@ -349,39 +280,11 @@ impl UnrealizedMetrics { starting_indexes: &ComputeIndexes, exit: &Exit, ) -> Result<()> { - // KISS: compute_rest doesn't need source vec - lazy vecs are set up during import - self.indexes_to_supply_in_profit - .compute_rest(price, starting_indexes, exit)?; + self.supply_in_profit + .compute_dollars_from_price(price, starting_indexes, exit)?; - 
self.indexes_to_supply_in_loss - .compute_rest(price, starting_indexes, exit)?; - - // indexes_to_unrealized_profit/loss are Derived - no compute needed (lazy only) - - // height_to_net/total are lazy, but indexes still need compute - // total_unrealized_pnl = profit + loss - self.indexes_to_total_unrealized_pnl - .compute_all(starting_indexes, exit, |vec| { - vec.compute_add( - starting_indexes.dateindex, - &self.dateindex_to_unrealized_profit, - &self.dateindex_to_unrealized_loss, - exit, - )?; - Ok(()) - })?; - - // net_unrealized_pnl = profit - loss - self.indexes_to_net_unrealized_pnl - .compute_all(starting_indexes, exit, |vec| { - vec.compute_subtract( - starting_indexes.dateindex, - &self.dateindex_to_unrealized_profit, - &self.dateindex_to_unrealized_loss, - exit, - )?; - Ok(()) - })?; + self.supply_in_loss + .compute_dollars_from_price(price, starting_indexes, exit)?; Ok(()) } diff --git a/crates/brk_computer/src/distribution/vecs.rs b/crates/brk_computer/src/distribution/vecs.rs index d6e522844..ba75d7a1b 100644 --- a/crates/brk_computer/src/distribution/vecs.rs +++ b/crates/brk_computer/src/distribution/vecs.rs @@ -4,10 +4,10 @@ use brk_error::Result; use brk_indexer::Indexer; use brk_traversable::Traversable; use brk_types::{ - EmptyAddressData, EmptyAddressIndex, Height, LoadedAddressData, LoadedAddressIndex, StoredU64, + EmptyAddressData, EmptyAddressIndex, Height, LoadedAddressData, LoadedAddressIndex, SupplyState, Version, }; -use log::info; +use tracing::info; use vecdb::{ AnyVec, BytesVec, Database, Exit, GenericStoredVec, ImportableVec, IterableCloneableVec, LazyVecFrom1, PAGE_SIZE, Stamp, TypedVecIterator, VecIndex, @@ -19,14 +19,11 @@ use crate::{ compute::{StartMode, determine_start_mode, process_blocks, recover_state, reset_state}, state::BlockState, }, - indexes, inputs, - internal::ComputedBlockLast, - outputs, price, transactions, + indexes, inputs, outputs, price, transactions, }; use super::{ - AddressCohorts, AddressesDataVecs, 
AnyAddressIndexesVecs, UTXOCohorts, - address::{AddressTypeToHeightToAddressCount, AddressTypeToIndexesToAddressCount}, + AddressCohorts, AddressesDataVecs, AnyAddressIndexesVecs, UTXOCohorts, address::AddrCountVecs, compute::aggregates, }; @@ -44,13 +41,8 @@ pub struct Vecs { pub utxo_cohorts: UTXOCohorts, pub address_cohorts: AddressCohorts, - pub addresstype_to_height_to_addr_count: AddressTypeToHeightToAddressCount, - pub addresstype_to_height_to_empty_addr_count: AddressTypeToHeightToAddressCount, - - pub addresstype_to_indexes_to_addr_count: AddressTypeToIndexesToAddressCount, - pub addresstype_to_indexes_to_empty_addr_count: AddressTypeToIndexesToAddressCount, - pub indexes_to_addr_count: ComputedBlockLast, - pub indexes_to_empty_addr_count: ComputedBlockLast, + pub addr_count: AddrCountVecs, + pub empty_addr_count: AddrCountVecs, pub loadedaddressindex_to_loadedaddressindex: LazyVecFrom1, pub emptyaddressindex_to_emptyaddressindex: @@ -111,50 +103,20 @@ impl Vecs { |index, _| Some(index), ); - // Extract address type height vecs before struct literal to use as sources - let addresstype_to_height_to_addr_count = - AddressTypeToHeightToAddressCount::forced_import(&db, "addr_count", version)?; - let addresstype_to_height_to_empty_addr_count = - AddressTypeToHeightToAddressCount::forced_import(&db, "empty_addr_count", version)?; - let this = Self { chain_state: BytesVec::forced_import_with( vecdb::ImportOptions::new(&db, "chain", version) .with_saved_stamped_changes(SAVED_STAMPED_CHANGES), )?, - indexes_to_addr_count: ComputedBlockLast::forced_import( - &db, - "addr_count", - version, - indexes, - )?, - indexes_to_empty_addr_count: ComputedBlockLast::forced_import( + addr_count: AddrCountVecs::forced_import(&db, "addr_count", version, indexes)?, + empty_addr_count: AddrCountVecs::forced_import( &db, "empty_addr_count", version, indexes, )?, - addresstype_to_indexes_to_addr_count: - AddressTypeToIndexesToAddressCount::forced_import( - &db, - "addr_count", - 
version, - indexes, - &addresstype_to_height_to_addr_count, - )?, - addresstype_to_indexes_to_empty_addr_count: - AddressTypeToIndexesToAddressCount::forced_import( - &db, - "empty_addr_count", - version, - indexes, - &addresstype_to_height_to_empty_addr_count, - )?, - addresstype_to_height_to_addr_count, - addresstype_to_height_to_empty_addr_count, - utxo_cohorts, address_cohorts, @@ -200,14 +162,22 @@ impl Vecs { starting_indexes: &mut ComputeIndexes, exit: &Exit, ) -> Result<()> { - // 1. Find minimum computed height for recovery - let chain_state_height = Height::from(self.chain_state.len()); + // 1. Find minimum height we have data for across stateful vecs + let current_height = Height::from(self.chain_state.len()); let height_based_min = self.min_stateful_height_len(); let dateindex_min = self.min_stateful_dateindex_len(); - let stateful_min = adjust_for_dateindex_gap(height_based_min, dateindex_min, indexes)?; + let min_stateful = adjust_for_dateindex_gap(height_based_min, dateindex_min, indexes)?; // 2. 
Determine start mode and recover/reset state - let start_mode = determine_start_mode(stateful_min, chain_state_height); + // Clamp to starting_indexes.height to handle reorg (indexer may require earlier start) + let resume_target = current_height.min(starting_indexes.height); + if resume_target < current_height { + info!( + "Reorg detected: rolling back from {} to {}", + current_height, resume_target + ); + } + let start_mode = determine_start_mode(min_stateful.min(resume_target), resume_target); // Try to resume from checkpoint, fall back to fresh start if needed let recovered_height = match start_mode { @@ -238,8 +208,8 @@ impl Vecs { // Fresh start: reset all state let (starting_height, mut chain_state) = if recovered_height.is_zero() { self.chain_state.reset()?; - self.addresstype_to_height_to_addr_count.reset()?; - self.addresstype_to_height_to_empty_addr_count.reset()?; + self.addr_count.reset_height()?; + self.empty_addr_count.reset_height()?; reset_state( &mut self.any_address_indexes, &mut self.addresses_data, @@ -251,8 +221,8 @@ impl Vecs { (Height::ZERO, vec![]) } else { // Recover chain_state from stored values - let height_to_timestamp = &blocks.time.height_to_timestamp_fixed; - let height_to_price = price.map(|p| &p.usd.chainindexes_to_price_close.height); + let height_to_timestamp = &blocks.time.timestamp_fixed; + let height_to_price = price.map(|p| &p.usd.split.close.height); let mut height_to_timestamp_iter = height_to_timestamp.into_iter(); let mut height_to_price_iter = height_to_price.map(|v| v.into_iter()); @@ -279,14 +249,7 @@ impl Vecs { .validate_computed_versions(base_version)?; // 3. Get last height from indexer - let last_height = Height::from( - indexer - .vecs - .block - .height_to_blockhash - .len() - .saturating_sub(1), - ); + let last_height = Height::from(indexer.vecs.blocks.blockhash.len().saturating_sub(1)); // 4. Process blocks if starting_height <= last_height { @@ -324,64 +287,26 @@ impl Vecs { exit, )?; - // 6b. 
Compute address count dateindex vecs (per-addresstype) - self.addresstype_to_indexes_to_addr_count.compute( - indexes, - starting_indexes, - exit, - &self.addresstype_to_height_to_addr_count, - )?; - self.addresstype_to_indexes_to_empty_addr_count.compute( - indexes, - starting_indexes, - exit, - &self.addresstype_to_height_to_empty_addr_count, - )?; - - // 6c. Compute global address count dateindex vecs (sum of all address types) - let addr_count_sources: Vec<_> = - self.addresstype_to_height_to_addr_count.values().collect(); - self.indexes_to_addr_count - .compute_all(indexes, starting_indexes, exit, |height_vec| { - Ok(height_vec.compute_sum_of_others( - starting_indexes.height, - &addr_count_sources, - exit, - )?) - })?; - - let empty_addr_count_sources: Vec<_> = self - .addresstype_to_height_to_empty_addr_count - .values() - .collect(); - self.indexes_to_empty_addr_count.compute_all( - indexes, - starting_indexes, - exit, - |height_vec| { - Ok(height_vec.compute_sum_of_others( - starting_indexes.height, - &empty_addr_count_sources, - exit, - )?) - }, - )?; + // 6b. Compute address count dateindex vecs (by addresstype + all) + self.addr_count + .compute_rest(indexes, starting_indexes, exit)?; + self.empty_addr_count + .compute_rest(indexes, starting_indexes, exit)?; // 7. 
Compute rest part2 (relative metrics) let supply_metrics = &self.utxo_cohorts.all.metrics.supply; let height_to_market_cap = supply_metrics - .height_to_supply_value + .supply .dollars .as_ref() - .cloned(); + .map(|d| d.height.clone()); - // KISS: dateindex is no longer Option, just clone directly let dateindex_to_market_cap = supply_metrics - .indexes_to_supply + .supply .dollars .as_ref() - .map(|v| v.dateindex.clone()); + .map(|d| d.dateindex.0.clone()); let height_to_market_cap_ref = height_to_market_cap.as_ref(); let dateindex_to_market_cap_ref = dateindex_to_market_cap.as_ref(); @@ -415,12 +340,8 @@ impl Vecs { .min(Height::from(self.chain_state.len())) .min(self.any_address_indexes.min_stamped_height()) .min(self.addresses_data.min_stamped_height()) - .min(Height::from( - self.addresstype_to_height_to_addr_count.min_len(), - )) - .min(Height::from( - self.addresstype_to_height_to_empty_addr_count.min_len(), - )) + .min(Height::from(self.addr_count.min_len())) + .min(Height::from(self.empty_addr_count.min_len())) } /// Get minimum length across all dateindex-indexed stateful vectors. @@ -446,25 +367,25 @@ fn adjust_for_dateindex_gap( return Ok(height_based_min); } - // Skip if height_to_dateindex doesn't cover height_based_min yet - if height_based_min.to_usize() >= indexes.block.height_to_dateindex.len() { + // Skip if height.dateindex doesn't cover height_based_min yet + if height_based_min.to_usize() >= indexes.height.dateindex.len() { return Ok(height_based_min); } // Get the dateindex at the height we want to resume at let required_dateindex: usize = indexes - .block - .height_to_dateindex + .height + .dateindex .read_once(height_based_min)? 
.into(); // If dateindex vecs are behind, restart from first height of the missing day if dateindex_min < required_dateindex - && dateindex_min < indexes.time.dateindex_to_first_height.len() + && dateindex_min < indexes.dateindex.first_height.len() { Ok(indexes - .time - .dateindex_to_first_height + .dateindex + .first_height .read_once(dateindex_min.into())?) } else { Ok(height_based_min) diff --git a/crates/brk_computer/src/indexes/address.rs b/crates/brk_computer/src/indexes/address.rs new file mode 100644 index 000000000..a04ff751e --- /dev/null +++ b/crates/brk_computer/src/indexes/address.rs @@ -0,0 +1,188 @@ +use brk_indexer::Indexer; +use brk_traversable::Traversable; +use brk_types::{ + EmptyOutputIndex, OpReturnIndex, P2AAddressIndex, P2ABytes, P2MSOutputIndex, + P2PK33AddressIndex, P2PK33Bytes, P2PK65AddressIndex, P2PK65Bytes, P2PKHAddressIndex, + P2PKHBytes, P2SHAddressIndex, P2SHBytes, P2TRAddressIndex, P2TRBytes, P2WPKHAddressIndex, + P2WPKHBytes, P2WSHAddressIndex, P2WSHBytes, TxIndex, UnknownOutputIndex, Version, +}; +use vecdb::{IterableCloneableVec, LazyVecFrom1}; + +#[derive(Clone, Traversable)] +pub struct Vecs { + pub p2pk33: P2PK33Vecs, + pub p2pk65: P2PK65Vecs, + pub p2pkh: P2PKHVecs, + pub p2sh: P2SHVecs, + pub p2tr: P2TRVecs, + pub p2wpkh: P2WPKHVecs, + pub p2wsh: P2WSHVecs, + pub p2a: P2AVecs, + pub p2ms: P2MSVecs, + pub empty: EmptyVecs, + pub unknown: UnknownVecs, + pub opreturn: OpReturnVecs, +} + +#[derive(Clone, Traversable)] +pub struct P2PK33Vecs { + pub identity: LazyVecFrom1, +} + +#[derive(Clone, Traversable)] +pub struct P2PK65Vecs { + pub identity: LazyVecFrom1, +} + +#[derive(Clone, Traversable)] +pub struct P2PKHVecs { + pub identity: LazyVecFrom1, +} + +#[derive(Clone, Traversable)] +pub struct P2SHVecs { + pub identity: LazyVecFrom1, +} + +#[derive(Clone, Traversable)] +pub struct P2TRVecs { + pub identity: LazyVecFrom1, +} + +#[derive(Clone, Traversable)] +pub struct P2WPKHVecs { + pub identity: LazyVecFrom1, +} + 
+#[derive(Clone, Traversable)] +pub struct P2WSHVecs { + pub identity: LazyVecFrom1, +} + +#[derive(Clone, Traversable)] +pub struct P2AVecs { + pub identity: LazyVecFrom1, +} + +#[derive(Clone, Traversable)] +pub struct P2MSVecs { + pub identity: LazyVecFrom1, +} + +#[derive(Clone, Traversable)] +pub struct EmptyVecs { + pub identity: LazyVecFrom1, +} + +#[derive(Clone, Traversable)] +pub struct UnknownVecs { + pub identity: LazyVecFrom1, +} + +#[derive(Clone, Traversable)] +pub struct OpReturnVecs { + pub identity: LazyVecFrom1, +} + +impl Vecs { + pub fn forced_import(version: Version, indexer: &Indexer) -> Self { + Self { + p2pk33: P2PK33Vecs { + identity: LazyVecFrom1::init( + "p2pk33addressindex", + version, + indexer.vecs.addresses.p2pk33bytes.boxed_clone(), + |index, _| Some(index), + ), + }, + p2pk65: P2PK65Vecs { + identity: LazyVecFrom1::init( + "p2pk65addressindex", + version, + indexer.vecs.addresses.p2pk65bytes.boxed_clone(), + |index, _| Some(index), + ), + }, + p2pkh: P2PKHVecs { + identity: LazyVecFrom1::init( + "p2pkhaddressindex", + version, + indexer.vecs.addresses.p2pkhbytes.boxed_clone(), + |index, _| Some(index), + ), + }, + p2sh: P2SHVecs { + identity: LazyVecFrom1::init( + "p2shaddressindex", + version, + indexer.vecs.addresses.p2shbytes.boxed_clone(), + |index, _| Some(index), + ), + }, + p2tr: P2TRVecs { + identity: LazyVecFrom1::init( + "p2traddressindex", + version, + indexer.vecs.addresses.p2trbytes.boxed_clone(), + |index, _| Some(index), + ), + }, + p2wpkh: P2WPKHVecs { + identity: LazyVecFrom1::init( + "p2wpkhaddressindex", + version, + indexer.vecs.addresses.p2wpkhbytes.boxed_clone(), + |index, _| Some(index), + ), + }, + p2wsh: P2WSHVecs { + identity: LazyVecFrom1::init( + "p2wshaddressindex", + version, + indexer.vecs.addresses.p2wshbytes.boxed_clone(), + |index, _| Some(index), + ), + }, + p2a: P2AVecs { + identity: LazyVecFrom1::init( + "p2aaddressindex", + version, + indexer.vecs.addresses.p2abytes.boxed_clone(), + |index, _| 
Some(index), + ), + }, + p2ms: P2MSVecs { + identity: LazyVecFrom1::init( + "p2msoutputindex", + version, + indexer.vecs.scripts.p2ms_to_txindex.boxed_clone(), + |index, _| Some(index), + ), + }, + empty: EmptyVecs { + identity: LazyVecFrom1::init( + "emptyoutputindex", + version, + indexer.vecs.scripts.empty_to_txindex.boxed_clone(), + |index, _| Some(index), + ), + }, + unknown: UnknownVecs { + identity: LazyVecFrom1::init( + "unknownoutputindex", + version, + indexer.vecs.scripts.unknown_to_txindex.boxed_clone(), + |index, _| Some(index), + ), + }, + opreturn: OpReturnVecs { + identity: LazyVecFrom1::init( + "opreturnindex", + version, + indexer.vecs.scripts.opreturn_to_txindex.boxed_clone(), + |index, _| Some(index), + ), + }, + } + } +} diff --git a/crates/brk_computer/src/indexes/address/import.rs b/crates/brk_computer/src/indexes/address/import.rs deleted file mode 100644 index 8e99e7c37..000000000 --- a/crates/brk_computer/src/indexes/address/import.rs +++ /dev/null @@ -1,84 +0,0 @@ -use brk_indexer::Indexer; -use brk_types::Version; -use vecdb::{IterableCloneableVec, LazyVecFrom1}; - -use super::Vecs; - -impl Vecs { - pub fn forced_import(version: Version, indexer: &Indexer) -> Self { - Self { - p2pk33addressindex_to_p2pk33addressindex: LazyVecFrom1::init( - "p2pk33addressindex", - version, - indexer.vecs.address.p2pk33addressindex_to_p2pk33bytes.boxed_clone(), - |index, _| Some(index), - ), - p2pk65addressindex_to_p2pk65addressindex: LazyVecFrom1::init( - "p2pk65addressindex", - version, - indexer.vecs.address.p2pk65addressindex_to_p2pk65bytes.boxed_clone(), - |index, _| Some(index), - ), - p2pkhaddressindex_to_p2pkhaddressindex: LazyVecFrom1::init( - "p2pkhaddressindex", - version, - indexer.vecs.address.p2pkhaddressindex_to_p2pkhbytes.boxed_clone(), - |index, _| Some(index), - ), - p2shaddressindex_to_p2shaddressindex: LazyVecFrom1::init( - "p2shaddressindex", - version, - indexer.vecs.address.p2shaddressindex_to_p2shbytes.boxed_clone(), - |index, _| 
Some(index), - ), - p2traddressindex_to_p2traddressindex: LazyVecFrom1::init( - "p2traddressindex", - version, - indexer.vecs.address.p2traddressindex_to_p2trbytes.boxed_clone(), - |index, _| Some(index), - ), - p2wpkhaddressindex_to_p2wpkhaddressindex: LazyVecFrom1::init( - "p2wpkhaddressindex", - version, - indexer.vecs.address.p2wpkhaddressindex_to_p2wpkhbytes.boxed_clone(), - |index, _| Some(index), - ), - p2wshaddressindex_to_p2wshaddressindex: LazyVecFrom1::init( - "p2wshaddressindex", - version, - indexer.vecs.address.p2wshaddressindex_to_p2wshbytes.boxed_clone(), - |index, _| Some(index), - ), - p2aaddressindex_to_p2aaddressindex: LazyVecFrom1::init( - "p2aaddressindex", - version, - indexer.vecs.address.p2aaddressindex_to_p2abytes.boxed_clone(), - |index, _| Some(index), - ), - p2msoutputindex_to_p2msoutputindex: LazyVecFrom1::init( - "p2msoutputindex", - version, - indexer.vecs.output.p2msoutputindex_to_txindex.boxed_clone(), - |index, _| Some(index), - ), - emptyoutputindex_to_emptyoutputindex: LazyVecFrom1::init( - "emptyoutputindex", - version, - indexer.vecs.output.emptyoutputindex_to_txindex.boxed_clone(), - |index, _| Some(index), - ), - unknownoutputindex_to_unknownoutputindex: LazyVecFrom1::init( - "unknownoutputindex", - version, - indexer.vecs.output.unknownoutputindex_to_txindex.boxed_clone(), - |index, _| Some(index), - ), - opreturnindex_to_opreturnindex: LazyVecFrom1::init( - "opreturnindex", - version, - indexer.vecs.output.opreturnindex_to_txindex.boxed_clone(), - |index, _| Some(index), - ), - } - } -} diff --git a/crates/brk_computer/src/indexes/address/vecs.rs b/crates/brk_computer/src/indexes/address/vecs.rs deleted file mode 100644 index 6f5e115f4..000000000 --- a/crates/brk_computer/src/indexes/address/vecs.rs +++ /dev/null @@ -1,36 +0,0 @@ -use brk_traversable::Traversable; -use brk_types::{ - EmptyOutputIndex, OpReturnIndex, P2AAddressIndex, P2ABytes, P2MSOutputIndex, - P2PK33AddressIndex, P2PK33Bytes, P2PK65AddressIndex, 
P2PK65Bytes, P2PKHAddressIndex, - P2PKHBytes, P2SHAddressIndex, P2SHBytes, P2TRAddressIndex, P2TRBytes, P2WPKHAddressIndex, - P2WPKHBytes, P2WSHAddressIndex, P2WSHBytes, TxIndex, UnknownOutputIndex, -}; -use vecdb::LazyVecFrom1; - -#[derive(Clone, Traversable)] -pub struct Vecs { - pub emptyoutputindex_to_emptyoutputindex: - LazyVecFrom1, - pub opreturnindex_to_opreturnindex: - LazyVecFrom1, - pub p2aaddressindex_to_p2aaddressindex: - LazyVecFrom1, - pub p2msoutputindex_to_p2msoutputindex: - LazyVecFrom1, - pub p2pk33addressindex_to_p2pk33addressindex: - LazyVecFrom1, - pub p2pk65addressindex_to_p2pk65addressindex: - LazyVecFrom1, - pub p2pkhaddressindex_to_p2pkhaddressindex: - LazyVecFrom1, - pub p2shaddressindex_to_p2shaddressindex: - LazyVecFrom1, - pub p2traddressindex_to_p2traddressindex: - LazyVecFrom1, - pub p2wpkhaddressindex_to_p2wpkhaddressindex: - LazyVecFrom1, - pub p2wshaddressindex_to_p2wshaddressindex: - LazyVecFrom1, - pub unknownoutputindex_to_unknownoutputindex: - LazyVecFrom1, -} diff --git a/crates/brk_computer/src/indexes/block/compute.rs b/crates/brk_computer/src/indexes/block/compute.rs deleted file mode 100644 index f20b01f51..000000000 --- a/crates/brk_computer/src/indexes/block/compute.rs +++ /dev/null @@ -1,115 +0,0 @@ -use brk_error::Result; -use brk_indexer::Indexer; -use brk_types::{DateIndex, DifficultyEpoch, HalvingEpoch}; -use vecdb::{Exit, TypedVecIterator}; - -use super::Vecs; -use crate::blocks; - -impl Vecs { - pub fn compute( - &mut self, - indexer: &Indexer, - blocks_time: &blocks::time::Vecs, - starting_indexes: &brk_indexer::Indexes, - exit: &Exit, - ) -> Result<(DateIndex, DifficultyEpoch, HalvingEpoch)> { - self.height_to_txindex_count.compute_count_from_indexes( - starting_indexes.height, - &indexer.vecs.tx.height_to_first_txindex, - &indexer.vecs.tx.txindex_to_txid, - exit, - )?; - - self.height_to_height.compute_from_index( - starting_indexes.height, - &indexer.vecs.block.height_to_weight, - exit, - )?; - - let 
decremented_starting_height = starting_indexes.height.decremented().unwrap_or_default(); - - // DateIndex (uses blocks_time.height_to_date_fixed computed in blocks::time::compute_early) - let starting_dateindex = self - .height_to_dateindex - .into_iter() - .get(decremented_starting_height) - .unwrap_or_default(); - - self.height_to_dateindex.compute_transform( - starting_indexes.height, - &blocks_time.height_to_date_fixed, - |(h, d, ..)| (h, DateIndex::try_from(d).unwrap()), - exit, - )?; - - let starting_dateindex = if let Some(dateindex) = self - .height_to_dateindex - .into_iter() - .get(decremented_starting_height) - { - starting_dateindex.min(dateindex) - } else { - starting_dateindex - }; - - // Difficulty epoch - let starting_difficultyepoch = self - .height_to_difficultyepoch - .into_iter() - .get(decremented_starting_height) - .unwrap_or_default(); - - self.height_to_difficultyepoch.compute_from_index( - starting_indexes.height, - &indexer.vecs.block.height_to_weight, - exit, - )?; - - self.difficultyepoch_to_first_height.compute_coarser( - starting_indexes.height, - &self.height_to_difficultyepoch, - exit, - )?; - - self.difficultyepoch_to_difficultyepoch.compute_from_index( - starting_difficultyepoch, - &self.difficultyepoch_to_first_height, - exit, - )?; - - self.difficultyepoch_to_height_count.compute_count_from_indexes( - starting_difficultyepoch, - &self.difficultyepoch_to_first_height, - &blocks_time.height_to_date, - exit, - )?; - - // Halving epoch - let starting_halvingepoch = self - .height_to_halvingepoch - .into_iter() - .get(decremented_starting_height) - .unwrap_or_default(); - - self.height_to_halvingepoch.compute_from_index( - starting_indexes.height, - &indexer.vecs.block.height_to_weight, - exit, - )?; - - self.halvingepoch_to_first_height.compute_coarser( - starting_indexes.height, - &self.height_to_halvingepoch, - exit, - )?; - - self.halvingepoch_to_halvingepoch.compute_from_index( - starting_halvingepoch, - 
&self.halvingepoch_to_first_height, - exit, - )?; - - Ok((starting_dateindex, starting_difficultyepoch, starting_halvingepoch)) - } -} diff --git a/crates/brk_computer/src/indexes/block/import.rs b/crates/brk_computer/src/indexes/block/import.rs deleted file mode 100644 index 28017b2c2..000000000 --- a/crates/brk_computer/src/indexes/block/import.rs +++ /dev/null @@ -1,22 +0,0 @@ -use brk_error::Result; -use brk_types::Version; -use vecdb::{Database, EagerVec, ImportableVec}; - -use super::Vecs; - -impl Vecs { - pub fn forced_import(db: &Database, version: Version) -> Result { - Ok(Self { - height_to_dateindex: EagerVec::forced_import(db, "dateindex", version)?, - height_to_difficultyepoch: EagerVec::forced_import(db, "difficultyepoch", version)?, - height_to_halvingepoch: EagerVec::forced_import(db, "halvingepoch", version)?, - height_to_height: EagerVec::forced_import(db, "height", version)?, - height_to_txindex_count: EagerVec::forced_import(db, "txindex_count", version)?, - difficultyepoch_to_difficultyepoch: EagerVec::forced_import(db, "difficultyepoch", version)?, - difficultyepoch_to_first_height: EagerVec::forced_import(db, "first_height", version)?, - difficultyepoch_to_height_count: EagerVec::forced_import(db, "height_count", version)?, - halvingepoch_to_first_height: EagerVec::forced_import(db, "first_height", version)?, - halvingepoch_to_halvingepoch: EagerVec::forced_import(db, "halvingepoch", version)?, - }) - } -} diff --git a/crates/brk_computer/src/indexes/block/mod.rs b/crates/brk_computer/src/indexes/block/mod.rs deleted file mode 100644 index 1136f9ebd..000000000 --- a/crates/brk_computer/src/indexes/block/mod.rs +++ /dev/null @@ -1,5 +0,0 @@ -mod compute; -mod import; -mod vecs; - -pub use vecs::Vecs; diff --git a/crates/brk_computer/src/indexes/block/vecs.rs b/crates/brk_computer/src/indexes/block/vecs.rs deleted file mode 100644 index 1f51ae05c..000000000 --- a/crates/brk_computer/src/indexes/block/vecs.rs +++ /dev/null @@ -1,17 +0,0 @@ -use 
brk_traversable::Traversable; -use brk_types::{DateIndex, DifficultyEpoch, HalvingEpoch, Height, StoredU64}; -use vecdb::{EagerVec, PcoVec}; - -#[derive(Clone, Traversable)] -pub struct Vecs { - pub height_to_dateindex: EagerVec>, - pub height_to_difficultyepoch: EagerVec>, - pub height_to_halvingepoch: EagerVec>, - pub height_to_height: EagerVec>, - pub height_to_txindex_count: EagerVec>, - pub difficultyepoch_to_difficultyepoch: EagerVec>, - pub difficultyepoch_to_first_height: EagerVec>, - pub difficultyepoch_to_height_count: EagerVec>, - pub halvingepoch_to_first_height: EagerVec>, - pub halvingepoch_to_halvingepoch: EagerVec>, -} diff --git a/crates/brk_computer/src/indexes/dateindex.rs b/crates/brk_computer/src/indexes/dateindex.rs new file mode 100644 index 000000000..1024cc2eb --- /dev/null +++ b/crates/brk_computer/src/indexes/dateindex.rs @@ -0,0 +1,28 @@ +use brk_traversable::Traversable; +use brk_types::{Date, DateIndex, Height, MonthIndex, StoredU64, Version, WeekIndex}; +use vecdb::{Database, EagerVec, ImportableVec, PcoVec}; + +use brk_error::Result; + +#[derive(Clone, Traversable)] +pub struct Vecs { + pub identity: EagerVec>, + pub date: EagerVec>, + pub first_height: EagerVec>, + pub height_count: EagerVec>, + pub weekindex: EagerVec>, + pub monthindex: EagerVec>, +} + +impl Vecs { + pub fn forced_import(db: &Database, version: Version) -> Result { + Ok(Self { + identity: EagerVec::forced_import(db, "dateindex", version)?, + date: EagerVec::forced_import(db, "dateindex_date", version)?, + first_height: EagerVec::forced_import(db, "dateindex_first_height", version)?, + height_count: EagerVec::forced_import(db, "dateindex_height_count", version)?, + weekindex: EagerVec::forced_import(db, "dateindex_weekindex", version)?, + monthindex: EagerVec::forced_import(db, "dateindex_monthindex", version)?, + }) + } +} diff --git a/crates/brk_computer/src/indexes/decadeindex.rs b/crates/brk_computer/src/indexes/decadeindex.rs new file mode 100644 index 
000000000..ac30b1dac --- /dev/null +++ b/crates/brk_computer/src/indexes/decadeindex.rs @@ -0,0 +1,22 @@ +use brk_traversable::Traversable; +use brk_types::{DecadeIndex, StoredU64, Version, YearIndex}; +use vecdb::{Database, EagerVec, ImportableVec, PcoVec}; + +use brk_error::Result; + +#[derive(Clone, Traversable)] +pub struct Vecs { + pub identity: EagerVec>, + pub first_yearindex: EagerVec>, + pub yearindex_count: EagerVec>, +} + +impl Vecs { + pub fn forced_import(db: &Database, version: Version) -> Result { + Ok(Self { + identity: EagerVec::forced_import(db, "decadeindex", version)?, + first_yearindex: EagerVec::forced_import(db, "decadeindex_first_yearindex", version)?, + yearindex_count: EagerVec::forced_import(db, "decadeindex_yearindex_count", version)?, + }) + } +} diff --git a/crates/brk_computer/src/indexes/difficultyepoch.rs b/crates/brk_computer/src/indexes/difficultyepoch.rs new file mode 100644 index 000000000..a844c3bcb --- /dev/null +++ b/crates/brk_computer/src/indexes/difficultyepoch.rs @@ -0,0 +1,22 @@ +use brk_traversable::Traversable; +use brk_types::{DifficultyEpoch, Height, StoredU64, Version}; +use vecdb::{Database, EagerVec, ImportableVec, PcoVec}; + +use brk_error::Result; + +#[derive(Clone, Traversable)] +pub struct Vecs { + pub identity: EagerVec>, + pub first_height: EagerVec>, + pub height_count: EagerVec>, +} + +impl Vecs { + pub fn forced_import(db: &Database, version: Version) -> Result { + Ok(Self { + identity: EagerVec::forced_import(db, "difficultyepoch", version)?, + first_height: EagerVec::forced_import(db, "difficultyepoch_first_height", version)?, + height_count: EagerVec::forced_import(db, "difficultyepoch_height_count", version)?, + }) + } +} diff --git a/crates/brk_computer/src/indexes/halvingepoch.rs b/crates/brk_computer/src/indexes/halvingepoch.rs new file mode 100644 index 000000000..7f5b2833b --- /dev/null +++ b/crates/brk_computer/src/indexes/halvingepoch.rs @@ -0,0 +1,20 @@ +use brk_traversable::Traversable; +use 
brk_types::{HalvingEpoch, Height, Version}; +use vecdb::{Database, EagerVec, ImportableVec, PcoVec}; + +use brk_error::Result; + +#[derive(Clone, Traversable)] +pub struct Vecs { + pub identity: EagerVec>, + pub first_height: EagerVec>, +} + +impl Vecs { + pub fn forced_import(db: &Database, version: Version) -> Result { + Ok(Self { + identity: EagerVec::forced_import(db, "halvingepoch", version)?, + first_height: EagerVec::forced_import(db, "halvingepoch_first_height", version)?, + }) + } +} diff --git a/crates/brk_computer/src/indexes/height.rs b/crates/brk_computer/src/indexes/height.rs new file mode 100644 index 000000000..9157b3046 --- /dev/null +++ b/crates/brk_computer/src/indexes/height.rs @@ -0,0 +1,26 @@ +use brk_traversable::Traversable; +use brk_types::{DateIndex, DifficultyEpoch, HalvingEpoch, Height, StoredU64, Version}; +use vecdb::{Database, EagerVec, ImportableVec, PcoVec}; + +use brk_error::Result; + +#[derive(Clone, Traversable)] +pub struct Vecs { + pub identity: EagerVec>, + pub dateindex: EagerVec>, + pub difficultyepoch: EagerVec>, + pub halvingepoch: EagerVec>, + pub txindex_count: EagerVec>, +} + +impl Vecs { + pub fn forced_import(db: &Database, version: Version) -> Result { + Ok(Self { + identity: EagerVec::forced_import(db, "height", version)?, + dateindex: EagerVec::forced_import(db, "height_dateindex", version)?, + difficultyepoch: EagerVec::forced_import(db, "height_difficultyepoch", version)?, + halvingepoch: EagerVec::forced_import(db, "height_halvingepoch", version)?, + txindex_count: EagerVec::forced_import(db, "height_txindex_count", version)?, + }) + } +} diff --git a/crates/brk_computer/src/indexes/mod.rs b/crates/brk_computer/src/indexes/mod.rs index 2df7ac884..5f3f512b1 100644 --- a/crates/brk_computer/src/indexes/mod.rs +++ b/crates/brk_computer/src/indexes/mod.rs @@ -1,24 +1,43 @@ mod address; -mod block; -mod time; -mod transaction; +mod dateindex; +mod decadeindex; +mod difficultyepoch; +mod halvingepoch; +mod height; 
+mod monthindex; +mod quarterindex; +mod semesterindex; +mod txindex; +mod txinindex; +mod txoutindex; +mod weekindex; +mod yearindex; use std::path::Path; use brk_error::Result; use brk_indexer::Indexer; use brk_traversable::Traversable; -use brk_types::{Indexes, Version}; - -pub use brk_types::ComputeIndexes; -use vecdb::{Database, Exit, PAGE_SIZE}; +use brk_types::{DateIndex, Indexes, MonthIndex, Version, WeekIndex}; +use vecdb::{Database, Exit, TypedVecIterator, PAGE_SIZE}; use crate::blocks; pub use address::Vecs as AddressVecs; -pub use block::Vecs as BlockVecs; -pub use time::Vecs as TimeVecs; -pub use transaction::Vecs as TransactionVecs; +pub use brk_types::ComputeIndexes; +pub use dateindex::Vecs as DateIndexVecs; +pub use decadeindex::Vecs as DecadeIndexVecs; +pub use difficultyepoch::Vecs as DifficultyEpochVecs; +pub use halvingepoch::Vecs as HalvingEpochVecs; +pub use height::Vecs as HeightVecs; +pub use monthindex::Vecs as MonthIndexVecs; +pub use quarterindex::Vecs as QuarterIndexVecs; +pub use semesterindex::Vecs as SemesterIndexVecs; +pub use txindex::Vecs as TxIndexVecs; +pub use txinindex::Vecs as TxInIndexVecs; +pub use txoutindex::Vecs as TxOutIndexVecs; +pub use weekindex::Vecs as WeekIndexVecs; +pub use yearindex::Vecs as YearIndexVecs; const VERSION: Version = Version::ZERO; pub const DB_NAME: &str = "indexes"; @@ -27,9 +46,19 @@ pub const DB_NAME: &str = "indexes"; pub struct Vecs { db: Database, pub address: AddressVecs, - pub block: BlockVecs, - pub time: TimeVecs, - pub transaction: TransactionVecs, + pub height: HeightVecs, + pub difficultyepoch: DifficultyEpochVecs, + pub halvingepoch: HalvingEpochVecs, + pub dateindex: DateIndexVecs, + pub weekindex: WeekIndexVecs, + pub monthindex: MonthIndexVecs, + pub quarterindex: QuarterIndexVecs, + pub semesterindex: SemesterIndexVecs, + pub yearindex: YearIndexVecs, + pub decadeindex: DecadeIndexVecs, + pub txindex: TxIndexVecs, + pub txinindex: TxInIndexVecs, + pub txoutindex: TxOutIndexVecs, 
} impl Vecs { @@ -45,9 +74,19 @@ impl Vecs { let this = Self { address: AddressVecs::forced_import(version, indexer), - block: BlockVecs::forced_import(&db, version)?, - time: TimeVecs::forced_import(&db, version)?, - transaction: TransactionVecs::forced_import(&db, version, indexer)?, + height: HeightVecs::forced_import(&db, version)?, + difficultyepoch: DifficultyEpochVecs::forced_import(&db, version)?, + halvingepoch: HalvingEpochVecs::forced_import(&db, version)?, + dateindex: DateIndexVecs::forced_import(&db, version)?, + weekindex: WeekIndexVecs::forced_import(&db, version)?, + monthindex: MonthIndexVecs::forced_import(&db, version)?, + quarterindex: QuarterIndexVecs::forced_import(&db, version)?, + semesterindex: SemesterIndexVecs::forced_import(&db, version)?, + yearindex: YearIndexVecs::forced_import(&db, version)?, + decadeindex: DecadeIndexVecs::forced_import(&db, version)?, + txindex: TxIndexVecs::forced_import(&db, version, indexer)?, + txinindex: TxInIndexVecs::forced_import(version, indexer), + txoutindex: TxOutIndexVecs::forced_import(version, indexer), db, }; @@ -81,28 +120,356 @@ impl Vecs { starting_indexes: Indexes, exit: &Exit, ) -> Result { - // Transaction indexes - self.transaction.compute(indexer, &starting_indexes, exit)?; + // Transaction indexes - compute input/output counts + self.txindex.input_count.compute_count_from_indexes( + starting_indexes.txindex, + &indexer.vecs.transactions.first_txinindex, + &indexer.vecs.inputs.outpoint, + exit, + )?; + self.txindex.output_count.compute_count_from_indexes( + starting_indexes.txindex, + &indexer.vecs.transactions.first_txoutindex, + &indexer.vecs.outputs.value, + exit, + )?; - // Block indexes (height, dateindex, difficultyepoch, halvingepoch) - // Uses blocks_time.height_to_date_fixed computed in blocks::time::compute_early - let (starting_dateindex, starting_difficultyepoch, starting_halvingepoch) = - self.block.compute(indexer, blocks_time, &starting_indexes, exit)?; + // Height indexes + 
self.height.txindex_count.compute_count_from_indexes( + starting_indexes.height, + &indexer.vecs.transactions.first_txindex, + &indexer.vecs.transactions.txid, + exit, + )?; - // Time indexes (depends on block.height_to_dateindex) - let time_indexes = self - .time - .compute(indexer, &starting_indexes, starting_dateindex, &self.block, exit)?; + self.height.identity.compute_from_index( + starting_indexes.height, + &indexer.vecs.blocks.weight, + exit, + )?; + + let decremented_starting_height = starting_indexes.height.decremented().unwrap_or_default(); + + // DateIndex (uses blocks_time.date_fixed computed in blocks::time::compute_early) + let starting_dateindex = self + .height + .dateindex + .into_iter() + .get(decremented_starting_height) + .unwrap_or_default(); + + self.height.dateindex.compute_transform( + starting_indexes.height, + &blocks_time.date_fixed, + |(h, d, ..)| (h, DateIndex::try_from(d).unwrap()), + exit, + )?; + + let starting_dateindex = if let Some(dateindex) = self + .height + .dateindex + .into_iter() + .get(decremented_starting_height) + { + starting_dateindex.min(dateindex) + } else { + starting_dateindex + }; + + // Difficulty epoch + let starting_difficultyepoch = self + .height + .difficultyepoch + .into_iter() + .get(decremented_starting_height) + .unwrap_or_default(); + + self.height.difficultyepoch.compute_from_index( + starting_indexes.height, + &indexer.vecs.blocks.weight, + exit, + )?; + + self.difficultyepoch.first_height.compute_coarser( + starting_indexes.height, + &self.height.difficultyepoch, + exit, + )?; + + self.difficultyepoch.identity.compute_from_index( + starting_difficultyepoch, + &self.difficultyepoch.first_height, + exit, + )?; + + self.difficultyepoch.height_count.compute_count_from_indexes( + starting_difficultyepoch, + &self.difficultyepoch.first_height, + &blocks_time.date, + exit, + )?; + + // Halving epoch + let starting_halvingepoch = self + .height + .halvingepoch + .into_iter() + 
.get(decremented_starting_height) + .unwrap_or_default(); + + self.height.halvingepoch.compute_from_index( + starting_indexes.height, + &indexer.vecs.blocks.weight, + exit, + )?; + + self.halvingepoch.first_height.compute_coarser( + starting_indexes.height, + &self.height.halvingepoch, + exit, + )?; + + self.halvingepoch.identity.compute_from_index( + starting_halvingepoch, + &self.halvingepoch.first_height, + exit, + )?; + + // Time indexes (depends on height.dateindex) + self.dateindex.first_height.compute_coarser( + starting_indexes.height, + &self.height.dateindex, + exit, + )?; + + self.dateindex.identity.compute_from_index( + starting_dateindex, + &self.dateindex.first_height, + exit, + )?; + + self.dateindex.date.compute_from_index( + starting_dateindex, + &self.dateindex.first_height, + exit, + )?; + + self.dateindex.height_count.compute_count_from_indexes( + starting_dateindex, + &self.dateindex.first_height, + &indexer.vecs.blocks.weight, + exit, + )?; + + // Week + let starting_weekindex = self + .dateindex + .weekindex + .into_iter() + .get(starting_dateindex) + .unwrap_or_default(); + + self.dateindex.weekindex.compute_range( + starting_dateindex, + &self.dateindex.identity, + |i| (i, WeekIndex::from(i)), + exit, + )?; + + self.weekindex.first_dateindex.compute_coarser( + starting_dateindex, + &self.dateindex.weekindex, + exit, + )?; + + self.weekindex.identity.compute_from_index( + starting_weekindex, + &self.weekindex.first_dateindex, + exit, + )?; + + self.weekindex.dateindex_count.compute_count_from_indexes( + starting_weekindex, + &self.weekindex.first_dateindex, + &self.dateindex.date, + exit, + )?; + + // Month + let starting_monthindex = self + .dateindex + .monthindex + .into_iter() + .get(starting_dateindex) + .unwrap_or_default(); + + self.dateindex.monthindex.compute_range( + starting_dateindex, + &self.dateindex.identity, + |i| (i, MonthIndex::from(i)), + exit, + )?; + + self.monthindex.first_dateindex.compute_coarser( + 
starting_dateindex, + &self.dateindex.monthindex, + exit, + )?; + + self.monthindex.identity.compute_from_index( + starting_monthindex, + &self.monthindex.first_dateindex, + exit, + )?; + + self.monthindex.dateindex_count.compute_count_from_indexes( + starting_monthindex, + &self.monthindex.first_dateindex, + &self.dateindex.date, + exit, + )?; + + // Quarter + let starting_quarterindex = self + .monthindex + .quarterindex + .into_iter() + .get(starting_monthindex) + .unwrap_or_default(); + + self.monthindex.quarterindex.compute_from_index( + starting_monthindex, + &self.monthindex.first_dateindex, + exit, + )?; + + self.quarterindex.first_monthindex.compute_coarser( + starting_monthindex, + &self.monthindex.quarterindex, + exit, + )?; + + self.quarterindex.identity.compute_from_index( + starting_quarterindex, + &self.quarterindex.first_monthindex, + exit, + )?; + + self.quarterindex.monthindex_count.compute_count_from_indexes( + starting_quarterindex, + &self.quarterindex.first_monthindex, + &self.monthindex.identity, + exit, + )?; + + // Semester + let starting_semesterindex = self + .monthindex + .semesterindex + .into_iter() + .get(starting_monthindex) + .unwrap_or_default(); + + self.monthindex.semesterindex.compute_from_index( + starting_monthindex, + &self.monthindex.first_dateindex, + exit, + )?; + + self.semesterindex.first_monthindex.compute_coarser( + starting_monthindex, + &self.monthindex.semesterindex, + exit, + )?; + + self.semesterindex.identity.compute_from_index( + starting_semesterindex, + &self.semesterindex.first_monthindex, + exit, + )?; + + self.semesterindex.monthindex_count.compute_count_from_indexes( + starting_semesterindex, + &self.semesterindex.first_monthindex, + &self.monthindex.identity, + exit, + )?; + + // Year + let starting_yearindex = self + .monthindex + .yearindex + .into_iter() + .get(starting_monthindex) + .unwrap_or_default(); + + self.monthindex.yearindex.compute_from_index( + starting_monthindex, + 
&self.monthindex.first_dateindex, + exit, + )?; + + self.yearindex.first_monthindex.compute_coarser( + starting_monthindex, + &self.monthindex.yearindex, + exit, + )?; + + self.yearindex.identity.compute_from_index( + starting_yearindex, + &self.yearindex.first_monthindex, + exit, + )?; + + self.yearindex.monthindex_count.compute_count_from_indexes( + starting_yearindex, + &self.yearindex.first_monthindex, + &self.monthindex.identity, + exit, + )?; + + // Decade + let starting_decadeindex = self + .yearindex + .decadeindex + .into_iter() + .get(starting_yearindex) + .unwrap_or_default(); + + self.yearindex.decadeindex.compute_from_index( + starting_yearindex, + &self.yearindex.first_monthindex, + exit, + )?; + + self.decadeindex.first_yearindex.compute_coarser( + starting_yearindex, + &self.yearindex.decadeindex, + exit, + )?; + + self.decadeindex.identity.compute_from_index( + starting_decadeindex, + &self.decadeindex.first_yearindex, + exit, + )?; + + self.decadeindex.yearindex_count.compute_count_from_indexes( + starting_decadeindex, + &self.decadeindex.first_yearindex, + &self.yearindex.identity, + exit, + )?; Ok(ComputeIndexes::new( starting_indexes, - time_indexes.dateindex, - time_indexes.weekindex, - time_indexes.monthindex, - time_indexes.quarterindex, - time_indexes.semesterindex, - time_indexes.yearindex, - time_indexes.decadeindex, + starting_dateindex, + starting_weekindex, + starting_monthindex, + starting_quarterindex, + starting_semesterindex, + starting_yearindex, + starting_decadeindex, starting_difficultyepoch, starting_halvingepoch, )) diff --git a/crates/brk_computer/src/indexes/monthindex.rs b/crates/brk_computer/src/indexes/monthindex.rs new file mode 100644 index 000000000..28d0c918a --- /dev/null +++ b/crates/brk_computer/src/indexes/monthindex.rs @@ -0,0 +1,28 @@ +use brk_traversable::Traversable; +use brk_types::{DateIndex, MonthIndex, QuarterIndex, SemesterIndex, StoredU64, Version, YearIndex}; +use vecdb::{Database, EagerVec, 
ImportableVec, PcoVec}; + +use brk_error::Result; + +#[derive(Clone, Traversable)] +pub struct Vecs { + pub identity: EagerVec>, + pub first_dateindex: EagerVec>, + pub dateindex_count: EagerVec>, + pub quarterindex: EagerVec>, + pub semesterindex: EagerVec>, + pub yearindex: EagerVec>, +} + +impl Vecs { + pub fn forced_import(db: &Database, version: Version) -> Result { + Ok(Self { + identity: EagerVec::forced_import(db, "monthindex", version)?, + first_dateindex: EagerVec::forced_import(db, "monthindex_first_dateindex", version)?, + dateindex_count: EagerVec::forced_import(db, "monthindex_dateindex_count", version)?, + quarterindex: EagerVec::forced_import(db, "monthindex_quarterindex", version)?, + semesterindex: EagerVec::forced_import(db, "monthindex_semesterindex", version)?, + yearindex: EagerVec::forced_import(db, "monthindex_yearindex", version)?, + }) + } +} diff --git a/crates/brk_computer/src/indexes/quarterindex.rs b/crates/brk_computer/src/indexes/quarterindex.rs new file mode 100644 index 000000000..3a83ddf10 --- /dev/null +++ b/crates/brk_computer/src/indexes/quarterindex.rs @@ -0,0 +1,22 @@ +use brk_traversable::Traversable; +use brk_types::{MonthIndex, QuarterIndex, StoredU64, Version}; +use vecdb::{Database, EagerVec, ImportableVec, PcoVec}; + +use brk_error::Result; + +#[derive(Clone, Traversable)] +pub struct Vecs { + pub identity: EagerVec>, + pub first_monthindex: EagerVec>, + pub monthindex_count: EagerVec>, +} + +impl Vecs { + pub fn forced_import(db: &Database, version: Version) -> Result { + Ok(Self { + identity: EagerVec::forced_import(db, "quarterindex", version)?, + first_monthindex: EagerVec::forced_import(db, "quarterindex_first_monthindex", version)?, + monthindex_count: EagerVec::forced_import(db, "quarterindex_monthindex_count", version)?, + }) + } +} diff --git a/crates/brk_computer/src/indexes/semesterindex.rs b/crates/brk_computer/src/indexes/semesterindex.rs new file mode 100644 index 000000000..f6ce93035 --- /dev/null +++ 
b/crates/brk_computer/src/indexes/semesterindex.rs @@ -0,0 +1,22 @@ +use brk_traversable::Traversable; +use brk_types::{MonthIndex, SemesterIndex, StoredU64, Version}; +use vecdb::{Database, EagerVec, ImportableVec, PcoVec}; + +use brk_error::Result; + +#[derive(Clone, Traversable)] +pub struct Vecs { + pub identity: EagerVec>, + pub first_monthindex: EagerVec>, + pub monthindex_count: EagerVec>, +} + +impl Vecs { + pub fn forced_import(db: &Database, version: Version) -> Result { + Ok(Self { + identity: EagerVec::forced_import(db, "semesterindex", version)?, + first_monthindex: EagerVec::forced_import(db, "semesterindex_first_monthindex", version)?, + monthindex_count: EagerVec::forced_import(db, "semesterindex_monthindex_count", version)?, + }) + } +} diff --git a/crates/brk_computer/src/indexes/time/compute.rs b/crates/brk_computer/src/indexes/time/compute.rs deleted file mode 100644 index 189a52397..000000000 --- a/crates/brk_computer/src/indexes/time/compute.rs +++ /dev/null @@ -1,246 +0,0 @@ -use brk_error::Result; -use brk_indexer::Indexer; -use brk_types::{DateIndex, MonthIndex, WeekIndex}; -use vecdb::{Exit, TypedVecIterator}; - -use super::{super::block, vecs::StartingTimeIndexes, Vecs}; - -impl Vecs { - pub fn compute( - &mut self, - indexer: &Indexer, - starting_indexes: &brk_indexer::Indexes, - starting_dateindex: DateIndex, - block_vecs: &block::Vecs, - exit: &Exit, - ) -> Result { - self.dateindex_to_first_height.compute_coarser( - starting_indexes.height, - &block_vecs.height_to_dateindex, - exit, - )?; - - self.dateindex_to_dateindex.compute_from_index( - starting_dateindex, - &self.dateindex_to_first_height, - exit, - )?; - - self.dateindex_to_date.compute_from_index( - starting_dateindex, - &self.dateindex_to_first_height, - exit, - )?; - - self.dateindex_to_height_count.compute_count_from_indexes( - starting_dateindex, - &self.dateindex_to_first_height, - &indexer.vecs.block.height_to_weight, - exit, - )?; - - // Week - let starting_weekindex = 
self - .dateindex_to_weekindex - .into_iter() - .get(starting_dateindex) - .unwrap_or_default(); - - self.dateindex_to_weekindex.compute_range( - starting_dateindex, - &self.dateindex_to_dateindex, - |i| (i, WeekIndex::from(i)), - exit, - )?; - - self.weekindex_to_first_dateindex.compute_coarser( - starting_dateindex, - &self.dateindex_to_weekindex, - exit, - )?; - - self.weekindex_to_weekindex.compute_from_index( - starting_weekindex, - &self.weekindex_to_first_dateindex, - exit, - )?; - - self.weekindex_to_dateindex_count.compute_count_from_indexes( - starting_weekindex, - &self.weekindex_to_first_dateindex, - &self.dateindex_to_date, - exit, - )?; - - // Month - let starting_monthindex = self - .dateindex_to_monthindex - .into_iter() - .get(starting_dateindex) - .unwrap_or_default(); - - self.dateindex_to_monthindex.compute_range( - starting_dateindex, - &self.dateindex_to_dateindex, - |i| (i, MonthIndex::from(i)), - exit, - )?; - - self.monthindex_to_first_dateindex.compute_coarser( - starting_dateindex, - &self.dateindex_to_monthindex, - exit, - )?; - - self.monthindex_to_monthindex.compute_from_index( - starting_monthindex, - &self.monthindex_to_first_dateindex, - exit, - )?; - - self.monthindex_to_dateindex_count.compute_count_from_indexes( - starting_monthindex, - &self.monthindex_to_first_dateindex, - &self.dateindex_to_date, - exit, - )?; - - // Quarter - let starting_quarterindex = self - .monthindex_to_quarterindex - .into_iter() - .get(starting_monthindex) - .unwrap_or_default(); - - self.monthindex_to_quarterindex.compute_from_index( - starting_monthindex, - &self.monthindex_to_first_dateindex, - exit, - )?; - - self.quarterindex_to_first_monthindex.compute_coarser( - starting_monthindex, - &self.monthindex_to_quarterindex, - exit, - )?; - - self.quarterindex_to_quarterindex.compute_from_index( - starting_quarterindex, - &self.quarterindex_to_first_monthindex, - exit, - )?; - - self.quarterindex_to_monthindex_count.compute_count_from_indexes( - 
starting_quarterindex, - &self.quarterindex_to_first_monthindex, - &self.monthindex_to_monthindex, - exit, - )?; - - // Semester - let starting_semesterindex = self - .monthindex_to_semesterindex - .into_iter() - .get(starting_monthindex) - .unwrap_or_default(); - - self.monthindex_to_semesterindex.compute_from_index( - starting_monthindex, - &self.monthindex_to_first_dateindex, - exit, - )?; - - self.semesterindex_to_first_monthindex.compute_coarser( - starting_monthindex, - &self.monthindex_to_semesterindex, - exit, - )?; - - self.semesterindex_to_semesterindex.compute_from_index( - starting_semesterindex, - &self.semesterindex_to_first_monthindex, - exit, - )?; - - self.semesterindex_to_monthindex_count.compute_count_from_indexes( - starting_semesterindex, - &self.semesterindex_to_first_monthindex, - &self.monthindex_to_monthindex, - exit, - )?; - - // Year - let starting_yearindex = self - .monthindex_to_yearindex - .into_iter() - .get(starting_monthindex) - .unwrap_or_default(); - - self.monthindex_to_yearindex.compute_from_index( - starting_monthindex, - &self.monthindex_to_first_dateindex, - exit, - )?; - - self.yearindex_to_first_monthindex.compute_coarser( - starting_monthindex, - &self.monthindex_to_yearindex, - exit, - )?; - - self.yearindex_to_yearindex.compute_from_index( - starting_yearindex, - &self.yearindex_to_first_monthindex, - exit, - )?; - - self.yearindex_to_monthindex_count.compute_count_from_indexes( - starting_yearindex, - &self.yearindex_to_first_monthindex, - &self.monthindex_to_monthindex, - exit, - )?; - - // Decade - let starting_decadeindex = self - .yearindex_to_decadeindex - .into_iter() - .get(starting_yearindex) - .unwrap_or_default(); - - self.yearindex_to_decadeindex.compute_from_index( - starting_yearindex, - &self.yearindex_to_first_monthindex, - exit, - )?; - - self.decadeindex_to_first_yearindex.compute_coarser( - starting_yearindex, - &self.yearindex_to_decadeindex, - exit, - )?; - - 
self.decadeindex_to_decadeindex.compute_from_index( - starting_decadeindex, - &self.decadeindex_to_first_yearindex, - exit, - )?; - - self.decadeindex_to_yearindex_count.compute_count_from_indexes( - starting_decadeindex, - &self.decadeindex_to_first_yearindex, - &self.yearindex_to_yearindex, - exit, - )?; - - Ok(StartingTimeIndexes { - dateindex: starting_dateindex, - weekindex: starting_weekindex, - monthindex: starting_monthindex, - quarterindex: starting_quarterindex, - semesterindex: starting_semesterindex, - yearindex: starting_yearindex, - decadeindex: starting_decadeindex, - }) - } -} diff --git a/crates/brk_computer/src/indexes/time/import.rs b/crates/brk_computer/src/indexes/time/import.rs deleted file mode 100644 index 4cbc44dae..000000000 --- a/crates/brk_computer/src/indexes/time/import.rs +++ /dev/null @@ -1,40 +0,0 @@ -use brk_error::Result; -use brk_types::Version; -use vecdb::{Database, EagerVec, ImportableVec}; - -use super::Vecs; - -impl Vecs { - pub fn forced_import(db: &Database, version: Version) -> Result { - Ok(Self { - dateindex_to_date: EagerVec::forced_import(db, "date", version)?, - dateindex_to_dateindex: EagerVec::forced_import(db, "dateindex", version)?, - dateindex_to_first_height: EagerVec::forced_import(db, "first_height", version)?, - dateindex_to_height_count: EagerVec::forced_import(db, "height_count", version)?, - dateindex_to_monthindex: EagerVec::forced_import(db, "monthindex", version)?, - dateindex_to_weekindex: EagerVec::forced_import(db, "weekindex", version)?, - weekindex_to_dateindex_count: EagerVec::forced_import(db, "dateindex_count", version)?, - weekindex_to_first_dateindex: EagerVec::forced_import(db, "first_dateindex", version)?, - weekindex_to_weekindex: EagerVec::forced_import(db, "weekindex", version)?, - monthindex_to_dateindex_count: EagerVec::forced_import(db, "dateindex_count", version)?, - monthindex_to_first_dateindex: EagerVec::forced_import(db, "first_dateindex", version)?, - monthindex_to_monthindex: 
EagerVec::forced_import(db, "monthindex", version)?, - monthindex_to_quarterindex: EagerVec::forced_import(db, "quarterindex", version)?, - monthindex_to_semesterindex: EagerVec::forced_import(db, "semesterindex", version)?, - monthindex_to_yearindex: EagerVec::forced_import(db, "yearindex", version)?, - quarterindex_to_first_monthindex: EagerVec::forced_import(db, "first_monthindex", version)?, - quarterindex_to_monthindex_count: EagerVec::forced_import(db, "monthindex_count", version)?, - quarterindex_to_quarterindex: EagerVec::forced_import(db, "quarterindex", version)?, - semesterindex_to_first_monthindex: EagerVec::forced_import(db, "first_monthindex", version)?, - semesterindex_to_monthindex_count: EagerVec::forced_import(db, "monthindex_count", version)?, - semesterindex_to_semesterindex: EagerVec::forced_import(db, "semesterindex", version)?, - yearindex_to_decadeindex: EagerVec::forced_import(db, "decadeindex", version)?, - yearindex_to_first_monthindex: EagerVec::forced_import(db, "first_monthindex", version)?, - yearindex_to_monthindex_count: EagerVec::forced_import(db, "monthindex_count", version)?, - yearindex_to_yearindex: EagerVec::forced_import(db, "yearindex", version)?, - decadeindex_to_decadeindex: EagerVec::forced_import(db, "decadeindex", version)?, - decadeindex_to_first_yearindex: EagerVec::forced_import(db, "first_yearindex", version)?, - decadeindex_to_yearindex_count: EagerVec::forced_import(db, "yearindex_count", version)?, - }) - } -} diff --git a/crates/brk_computer/src/indexes/time/mod.rs b/crates/brk_computer/src/indexes/time/mod.rs deleted file mode 100644 index 1136f9ebd..000000000 --- a/crates/brk_computer/src/indexes/time/mod.rs +++ /dev/null @@ -1,5 +0,0 @@ -mod compute; -mod import; -mod vecs; - -pub use vecs::Vecs; diff --git a/crates/brk_computer/src/indexes/time/vecs.rs b/crates/brk_computer/src/indexes/time/vecs.rs deleted file mode 100644 index 58d284069..000000000 --- a/crates/brk_computer/src/indexes/time/vecs.rs +++ 
/dev/null @@ -1,48 +0,0 @@ -use brk_traversable::Traversable; -use brk_types::{ - Date, DateIndex, DecadeIndex, Height, MonthIndex, QuarterIndex, SemesterIndex, StoredU64, - WeekIndex, YearIndex, -}; -use vecdb::{EagerVec, PcoVec}; - -#[derive(Clone, Traversable)] -pub struct Vecs { - pub dateindex_to_date: EagerVec>, - pub dateindex_to_dateindex: EagerVec>, - pub dateindex_to_first_height: EagerVec>, - pub dateindex_to_height_count: EagerVec>, - pub dateindex_to_monthindex: EagerVec>, - pub dateindex_to_weekindex: EagerVec>, - pub weekindex_to_dateindex_count: EagerVec>, - pub weekindex_to_first_dateindex: EagerVec>, - pub weekindex_to_weekindex: EagerVec>, - pub monthindex_to_dateindex_count: EagerVec>, - pub monthindex_to_first_dateindex: EagerVec>, - pub monthindex_to_monthindex: EagerVec>, - pub monthindex_to_quarterindex: EagerVec>, - pub monthindex_to_semesterindex: EagerVec>, - pub monthindex_to_yearindex: EagerVec>, - pub quarterindex_to_first_monthindex: EagerVec>, - pub quarterindex_to_monthindex_count: EagerVec>, - pub quarterindex_to_quarterindex: EagerVec>, - pub semesterindex_to_first_monthindex: EagerVec>, - pub semesterindex_to_monthindex_count: EagerVec>, - pub semesterindex_to_semesterindex: EagerVec>, - pub yearindex_to_decadeindex: EagerVec>, - pub yearindex_to_first_monthindex: EagerVec>, - pub yearindex_to_monthindex_count: EagerVec>, - pub yearindex_to_yearindex: EagerVec>, - pub decadeindex_to_decadeindex: EagerVec>, - pub decadeindex_to_first_yearindex: EagerVec>, - pub decadeindex_to_yearindex_count: EagerVec>, -} - -pub struct StartingTimeIndexes { - pub dateindex: DateIndex, - pub weekindex: WeekIndex, - pub monthindex: MonthIndex, - pub quarterindex: QuarterIndex, - pub semesterindex: SemesterIndex, - pub yearindex: YearIndex, - pub decadeindex: DecadeIndex, -} diff --git a/crates/brk_computer/src/indexes/transaction/compute.rs b/crates/brk_computer/src/indexes/transaction/compute.rs deleted file mode 100644 index f30401e63..000000000 
--- a/crates/brk_computer/src/indexes/transaction/compute.rs +++ /dev/null @@ -1,25 +0,0 @@ -use brk_error::Result; -use brk_indexer::Indexer; -use vecdb::Exit; - -use super::Vecs; - -impl Vecs { - pub fn compute(&mut self, indexer: &Indexer, starting_indexes: &brk_indexer::Indexes, exit: &Exit) -> Result<()> { - self.txindex_to_input_count.compute_count_from_indexes( - starting_indexes.txindex, - &indexer.vecs.tx.txindex_to_first_txinindex, - &indexer.vecs.txin.txinindex_to_outpoint, - exit, - )?; - - self.txindex_to_output_count.compute_count_from_indexes( - starting_indexes.txindex, - &indexer.vecs.tx.txindex_to_first_txoutindex, - &indexer.vecs.txout.txoutindex_to_value, - exit, - )?; - - Ok(()) - } -} diff --git a/crates/brk_computer/src/indexes/transaction/import.rs b/crates/brk_computer/src/indexes/transaction/import.rs deleted file mode 100644 index 9dfe8fcca..000000000 --- a/crates/brk_computer/src/indexes/transaction/import.rs +++ /dev/null @@ -1,33 +0,0 @@ -use brk_error::Result; -use brk_indexer::Indexer; -use brk_types::Version; -use vecdb::{Database, EagerVec, ImportableVec, IterableCloneableVec, LazyVecFrom1}; - -use super::Vecs; - -impl Vecs { - pub fn forced_import(db: &Database, version: Version, indexer: &Indexer) -> Result { - Ok(Self { - txindex_to_input_count: EagerVec::forced_import(db, "input_count", version)?, - txindex_to_output_count: EagerVec::forced_import(db, "output_count", version)?, - txindex_to_txindex: LazyVecFrom1::init( - "txindex", - version, - indexer.vecs.tx.txindex_to_txid.boxed_clone(), - |index, _| Some(index), - ), - txinindex_to_txinindex: LazyVecFrom1::init( - "txinindex", - version, - indexer.vecs.txin.txinindex_to_outpoint.boxed_clone(), - |index, _| Some(index), - ), - txoutindex_to_txoutindex: LazyVecFrom1::init( - "txoutindex", - version, - indexer.vecs.txout.txoutindex_to_value.boxed_clone(), - |index, _| Some(index), - ), - }) - } -} diff --git a/crates/brk_computer/src/indexes/transaction/mod.rs 
b/crates/brk_computer/src/indexes/transaction/mod.rs deleted file mode 100644 index 1136f9ebd..000000000 --- a/crates/brk_computer/src/indexes/transaction/mod.rs +++ /dev/null @@ -1,5 +0,0 @@ -mod compute; -mod import; -mod vecs; - -pub use vecs::Vecs; diff --git a/crates/brk_computer/src/indexes/transaction/vecs.rs b/crates/brk_computer/src/indexes/transaction/vecs.rs deleted file mode 100644 index c610e69b4..000000000 --- a/crates/brk_computer/src/indexes/transaction/vecs.rs +++ /dev/null @@ -1,12 +0,0 @@ -use brk_traversable::Traversable; -use brk_types::{OutPoint, Sats, StoredU64, TxInIndex, TxIndex, TxOutIndex, Txid}; -use vecdb::{EagerVec, LazyVecFrom1, PcoVec}; - -#[derive(Clone, Traversable)] -pub struct Vecs { - pub txindex_to_input_count: EagerVec>, - pub txindex_to_output_count: EagerVec>, - pub txindex_to_txindex: LazyVecFrom1, - pub txinindex_to_txinindex: LazyVecFrom1, - pub txoutindex_to_txoutindex: LazyVecFrom1, -} diff --git a/crates/brk_computer/src/indexes/txindex.rs b/crates/brk_computer/src/indexes/txindex.rs new file mode 100644 index 000000000..50a9aa361 --- /dev/null +++ b/crates/brk_computer/src/indexes/txindex.rs @@ -0,0 +1,28 @@ +use brk_indexer::Indexer; +use brk_traversable::Traversable; +use brk_types::{StoredU64, TxIndex, Txid, Version}; +use vecdb::{Database, EagerVec, ImportableVec, IterableCloneableVec, LazyVecFrom1, PcoVec}; + +use brk_error::Result; + +#[derive(Clone, Traversable)] +pub struct Vecs { + pub identity: LazyVecFrom1, + pub input_count: EagerVec>, + pub output_count: EagerVec>, +} + +impl Vecs { + pub fn forced_import(db: &Database, version: Version, indexer: &Indexer) -> Result { + Ok(Self { + identity: LazyVecFrom1::init( + "txindex", + version, + indexer.vecs.transactions.txid.boxed_clone(), + |index, _| Some(index), + ), + input_count: EagerVec::forced_import(db, "txindex_input_count", version)?, + output_count: EagerVec::forced_import(db, "txindex_output_count", version)?, + }) + } +} diff --git 
a/crates/brk_computer/src/indexes/txinindex.rs b/crates/brk_computer/src/indexes/txinindex.rs new file mode 100644 index 000000000..d281dc762 --- /dev/null +++ b/crates/brk_computer/src/indexes/txinindex.rs @@ -0,0 +1,22 @@ +use brk_indexer::Indexer; +use brk_traversable::Traversable; +use brk_types::{OutPoint, TxInIndex, Version}; +use vecdb::{IterableCloneableVec, LazyVecFrom1}; + +#[derive(Clone, Traversable)] +pub struct Vecs { + pub identity: LazyVecFrom1, +} + +impl Vecs { + pub fn forced_import(version: Version, indexer: &Indexer) -> Self { + Self { + identity: LazyVecFrom1::init( + "txinindex", + version, + indexer.vecs.inputs.outpoint.boxed_clone(), + |index, _| Some(index), + ), + } + } +} diff --git a/crates/brk_computer/src/indexes/txoutindex.rs b/crates/brk_computer/src/indexes/txoutindex.rs new file mode 100644 index 000000000..1baff3b25 --- /dev/null +++ b/crates/brk_computer/src/indexes/txoutindex.rs @@ -0,0 +1,22 @@ +use brk_indexer::Indexer; +use brk_traversable::Traversable; +use brk_types::{Sats, TxOutIndex, Version}; +use vecdb::{IterableCloneableVec, LazyVecFrom1}; + +#[derive(Clone, Traversable)] +pub struct Vecs { + pub identity: LazyVecFrom1, +} + +impl Vecs { + pub fn forced_import(version: Version, indexer: &Indexer) -> Self { + Self { + identity: LazyVecFrom1::init( + "txoutindex", + version, + indexer.vecs.outputs.value.boxed_clone(), + |index, _| Some(index), + ), + } + } +} diff --git a/crates/brk_computer/src/indexes/weekindex.rs b/crates/brk_computer/src/indexes/weekindex.rs new file mode 100644 index 000000000..f05e388ba --- /dev/null +++ b/crates/brk_computer/src/indexes/weekindex.rs @@ -0,0 +1,22 @@ +use brk_traversable::Traversable; +use brk_types::{DateIndex, StoredU64, Version, WeekIndex}; +use vecdb::{Database, EagerVec, ImportableVec, PcoVec}; + +use brk_error::Result; + +#[derive(Clone, Traversable)] +pub struct Vecs { + pub identity: EagerVec>, + pub first_dateindex: EagerVec>, + pub dateindex_count: EagerVec>, +} + +impl 
Vecs { + pub fn forced_import(db: &Database, version: Version) -> Result { + Ok(Self { + identity: EagerVec::forced_import(db, "weekindex", version)?, + first_dateindex: EagerVec::forced_import(db, "weekindex_first_dateindex", version)?, + dateindex_count: EagerVec::forced_import(db, "weekindex_dateindex_count", version)?, + }) + } +} diff --git a/crates/brk_computer/src/indexes/yearindex.rs b/crates/brk_computer/src/indexes/yearindex.rs new file mode 100644 index 000000000..ffb166cc2 --- /dev/null +++ b/crates/brk_computer/src/indexes/yearindex.rs @@ -0,0 +1,24 @@ +use brk_traversable::Traversable; +use brk_types::{DecadeIndex, MonthIndex, StoredU64, Version, YearIndex}; +use vecdb::{Database, EagerVec, ImportableVec, PcoVec}; + +use brk_error::Result; + +#[derive(Clone, Traversable)] +pub struct Vecs { + pub identity: EagerVec>, + pub first_monthindex: EagerVec>, + pub monthindex_count: EagerVec>, + pub decadeindex: EagerVec>, +} + +impl Vecs { + pub fn forced_import(db: &Database, version: Version) -> Result { + Ok(Self { + identity: EagerVec::forced_import(db, "yearindex", version)?, + first_monthindex: EagerVec::forced_import(db, "yearindex_first_monthindex", version)?, + monthindex_count: EagerVec::forced_import(db, "yearindex_monthindex_count", version)?, + decadeindex: EagerVec::forced_import(db, "yearindex_decadeindex", version)?, + }) + } +} diff --git a/crates/brk_computer/src/inputs/count/compute.rs b/crates/brk_computer/src/inputs/count/compute.rs index 90dcbc690..2a38e4f34 100644 --- a/crates/brk_computer/src/inputs/count/compute.rs +++ b/crates/brk_computer/src/inputs/count/compute.rs @@ -3,7 +3,7 @@ use brk_indexer::Indexer; use vecdb::Exit; use super::Vecs; -use crate::{indexes, ComputeIndexes}; +use crate::{ComputeIndexes, indexes}; impl Vecs { pub fn compute( @@ -13,14 +13,13 @@ impl Vecs { starting_indexes: &ComputeIndexes, exit: &Exit, ) -> Result<()> { - self.indexes_to_count.derive_from( + self.derive_from( indexer, indexes, starting_indexes, 
- &indexes.transaction.txindex_to_input_count, + &indexes.txindex.input_count, exit, )?; - Ok(()) } } diff --git a/crates/brk_computer/src/inputs/count/import.rs b/crates/brk_computer/src/inputs/count/import.rs index 1407f6814..de3d38af4 100644 --- a/crates/brk_computer/src/inputs/count/import.rs +++ b/crates/brk_computer/src/inputs/count/import.rs @@ -7,9 +7,11 @@ use crate::{indexes, internal::DerivedTxFull}; impl Vecs { pub fn forced_import(db: &Database, version: Version, indexes: &indexes::Vecs) -> Result { - let indexes_to_count = - DerivedTxFull::forced_import(db, "input_count", version, indexes)?; - - Ok(Self { indexes_to_count }) + Ok(Self(DerivedTxFull::forced_import( + db, + "input_count", + version, + indexes, + )?)) } } diff --git a/crates/brk_computer/src/inputs/count/vecs.rs b/crates/brk_computer/src/inputs/count/vecs.rs index 855957b0d..13cc020e8 100644 --- a/crates/brk_computer/src/inputs/count/vecs.rs +++ b/crates/brk_computer/src/inputs/count/vecs.rs @@ -1,9 +1,8 @@ use brk_traversable::Traversable; use brk_types::StoredU64; +use derive_more::{Deref, DerefMut}; use crate::internal::DerivedTxFull; -#[derive(Clone, Traversable)] -pub struct Vecs { - pub indexes_to_count: DerivedTxFull, -} +#[derive(Clone, Deref, DerefMut, Traversable)] +pub struct Vecs(pub DerivedTxFull); diff --git a/crates/brk_computer/src/inputs/spent/compute.rs b/crates/brk_computer/src/inputs/spent/compute.rs index dc4e4633b..63e1cfc0b 100644 --- a/crates/brk_computer/src/inputs/spent/compute.rs +++ b/crates/brk_computer/src/inputs/spent/compute.rs @@ -1,7 +1,7 @@ use brk_error::Result; use brk_indexer::Indexer; use brk_types::{Sats, TxInIndex, TxIndex, TxOutIndex, Vout}; -use log::info; +use tracing::info; use vecdb::{AnyStoredVec, AnyVec, Database, Exit, GenericStoredVec, TypedVecIterator, VecIndex}; use super::Vecs; @@ -18,21 +18,20 @@ impl Vecs { exit: &Exit, ) -> Result<()> { // Validate computed versions against dependencies - let dep_version = 
indexer.vecs.txin.txinindex_to_outpoint.version() - + indexer.vecs.tx.txindex_to_first_txoutindex.version() - + indexer.vecs.txout.txoutindex_to_value.version(); - self.txinindex_to_txoutindex - .validate_computed_version_or_reset(dep_version)?; - self.txinindex_to_value + let dep_version = indexer.vecs.inputs.outpoint.version() + + indexer.vecs.transactions.first_txoutindex.version() + + indexer.vecs.outputs.value.version(); + self.txoutindex .validate_computed_version_or_reset(dep_version)?; + self.value.validate_computed_version_or_reset(dep_version)?; - let target = indexer.vecs.txin.txinindex_to_outpoint.len(); + let target = indexer.vecs.inputs.outpoint.len(); if target == 0 { return Ok(()); } - let len1 = self.txinindex_to_txoutindex.len(); - let len2 = self.txinindex_to_value.len(); + let len1 = self.txoutindex.len(); + let len2 = self.value.len(); let starting = starting_indexes.txinindex.to_usize(); let min = len1.min(len2).min(starting); @@ -40,9 +39,9 @@ impl Vecs { return Ok(()); } - let mut outpoint_iter = indexer.vecs.txin.txinindex_to_outpoint.iter()?; - let mut first_txoutindex_iter = indexer.vecs.tx.txindex_to_first_txoutindex.iter()?; - let mut value_iter = indexer.vecs.txout.txoutindex_to_value.iter()?; + let mut outpoint_iter = indexer.vecs.inputs.outpoint.iter()?; + let mut first_txoutindex_iter = indexer.vecs.transactions.first_txoutindex.iter()?; + let mut value_iter = indexer.vecs.outputs.value.iter()?; let mut entries: Vec = Vec::with_capacity(BATCH_SIZE); let mut batch_start = min; @@ -81,10 +80,9 @@ impl Vecs { entries.sort_unstable_by_key(|e| e.txinindex); for entry in &entries { - self.txinindex_to_txoutindex + self.txoutindex .truncate_push(entry.txinindex, entry.txoutindex)?; - self.txinindex_to_value - .truncate_push(entry.txinindex, entry.value)?; + self.value.truncate_push(entry.txinindex, entry.value)?; } if batch_end < target { @@ -92,8 +90,8 @@ impl Vecs { } let _lock = exit.lock(); - self.txinindex_to_txoutindex.write()?; - 
self.txinindex_to_value.write()?; + self.txoutindex.write()?; + self.value.write()?; db.flush()?; batch_start = batch_end; diff --git a/crates/brk_computer/src/inputs/spent/import.rs b/crates/brk_computer/src/inputs/spent/import.rs index 388ab0159..5b55fc9fb 100644 --- a/crates/brk_computer/src/inputs/spent/import.rs +++ b/crates/brk_computer/src/inputs/spent/import.rs @@ -7,8 +7,8 @@ use super::Vecs; impl Vecs { pub fn forced_import(db: &Database, version: Version) -> Result { Ok(Self { - txinindex_to_txoutindex: PcoVec::forced_import(db, "txoutindex", version)?, - txinindex_to_value: PcoVec::forced_import(db, "value", version)?, + txoutindex: PcoVec::forced_import(db, "txoutindex", version)?, + value: PcoVec::forced_import(db, "value", version)?, }) } } diff --git a/crates/brk_computer/src/inputs/spent/vecs.rs b/crates/brk_computer/src/inputs/spent/vecs.rs index 3abdee1e1..241a9fb82 100644 --- a/crates/brk_computer/src/inputs/spent/vecs.rs +++ b/crates/brk_computer/src/inputs/spent/vecs.rs @@ -4,6 +4,6 @@ use vecdb::PcoVec; #[derive(Clone, Traversable)] pub struct Vecs { - pub txinindex_to_txoutindex: PcoVec, - pub txinindex_to_value: PcoVec, + pub txoutindex: PcoVec, + pub value: PcoVec, } diff --git a/crates/brk_computer/src/internal/aggregation/average.rs b/crates/brk_computer/src/internal/aggregation/average.rs index da50c2560..8b602ae36 100644 --- a/crates/brk_computer/src/internal/aggregation/average.rs +++ b/crates/brk_computer/src/internal/aggregation/average.rs @@ -11,7 +11,7 @@ use crate::internal::ComputedVecValue; const VERSION: Version = Version::ZERO; #[derive(Clone, Deref, DerefMut, Traversable)] -#[traversable(wrap = "average")] +#[traversable(transparent)] pub struct LazyAverage(pub LazyVecFrom2) where I: VecIndex, @@ -31,9 +31,27 @@ where version: Version, source: IterableBoxedVec, len_source: IterableBoxedVec, + ) -> Self { + Self::from_source_inner(&format!("{name}_average"), version, source, len_source) + } + + pub fn from_source_raw( + name: 
&str, + version: Version, + source: IterableBoxedVec, + len_source: IterableBoxedVec, + ) -> Self { + Self::from_source_inner(name, version, source, len_source) + } + + fn from_source_inner( + name: &str, + version: Version, + source: IterableBoxedVec, + len_source: IterableBoxedVec, ) -> Self { Self(LazyVecFrom2::init( - &format!("{name}_average"), + name, version + VERSION, source, len_source, diff --git a/crates/brk_computer/src/internal/aggregation/cumulative.rs b/crates/brk_computer/src/internal/aggregation/cumulative.rs index 6913e1f13..287552b59 100644 --- a/crates/brk_computer/src/internal/aggregation/cumulative.rs +++ b/crates/brk_computer/src/internal/aggregation/cumulative.rs @@ -11,7 +11,7 @@ use crate::internal::ComputedVecValue; const VERSION: Version = Version::ZERO; #[derive(Clone, Deref, DerefMut, Traversable)] -#[traversable(wrap = "cumulative")] +#[traversable(transparent)] pub struct LazyCumulative(pub LazyVecFrom2) where I: VecIndex, diff --git a/crates/brk_computer/src/internal/aggregation/first.rs b/crates/brk_computer/src/internal/aggregation/first.rs index 7021ca33e..67a6edd96 100644 --- a/crates/brk_computer/src/internal/aggregation/first.rs +++ b/crates/brk_computer/src/internal/aggregation/first.rs @@ -11,7 +11,7 @@ use crate::internal::ComputedVecValue; const VERSION: Version = Version::ZERO; #[derive(Clone, Deref, DerefMut, Traversable)] -#[traversable(wrap = "first")] +#[traversable(transparent)] pub struct LazyFirst(pub LazyVecFrom2) where I: VecIndex, diff --git a/crates/brk_computer/src/internal/aggregation/last.rs b/crates/brk_computer/src/internal/aggregation/last.rs index 9124b08d7..989646863 100644 --- a/crates/brk_computer/src/internal/aggregation/last.rs +++ b/crates/brk_computer/src/internal/aggregation/last.rs @@ -11,7 +11,7 @@ use crate::internal::ComputedVecValue; const VERSION: Version = Version::ZERO; #[derive(Clone, Deref, DerefMut, Traversable)] -#[traversable(wrap = "last")] +#[traversable(transparent)] pub struct 
LazyLast(pub LazyVecFrom2) where I: VecIndex, diff --git a/crates/brk_computer/src/internal/aggregation/max.rs b/crates/brk_computer/src/internal/aggregation/max.rs index 836544ffd..0da0b25ef 100644 --- a/crates/brk_computer/src/internal/aggregation/max.rs +++ b/crates/brk_computer/src/internal/aggregation/max.rs @@ -11,7 +11,7 @@ use crate::internal::ComputedVecValue; const VERSION: Version = Version::ZERO; #[derive(Clone, Deref, DerefMut, Traversable)] -#[traversable(wrap = "max")] +#[traversable(transparent)] pub struct LazyMax(pub LazyVecFrom2) where I: VecIndex, diff --git a/crates/brk_computer/src/internal/aggregation/min.rs b/crates/brk_computer/src/internal/aggregation/min.rs index 7ae536cea..bbf8491dd 100644 --- a/crates/brk_computer/src/internal/aggregation/min.rs +++ b/crates/brk_computer/src/internal/aggregation/min.rs @@ -11,7 +11,7 @@ use crate::internal::ComputedVecValue; const VERSION: Version = Version::ZERO; #[derive(Clone, Deref, DerefMut, Traversable)] -#[traversable(wrap = "min")] +#[traversable(transparent)] pub struct LazyMin(pub LazyVecFrom2) where I: VecIndex, diff --git a/crates/brk_computer/src/internal/aggregation/sum.rs b/crates/brk_computer/src/internal/aggregation/sum.rs index f97dff1d2..449db2dce 100644 --- a/crates/brk_computer/src/internal/aggregation/sum.rs +++ b/crates/brk_computer/src/internal/aggregation/sum.rs @@ -11,7 +11,7 @@ use crate::internal::ComputedVecValue; const VERSION: Version = Version::ZERO; #[derive(Clone, Deref, DerefMut, Traversable)] -#[traversable(wrap = "sum")] +#[traversable(transparent)] pub struct LazySum(pub LazyVecFrom2) where I: VecIndex, @@ -31,9 +31,28 @@ where version: Version, source: IterableBoxedVec, len_source: IterableBoxedVec, + ) -> Self { + Self::from_source_inner(&format!("{name}_sum"), version, source, len_source) + } + + /// Create from source without adding _sum suffix. 
+ pub fn from_source_raw( + name: &str, + version: Version, + source: IterableBoxedVec, + len_source: IterableBoxedVec, + ) -> Self { + Self::from_source_inner(name, version, source, len_source) + } + + fn from_source_inner( + name: &str, + version: Version, + source: IterableBoxedVec, + len_source: IterableBoxedVec, ) -> Self { Self(LazyVecFrom2::init( - &format!("{name}_sum"), + name, version + VERSION, source, len_source, diff --git a/crates/brk_computer/src/internal/aggregation/sum_cum.rs b/crates/brk_computer/src/internal/aggregation/sum_cum.rs index dcd6ad6b4..183e64547 100644 --- a/crates/brk_computer/src/internal/aggregation/sum_cum.rs +++ b/crates/brk_computer/src/internal/aggregation/sum_cum.rs @@ -47,5 +47,24 @@ where ), } } + + /// Create from sources without adding _sum suffix to sum vec. + pub fn from_sources_sum_raw( + name: &str, + version: Version, + sum_source: IterableBoxedVec, + cumulative_source: IterableBoxedVec, + len_source: IterableBoxedVec, + ) -> Self { + Self { + sum: LazySum::from_source_raw(name, version + VERSION, sum_source, len_source.clone()), + cumulative: LazyCumulative::from_source( + name, + version + VERSION, + cumulative_source, + len_source, + ), + } + } } diff --git a/crates/brk_computer/src/internal/compute.rs b/crates/brk_computer/src/internal/compute.rs index e6312045b..a673466d4 100644 --- a/crates/brk_computer/src/internal/compute.rs +++ b/crates/brk_computer/src/internal/compute.rs @@ -55,46 +55,17 @@ where { let combined_version = source.version() + first_indexes.version() + count_indexes.version(); - let mut starting_index = max_from; - - if let Some(ref mut v) = first { - starting_index = validate_and_start(v, combined_version, starting_index)?; - } - if let Some(ref mut v) = last { - starting_index = validate_and_start(v, combined_version, starting_index)?; - } - if let Some(ref mut v) = min { - starting_index = validate_and_start(v, combined_version, starting_index)?; - } - if let Some(ref mut v) = max { - 
starting_index = validate_and_start(v, combined_version, starting_index)?; - } - if let Some(ref mut v) = average { - starting_index = validate_and_start(v, combined_version, starting_index)?; - } - if let Some(ref mut v) = sum { - starting_index = validate_and_start(v, combined_version, starting_index)?; - } - if let Some(ref mut v) = cumulative { - starting_index = validate_and_start(v, combined_version, starting_index)?; - } - if let Some(ref mut v) = median { - starting_index = validate_and_start(v, combined_version, starting_index)?; - } - if let Some(ref mut v) = pct10 { - starting_index = validate_and_start(v, combined_version, starting_index)?; - } - if let Some(ref mut v) = pct25 { - starting_index = validate_and_start(v, combined_version, starting_index)?; - } - if let Some(ref mut v) = pct75 { - starting_index = validate_and_start(v, combined_version, starting_index)?; - } - if let Some(ref mut v) = pct90 { - starting_index = validate_and_start(v, combined_version, starting_index)?; + macro_rules! 
validate_vec { + ($($vec:ident),*) => {{ + let mut idx = max_from; + $(if let Some(ref mut v) = $vec { + idx = validate_and_start(v, combined_version, idx)?; + })* + idx + }}; } - let index = starting_index; + let index = validate_vec!(first, last, min, max, average, sum, cumulative, median, pct10, pct25, pct75, pct90); let needs_first = first.is_some(); let needs_last = last.is_some(); @@ -239,43 +210,14 @@ where let _lock = exit.lock(); - if let Some(v) = first { - v.write()?; - } - if let Some(v) = last { - v.write()?; - } - if let Some(v) = min { - v.write()?; - } - if let Some(v) = max { - v.write()?; - } - if let Some(v) = average { - v.write()?; - } - if let Some(v) = sum { - v.write()?; - } - if let Some(v) = cumulative { - v.write()?; - } - if let Some(v) = median { - v.write()?; - } - if let Some(v) = pct10 { - v.write()?; - } - if let Some(v) = pct25 { - v.write()?; - } - if let Some(v) = pct75 { - v.write()?; - } - if let Some(v) = pct90 { - v.write()?; + macro_rules! write_vec { + ($($vec:ident),*) => { + $(if let Some(v) = $vec { v.write()?; })* + }; } + write_vec!(first, last, min, max, average, sum, cumulative, median, pct10, pct25, pct75, pct90); + Ok(()) } @@ -351,31 +293,17 @@ where { let combined_version = first_indexes.version() + count_indexes.version(); - let mut starting_index = max_from; - - if let Some(ref mut v) = first { - starting_index = validate_and_start(v, combined_version, starting_index)?; - } - if let Some(ref mut v) = last { - starting_index = validate_and_start(v, combined_version, starting_index)?; - } - if let Some(ref mut v) = min { - starting_index = validate_and_start(v, combined_version, starting_index)?; - } - if let Some(ref mut v) = max { - starting_index = validate_and_start(v, combined_version, starting_index)?; - } - if let Some(ref mut v) = average { - starting_index = validate_and_start(v, combined_version, starting_index)?; - } - if let Some(ref mut v) = sum { - starting_index = validate_and_start(v, 
combined_version, starting_index)?; - } - if let Some(ref mut v) = cumulative { - starting_index = validate_and_start(v, combined_version, starting_index)?; + macro_rules! validate_vec { + ($($vec:ident),*) => {{ + let mut idx = max_from; + $(if let Some(ref mut v) = $vec { + idx = validate_and_start(v, combined_version, idx)?; + })* + idx + }}; } - let index = starting_index; + let index = validate_vec!(first, last, min, max, average, sum, cumulative); let needs_first = first.is_some(); let needs_last = last.is_some(); @@ -495,27 +423,13 @@ where let _lock = exit.lock(); - if let Some(v) = first { - v.write()?; - } - if let Some(v) = last { - v.write()?; - } - if let Some(v) = min { - v.write()?; - } - if let Some(v) = max { - v.write()?; - } - if let Some(v) = average { - v.write()?; - } - if let Some(v) = sum { - v.write()?; - } - if let Some(v) = cumulative { - v.write()?; + macro_rules! write_vec { + ($($vec:ident),*) => { + $(if let Some(v) = $vec { v.write()?; })* + }; } + write_vec!(first, last, min, max, average, sum, cumulative); + Ok(()) } diff --git a/crates/brk_computer/src/internal/computed/block/full.rs b/crates/brk_computer/src/internal/computed/block/full.rs index a563e44b6..c281d7f62 100644 --- a/crates/brk_computer/src/internal/computed/block/full.rs +++ b/crates/brk_computer/src/internal/computed/block/full.rs @@ -22,7 +22,6 @@ where pub height: EagerVec>, #[deref] #[deref_mut] - #[traversable(flatten)] pub rest: DerivedComputedBlockFull, } diff --git a/crates/brk_computer/src/internal/computed/block/height_date_bytes.rs b/crates/brk_computer/src/internal/computed/block/height_date_bytes.rs new file mode 100644 index 000000000..97283f85e --- /dev/null +++ b/crates/brk_computer/src/internal/computed/block/height_date_bytes.rs @@ -0,0 +1,37 @@ +//! HeightDateBytes - height + dateindex BytesVec storage. +//! +//! Use this for simple cases where both height and dateindex are stored BytesVecs +//! without any lazy derivations. For OHLC-type data. 
+ +use brk_error::Result; +use brk_traversable::Traversable; +use brk_types::{DateIndex, Height, Version}; +use schemars::JsonSchema; +use serde::Serialize; +use vecdb::{BytesVec, BytesVecValue, Database, Formattable, ImportableVec}; + +#[derive(Clone, Traversable)] +#[traversable(merge)] +pub struct HeightDateBytes +where + T: BytesVecValue + Formattable + Serialize + JsonSchema, +{ + pub height: BytesVec, + pub dateindex: BytesVec, +} + +const VERSION: Version = Version::ZERO; + +impl HeightDateBytes +where + T: BytesVecValue + Formattable + Serialize + JsonSchema + 'static, +{ + pub fn forced_import(db: &Database, name: &str, version: Version) -> Result { + let v = version + VERSION; + + Ok(Self { + height: BytesVec::forced_import(db, name, v)?, + dateindex: BytesVec::forced_import(db, name, v)?, + }) + } +} diff --git a/crates/brk_computer/src/internal/computed/block/height_date_first.rs b/crates/brk_computer/src/internal/computed/block/height_date_first.rs new file mode 100644 index 000000000..c871c460e --- /dev/null +++ b/crates/brk_computer/src/internal/computed/block/height_date_first.rs @@ -0,0 +1,72 @@ +//! ComputedHeightDateFirst - height storage + dateindex storage + lazy periods. +//! +//! Use this when both height and dateindex are stored EagerVecs with first-value aggregation. 
+ +use brk_error::Result; +use brk_traversable::Traversable; +use brk_types::{DateIndex, DifficultyEpoch, Height, Version}; +use derive_more::{Deref, DerefMut}; +use schemars::JsonSchema; +use vecdb::{Database, EagerVec, Exit, ImportableVec, IterableCloneableVec, PcoVec}; + +use crate::{ComputeIndexes, indexes}; + +use crate::internal::{ComputedDateFirst, ComputedVecValue, LazyFirst}; + +#[derive(Clone, Deref, DerefMut, Traversable)] +#[traversable(merge)] +pub struct ComputedHeightDateFirst +where + T: ComputedVecValue + PartialOrd + JsonSchema, +{ + pub height: EagerVec>, + #[deref] + #[deref_mut] + #[traversable(flatten)] + pub rest: ComputedDateFirst, + pub difficultyepoch: LazyFirst, +} + +const VERSION: Version = Version::ZERO; + +impl ComputedHeightDateFirst +where + T: ComputedVecValue + JsonSchema + 'static, +{ + pub fn forced_import( + db: &Database, + name: &str, + version: Version, + indexes: &indexes::Vecs, + ) -> Result { + let v = version + VERSION; + + let height: EagerVec> = EagerVec::forced_import(db, name, v)?; + let rest = ComputedDateFirst::forced_import(db, name, v, indexes)?; + let difficultyepoch = LazyFirst::from_source( + name, + v, + height.boxed_clone(), + indexes.difficultyepoch.identity.boxed_clone(), + ); + + Ok(Self { + height, + rest, + difficultyepoch, + }) + } + + /// Compute rest (dateindex + periods) with the given compute function. + pub fn compute_rest( + &mut self, + starting_indexes: &ComputeIndexes, + exit: &Exit, + compute: F, + ) -> Result<()> + where + F: FnMut(&mut EagerVec>) -> Result<()>, + { + self.rest.compute_all(starting_indexes, exit, compute) + } +} diff --git a/crates/brk_computer/src/internal/computed/block/height_date_last.rs b/crates/brk_computer/src/internal/computed/block/height_date_last.rs new file mode 100644 index 000000000..d14af0b00 --- /dev/null +++ b/crates/brk_computer/src/internal/computed/block/height_date_last.rs @@ -0,0 +1,72 @@ +//! 
ComputedHeightDateLast - height storage + dateindex storage + lazy periods. +//! +//! Use this when both height and dateindex are stored EagerVecs with last-value aggregation. + +use brk_error::Result; +use brk_traversable::Traversable; +use brk_types::{DateIndex, DifficultyEpoch, Height, Version}; +use derive_more::{Deref, DerefMut}; +use schemars::JsonSchema; +use vecdb::{Database, EagerVec, Exit, ImportableVec, IterableCloneableVec, PcoVec}; + +use crate::{indexes, internal::ComputedDateLast, ComputeIndexes}; + +use crate::internal::{ComputedVecValue, LazyLast}; + +#[derive(Clone, Deref, DerefMut, Traversable)] +#[traversable(merge)] +pub struct ComputedHeightDateLast +where + T: ComputedVecValue + PartialOrd + JsonSchema, +{ + pub height: EagerVec>, + #[deref] + #[deref_mut] + #[traversable(flatten)] + pub rest: ComputedDateLast, + pub difficultyepoch: LazyLast, +} + +const VERSION: Version = Version::ZERO; + +impl ComputedHeightDateLast +where + T: ComputedVecValue + JsonSchema + 'static, +{ + pub fn forced_import( + db: &Database, + name: &str, + version: Version, + indexes: &indexes::Vecs, + ) -> Result { + let v = version + VERSION; + + let height: EagerVec> = EagerVec::forced_import(db, name, v)?; + let rest = ComputedDateLast::forced_import(db, name, v, indexes)?; + let difficultyepoch = LazyLast::from_source( + name, + v, + height.boxed_clone(), + indexes.difficultyepoch.identity.boxed_clone(), + ); + + Ok(Self { + height, + rest, + difficultyepoch, + }) + } + + /// Compute rest (dateindex + periods) with the given compute function. 
+ pub fn compute_rest( + &mut self, + starting_indexes: &ComputeIndexes, + exit: &Exit, + compute: F, + ) -> Result<()> + where + F: FnMut(&mut EagerVec>) -> Result<()>, + { + self.rest.compute_all(starting_indexes, exit, compute) + } +} diff --git a/crates/brk_computer/src/internal/computed/block/height_date_max.rs b/crates/brk_computer/src/internal/computed/block/height_date_max.rs new file mode 100644 index 000000000..fb97dc352 --- /dev/null +++ b/crates/brk_computer/src/internal/computed/block/height_date_max.rs @@ -0,0 +1,72 @@ +//! ComputedHeightDateMax - height storage + dateindex storage + lazy periods. +//! +//! Use this when both height and dateindex are stored EagerVecs with max-value aggregation. + +use brk_error::Result; +use brk_traversable::Traversable; +use brk_types::{DateIndex, DifficultyEpoch, Height, Version}; +use derive_more::{Deref, DerefMut}; +use schemars::JsonSchema; +use vecdb::{Database, EagerVec, Exit, ImportableVec, IterableCloneableVec, PcoVec}; + +use crate::{ComputeIndexes, indexes}; + +use crate::internal::{ComputedDateMax, ComputedVecValue, LazyMax}; + +#[derive(Clone, Deref, DerefMut, Traversable)] +#[traversable(merge)] +pub struct ComputedHeightDateMax +where + T: ComputedVecValue + PartialOrd + JsonSchema, +{ + pub height: EagerVec>, + #[deref] + #[deref_mut] + #[traversable(flatten)] + pub rest: ComputedDateMax, + pub difficultyepoch: LazyMax, +} + +const VERSION: Version = Version::ZERO; + +impl ComputedHeightDateMax +where + T: ComputedVecValue + JsonSchema + 'static, +{ + pub fn forced_import( + db: &Database, + name: &str, + version: Version, + indexes: &indexes::Vecs, + ) -> Result { + let v = version + VERSION; + + let height: EagerVec> = EagerVec::forced_import(db, name, v)?; + let rest = ComputedDateMax::forced_import(db, name, v, indexes)?; + let difficultyepoch = LazyMax::from_source( + name, + v, + height.boxed_clone(), + indexes.difficultyepoch.identity.boxed_clone(), + ); + + Ok(Self { + height, + rest, + 
difficultyepoch, + }) + } + + /// Compute rest (dateindex + periods) with the given compute function. + pub fn compute_rest( + &mut self, + starting_indexes: &ComputeIndexes, + exit: &Exit, + compute: F, + ) -> Result<()> + where + F: FnMut(&mut EagerVec>) -> Result<()>, + { + self.rest.compute_all(starting_indexes, exit, compute) + } +} diff --git a/crates/brk_computer/src/internal/computed/block/height_date_min.rs b/crates/brk_computer/src/internal/computed/block/height_date_min.rs new file mode 100644 index 000000000..bc3e4b7d9 --- /dev/null +++ b/crates/brk_computer/src/internal/computed/block/height_date_min.rs @@ -0,0 +1,72 @@ +//! ComputedHeightDateMin - height storage + dateindex storage + lazy periods. +//! +//! Use this when both height and dateindex are stored EagerVecs with min-value aggregation. + +use brk_error::Result; +use brk_traversable::Traversable; +use brk_types::{DateIndex, DifficultyEpoch, Height, Version}; +use derive_more::{Deref, DerefMut}; +use schemars::JsonSchema; +use vecdb::{Database, EagerVec, Exit, ImportableVec, IterableCloneableVec, PcoVec}; + +use crate::{ComputeIndexes, indexes}; + +use crate::internal::{ComputedDateMin, ComputedVecValue, LazyMin}; + +#[derive(Clone, Deref, DerefMut, Traversable)] +#[traversable(merge)] +pub struct ComputedHeightDateMin +where + T: ComputedVecValue + PartialOrd + JsonSchema, +{ + pub height: EagerVec>, + #[deref] + #[deref_mut] + #[traversable(flatten)] + pub rest: ComputedDateMin, + pub difficultyepoch: LazyMin, +} + +const VERSION: Version = Version::ZERO; + +impl ComputedHeightDateMin +where + T: ComputedVecValue + JsonSchema + 'static, +{ + pub fn forced_import( + db: &Database, + name: &str, + version: Version, + indexes: &indexes::Vecs, + ) -> Result { + let v = version + VERSION; + + let height: EagerVec> = EagerVec::forced_import(db, name, v)?; + let rest = ComputedDateMin::forced_import(db, name, v, indexes)?; + let difficultyepoch = LazyMin::from_source( + name, + v, + 
height.boxed_clone(), + indexes.difficultyepoch.identity.boxed_clone(), + ); + + Ok(Self { + height, + rest, + difficultyepoch, + }) + } + + /// Compute rest (dateindex + periods) with the given compute function. + pub fn compute_rest( + &mut self, + starting_indexes: &ComputeIndexes, + exit: &Exit, + compute: F, + ) -> Result<()> + where + F: FnMut(&mut EagerVec>) -> Result<()>, + { + self.rest.compute_all(starting_indexes, exit, compute) + } +} diff --git a/crates/brk_computer/src/internal/computed/block/last.rs b/crates/brk_computer/src/internal/computed/block/last.rs index 0919fcf67..cc15c9cbd 100644 --- a/crates/brk_computer/src/internal/computed/block/last.rs +++ b/crates/brk_computer/src/internal/computed/block/last.rs @@ -58,6 +58,16 @@ where F: FnMut(&mut EagerVec>) -> Result<()>, { compute(&mut self.height)?; + self.compute_rest(indexes, starting_indexes, exit) + } + + /// Compute rest from self.height (for stateful computation patterns). + pub fn compute_rest( + &mut self, + indexes: &indexes::Vecs, + starting_indexes: &ComputeIndexes, + exit: &Exit, + ) -> Result<()> { self.rest .derive_from(indexes, starting_indexes, &self.height, exit) } diff --git a/crates/brk_computer/src/internal/computed/block/lazy_sum_cum.rs b/crates/brk_computer/src/internal/computed/block/lazy_sum_cum.rs new file mode 100644 index 000000000..bb2a5468c --- /dev/null +++ b/crates/brk_computer/src/internal/computed/block/lazy_sum_cum.rs @@ -0,0 +1,77 @@ +//! Lazy binary height + stored derived SumCum. +//! +//! Use this when you need: +//! - Lazy height (binary transform from two sources) +//! - Stored cumulative and dateindex aggregates +//! 
- Lazy coarser period lookups + +use brk_error::Result; +use brk_traversable::Traversable; +use brk_types::{Height, Version}; +use derive_more::{Deref, DerefMut}; +use schemars::JsonSchema; +use vecdb::{Database, Exit, IterableCloneableVec, LazyVecFrom2}; + +use crate::{indexes, ComputeIndexes}; + +use crate::internal::{ComputedVecValue, DerivedComputedBlockSumCum, NumericValue}; + +/// Lazy binary height + stored derived block SumCum. +/// +/// Height is a lazy binary transform (e.g., mask × source, or price × sats). +/// Cumulative and dateindex are stored (computed from lazy height). +/// Coarser periods are lazy lookups. +#[derive(Clone, Deref, DerefMut, Traversable)] +#[traversable(merge)] +pub struct LazyComputedBlockSumCum +where + T: ComputedVecValue + PartialOrd + JsonSchema, + S1T: ComputedVecValue, + S2T: ComputedVecValue, +{ + #[traversable(wrap = "sum")] + pub height: LazyVecFrom2, + #[deref] + #[deref_mut] + pub rest: DerivedComputedBlockSumCum, +} + +const VERSION: Version = Version::ZERO; + +impl LazyComputedBlockSumCum +where + T: NumericValue + JsonSchema, + S1T: ComputedVecValue + JsonSchema, + S2T: ComputedVecValue + JsonSchema, +{ + pub fn forced_import( + db: &Database, + name: &str, + version: Version, + indexes: &indexes::Vecs, + height: LazyVecFrom2, + ) -> Result { + let v = version + VERSION; + + let rest = DerivedComputedBlockSumCum::forced_import( + db, + name, + height.boxed_clone(), + v, + indexes, + )?; + + Ok(Self { height, rest }) + } + + /// Derive aggregates from the lazy height source. 
+ pub fn derive_from( + &mut self, + indexes: &indexes::Vecs, + starting_indexes: &ComputeIndexes, + exit: &Exit, + ) -> Result<()> { + self.rest + .derive_from(indexes, starting_indexes, &self.height, exit) + } +} diff --git a/crates/brk_computer/src/internal/computed/block/mod.rs b/crates/brk_computer/src/internal/computed/block/mod.rs index f51ca8c58..236bcad43 100644 --- a/crates/brk_computer/src/internal/computed/block/mod.rs +++ b/crates/brk_computer/src/internal/computed/block/mod.rs @@ -3,11 +3,23 @@ //! For simpler chain-level types (height + difficultyepoch only), see `chain/`. mod full; +mod height_date_bytes; +mod height_date_first; +mod height_date_last; +mod height_date_max; +mod height_date_min; mod last; +mod lazy_sum_cum; mod sum; mod sum_cum; pub use full::*; +pub use height_date_bytes::*; +pub use height_date_first::*; +pub use height_date_last::*; +pub use height_date_max::*; +pub use height_date_min::*; pub use last::*; +pub use lazy_sum_cum::*; pub use sum::*; pub use sum_cum::*; diff --git a/crates/brk_computer/src/internal/computed/block/sum.rs b/crates/brk_computer/src/internal/computed/block/sum.rs index 6af803919..925eaa02a 100644 --- a/crates/brk_computer/src/internal/computed/block/sum.rs +++ b/crates/brk_computer/src/internal/computed/block/sum.rs @@ -18,7 +18,6 @@ pub struct ComputedBlockSum where T: ComputedVecValue + PartialOrd + JsonSchema, { - #[traversable(wrap = "base")] pub height: EagerVec>, #[deref] #[deref_mut] @@ -42,13 +41,8 @@ where let height: EagerVec> = EagerVec::forced_import(db, name, v)?; - let rest = DerivedComputedBlockSum::forced_import( - db, - name, - height.boxed_clone(), - v, - indexes, - )?; + let rest = + DerivedComputedBlockSum::forced_import(db, name, height.boxed_clone(), v, indexes)?; Ok(Self { height, rest }) } @@ -64,6 +58,17 @@ where F: FnMut(&mut EagerVec>) -> Result<()>, { compute(&mut self.height)?; - self.rest.derive_from(indexes, starting_indexes, &self.height, exit) + self.compute_rest(indexes, 
starting_indexes, exit) + } + + /// Compute rest from self.height (for stateful computation patterns). + pub fn compute_rest( + &mut self, + indexes: &indexes::Vecs, + starting_indexes: &ComputeIndexes, + exit: &Exit, + ) -> Result<()> { + self.rest + .derive_from(indexes, starting_indexes, &self.height, exit) } } diff --git a/crates/brk_computer/src/internal/computed/block/sum_cum.rs b/crates/brk_computer/src/internal/computed/block/sum_cum.rs index e2e899bc2..dc2a431c3 100644 --- a/crates/brk_computer/src/internal/computed/block/sum_cum.rs +++ b/crates/brk_computer/src/internal/computed/block/sum_cum.rs @@ -21,11 +21,10 @@ pub struct ComputedBlockSumCum where T: ComputedVecValue + PartialOrd + JsonSchema, { - #[traversable(wrap = "base")] + #[traversable(wrap = "sum")] pub height: EagerVec>, #[deref] #[deref_mut] - #[traversable(flatten)] pub rest: DerivedComputedBlockSumCum, } @@ -67,6 +66,16 @@ where F: FnMut(&mut EagerVec>) -> Result<()>, { compute(&mut self.height)?; + self.compute_rest(indexes, starting_indexes, exit) + } + + /// Compute rest from self.height (for stateful computation patterns). + pub fn compute_rest( + &mut self, + indexes: &indexes::Vecs, + starting_indexes: &ComputeIndexes, + exit: &Exit, + ) -> Result<()> { self.rest.derive_from(indexes, starting_indexes, &self.height, exit) } diff --git a/crates/brk_computer/src/internal/computed/chain/first.rs b/crates/brk_computer/src/internal/computed/chain/first.rs deleted file mode 100644 index b2d68d8a0..000000000 --- a/crates/brk_computer/src/internal/computed/chain/first.rs +++ /dev/null @@ -1,68 +0,0 @@ -//! ComputedChain for first-value aggregation. 
- -use brk_error::Result; - -use brk_traversable::Traversable; -use brk_types::{DifficultyEpoch, Height, Version}; -use schemars::JsonSchema; -use vecdb::{Database, EagerVec, Exit, ImportableVec, IterableCloneableVec, PcoVec}; - -use crate::{ComputeIndexes, indexes}; - -use crate::internal::{ComputedVecValue, LazyFirst, NumericValue}; - -#[derive(Clone, Traversable)] -#[traversable(merge)] -pub struct ComputedChainFirst -where - T: ComputedVecValue + PartialOrd + JsonSchema, -{ - pub height: EagerVec>, - pub difficultyepoch: LazyFirst, -} - -const VERSION: Version = Version::ZERO; - -impl ComputedChainFirst -where - T: NumericValue + JsonSchema, -{ - pub fn forced_import( - db: &Database, - name: &str, - version: Version, - indexes: &indexes::Vecs, - ) -> Result { - let v = version + VERSION; - - let height: EagerVec> = EagerVec::forced_import(db, name, v)?; - - let difficultyepoch = LazyFirst::from_source( - name, - v, - height.boxed_clone(), - indexes - .block - .difficultyepoch_to_difficultyepoch - .boxed_clone(), - ); - - Ok(Self { - height, - difficultyepoch, - }) - } - - pub fn compute( - &mut self, - _starting_indexes: &ComputeIndexes, - _exit: &Exit, - mut compute: F, - ) -> Result<()> - where - F: FnMut(&mut EagerVec>) -> Result<()>, - { - compute(&mut self.height)?; - Ok(()) - } -} diff --git a/crates/brk_computer/src/internal/computed/chain/last.rs b/crates/brk_computer/src/internal/computed/chain/last.rs deleted file mode 100644 index bd3a0b90c..000000000 --- a/crates/brk_computer/src/internal/computed/chain/last.rs +++ /dev/null @@ -1,68 +0,0 @@ -//! ComputedChain for last-value aggregation. 
- -use brk_error::Result; - -use brk_traversable::Traversable; -use brk_types::{DifficultyEpoch, Height, Version}; -use schemars::JsonSchema; -use vecdb::{Database, EagerVec, Exit, ImportableVec, IterableCloneableVec, PcoVec}; - -use crate::{ComputeIndexes, indexes}; - -use crate::internal::{ComputedVecValue, LazyLast, NumericValue}; - -#[derive(Clone, Traversable)] -#[traversable(merge)] -pub struct ComputedChainLast -where - T: ComputedVecValue + PartialOrd + JsonSchema, -{ - pub height: EagerVec>, - pub difficultyepoch: LazyLast, -} - -const VERSION: Version = Version::ZERO; - -impl ComputedChainLast -where - T: NumericValue + JsonSchema, -{ - pub fn forced_import( - db: &Database, - name: &str, - version: Version, - indexes: &indexes::Vecs, - ) -> Result { - let v = version + VERSION; - - let height: EagerVec> = EagerVec::forced_import(db, name, v)?; - - let difficultyepoch = LazyLast::from_source( - name, - v, - height.boxed_clone(), - indexes - .block - .difficultyepoch_to_difficultyepoch - .boxed_clone(), - ); - - Ok(Self { - height, - difficultyepoch, - }) - } - - pub fn compute( - &mut self, - _starting_indexes: &ComputeIndexes, - _exit: &Exit, - mut compute: F, - ) -> Result<()> - where - F: FnMut(&mut EagerVec>) -> Result<()>, - { - compute(&mut self.height)?; - Ok(()) - } -} diff --git a/crates/brk_computer/src/internal/computed/chain/max.rs b/crates/brk_computer/src/internal/computed/chain/max.rs deleted file mode 100644 index 3308641b7..000000000 --- a/crates/brk_computer/src/internal/computed/chain/max.rs +++ /dev/null @@ -1,68 +0,0 @@ -//! ComputedChain for max-value aggregation. 
- -use brk_error::Result; - -use brk_traversable::Traversable; -use brk_types::{DifficultyEpoch, Height, Version}; -use schemars::JsonSchema; -use vecdb::{Database, EagerVec, Exit, ImportableVec, IterableCloneableVec, PcoVec}; - -use crate::{ComputeIndexes, indexes}; - -use crate::internal::{ComputedVecValue, LazyMax, NumericValue}; - -#[derive(Clone, Traversable)] -#[traversable(merge)] -pub struct ComputedChainMax -where - T: ComputedVecValue + PartialOrd + JsonSchema, -{ - pub height: EagerVec>, - pub difficultyepoch: LazyMax, -} - -const VERSION: Version = Version::ZERO; - -impl ComputedChainMax -where - T: NumericValue + JsonSchema, -{ - pub fn forced_import( - db: &Database, - name: &str, - version: Version, - indexes: &indexes::Vecs, - ) -> Result { - let v = version + VERSION; - - let height: EagerVec> = EagerVec::forced_import(db, name, v)?; - - let difficultyepoch = LazyMax::from_source( - name, - v, - height.boxed_clone(), - indexes - .block - .difficultyepoch_to_difficultyepoch - .boxed_clone(), - ); - - Ok(Self { - height, - difficultyepoch, - }) - } - - pub fn compute( - &mut self, - _starting_indexes: &ComputeIndexes, - _exit: &Exit, - mut compute: F, - ) -> Result<()> - where - F: FnMut(&mut EagerVec>) -> Result<()>, - { - compute(&mut self.height)?; - Ok(()) - } -} diff --git a/crates/brk_computer/src/internal/computed/chain/min.rs b/crates/brk_computer/src/internal/computed/chain/min.rs deleted file mode 100644 index f71569941..000000000 --- a/crates/brk_computer/src/internal/computed/chain/min.rs +++ /dev/null @@ -1,68 +0,0 @@ -//! ComputedChain for min-value aggregation. 
- -use brk_error::Result; - -use brk_traversable::Traversable; -use brk_types::{DifficultyEpoch, Height, Version}; -use schemars::JsonSchema; -use vecdb::{Database, EagerVec, Exit, ImportableVec, IterableCloneableVec, PcoVec}; - -use crate::{ComputeIndexes, indexes}; - -use crate::internal::{ComputedVecValue, LazyMin, NumericValue}; - -#[derive(Clone, Traversable)] -#[traversable(merge)] -pub struct ComputedChainMin -where - T: ComputedVecValue + PartialOrd + JsonSchema, -{ - pub height: EagerVec>, - pub difficultyepoch: LazyMin, -} - -const VERSION: Version = Version::ZERO; - -impl ComputedChainMin -where - T: NumericValue + JsonSchema, -{ - pub fn forced_import( - db: &Database, - name: &str, - version: Version, - indexes: &indexes::Vecs, - ) -> Result { - let v = version + VERSION; - - let height: EagerVec> = EagerVec::forced_import(db, name, v)?; - - let difficultyepoch = LazyMin::from_source( - name, - v, - height.boxed_clone(), - indexes - .block - .difficultyepoch_to_difficultyepoch - .boxed_clone(), - ); - - Ok(Self { - height, - difficultyepoch, - }) - } - - pub fn compute( - &mut self, - _starting_indexes: &ComputeIndexes, - _exit: &Exit, - mut compute: F, - ) -> Result<()> - where - F: FnMut(&mut EagerVec>) -> Result<()>, - { - compute(&mut self.height)?; - Ok(()) - } -} diff --git a/crates/brk_computer/src/internal/computed/chain/mod.rs b/crates/brk_computer/src/internal/computed/chain/mod.rs deleted file mode 100644 index 660f8353a..000000000 --- a/crates/brk_computer/src/internal/computed/chain/mod.rs +++ /dev/null @@ -1,13 +0,0 @@ -//! Chain-level computed types (height + difficultyepoch only). -//! -//! These are simpler than block-level types which include dateindex + periods. 
- -mod first; -mod last; -mod max; -mod min; - -pub use first::*; -pub use last::*; -pub use max::*; -pub use min::*; diff --git a/crates/brk_computer/src/internal/computed/date/first.rs b/crates/brk_computer/src/internal/computed/date/first.rs index 63e0bb3df..5c49dd254 100644 --- a/crates/brk_computer/src/internal/computed/date/first.rs +++ b/crates/brk_computer/src/internal/computed/date/first.rs @@ -13,7 +13,7 @@ use crate::internal::{ComputedVecValue, DerivedDateFirst}; #[derive(Clone, Deref, DerefMut, Traversable)] #[traversable(merge)] -pub struct ComputedVecsDateFirst +pub struct ComputedDateFirst where T: ComputedVecValue + PartialOrd + JsonSchema, { @@ -26,7 +26,7 @@ where const VERSION: Version = Version::ZERO; -impl ComputedVecsDateFirst +impl ComputedDateFirst where T: ComputedVecValue + JsonSchema + 'static, { diff --git a/crates/brk_computer/src/internal/computed/date/max.rs b/crates/brk_computer/src/internal/computed/date/max.rs index 865a1e00e..feb2d45c8 100644 --- a/crates/brk_computer/src/internal/computed/date/max.rs +++ b/crates/brk_computer/src/internal/computed/date/max.rs @@ -13,7 +13,7 @@ use crate::internal::{ComputedVecValue, DerivedDateMax}; #[derive(Clone, Deref, DerefMut, Traversable)] #[traversable(merge)] -pub struct ComputedVecsDateMax +pub struct ComputedDateMax where T: ComputedVecValue + PartialOrd + JsonSchema, { @@ -26,7 +26,7 @@ where const VERSION: Version = Version::ZERO; -impl ComputedVecsDateMax +impl ComputedDateMax where T: ComputedVecValue + JsonSchema + 'static, { diff --git a/crates/brk_computer/src/internal/computed/date/min.rs b/crates/brk_computer/src/internal/computed/date/min.rs index 1fc1d8dbc..6f0a7eb02 100644 --- a/crates/brk_computer/src/internal/computed/date/min.rs +++ b/crates/brk_computer/src/internal/computed/date/min.rs @@ -13,7 +13,7 @@ use crate::internal::{ComputedVecValue, DerivedDateMin}; #[derive(Clone, Deref, DerefMut, Traversable)] #[traversable(merge)] -pub struct ComputedVecsDateMin +pub 
struct ComputedDateMin where T: ComputedVecValue + PartialOrd + JsonSchema, { @@ -26,7 +26,7 @@ where const VERSION: Version = Version::ZERO; -impl ComputedVecsDateMin +impl ComputedDateMin where T: ComputedVecValue + JsonSchema + 'static, { diff --git a/crates/brk_computer/src/internal/computed/mod.rs b/crates/brk_computer/src/internal/computed/mod.rs index 43b86efdc..6e0f85fb1 100644 --- a/crates/brk_computer/src/internal/computed/mod.rs +++ b/crates/brk_computer/src/internal/computed/mod.rs @@ -1,11 +1,7 @@ mod block; -mod chain; mod date; -mod derived_block; mod tx; pub use block::*; -pub use chain::*; pub use date::*; -pub use derived_block::*; pub use tx::*; diff --git a/crates/brk_computer/src/internal/computed/tx/distribution.rs b/crates/brk_computer/src/internal/computed/tx/distribution.rs index 4f632de97..a8628b691 100644 --- a/crates/brk_computer/src/internal/computed/tx/distribution.rs +++ b/crates/brk_computer/src/internal/computed/tx/distribution.rs @@ -1,43 +1,33 @@ -//! ComputedTxDistribution - computes TxIndex data to height Distribution + dateindex MinMaxAverage + lazy aggregations. -//! -//! Note: Percentiles are computed at height level only. DateIndex and coarser -//! periods only have average+min+max since computing percentiles across all -//! transactions per day would be expensive. +//! ComputedTxDistribution - eager txindex source + derived distribution. 
use brk_error::Result; use brk_indexer::Indexer; - use brk_traversable::Traversable; -use brk_types::{DateIndex, DifficultyEpoch, Height, TxIndex, Version}; +use brk_types::{TxIndex, Version}; use derive_more::{Deref, DerefMut}; use schemars::JsonSchema; -use vecdb::{CollectableVec, Database, Exit, IterableCloneableVec}; +use vecdb::{Database, EagerVec, Exit, ImportableVec, PcoVec}; use crate::{ ComputeIndexes, indexes, - internal::{ - ComputedVecValue, DerivedDateDistribution, Distribution, LazyDistribution, MinMaxAverage, - NumericValue, - }, + internal::{ComputedVecValue, DerivedTxDistribution, NumericValue}, }; +const VERSION: Version = Version::ZERO; + #[derive(Clone, Deref, DerefMut, Traversable)] #[traversable(merge)] pub struct ComputedTxDistribution where T: ComputedVecValue + PartialOrd + JsonSchema, { - pub height: Distribution, - pub difficultyepoch: LazyDistribution, - pub dateindex: MinMaxAverage, + pub txindex: EagerVec>, #[deref] #[deref_mut] #[traversable(flatten)] - pub dates: DerivedDateDistribution, + pub distribution: DerivedTxDistribution, } -const VERSION: Version = Version::ZERO; - impl ComputedTxDistribution where T: NumericValue + JsonSchema, @@ -48,38 +38,10 @@ where version: Version, indexes: &indexes::Vecs, ) -> Result { - let height = Distribution::forced_import(db, name, version + VERSION)?; - let dateindex = MinMaxAverage::forced_import(db, name, version + VERSION)?; let v = version + VERSION; - - let difficultyepoch = - LazyDistribution::::from_distribution( - name, - v, - height.average.0.boxed_clone(), - height.minmax.min.0.boxed_clone(), - height.minmax.max.0.boxed_clone(), - indexes - .block - .difficultyepoch_to_difficultyepoch - .boxed_clone(), - ); - - let dates = DerivedDateDistribution::from_sources( - name, - v, - dateindex.average.0.boxed_clone(), - dateindex.minmax.min.0.boxed_clone(), - dateindex.minmax.max.0.boxed_clone(), - indexes, - ); - - Ok(Self { - height, - difficultyepoch, - dateindex, - dates, - }) + let 
txindex = EagerVec::forced_import(db, name, v)?; + let distribution = DerivedTxDistribution::forced_import(db, name, v, indexes)?; + Ok(Self { txindex, distribution }) } pub fn derive_from( @@ -87,25 +49,9 @@ where indexer: &Indexer, indexes: &indexes::Vecs, starting_indexes: &ComputeIndexes, - txindex_source: &impl CollectableVec, exit: &Exit, ) -> Result<()> { - self.height.compute( - starting_indexes.height, - txindex_source, - &indexer.vecs.tx.height_to_first_txindex, - &indexes.block.height_to_txindex_count, - exit, - )?; - - self.dateindex.compute( - starting_indexes.dateindex, - &self.height.average.0, - &indexes.time.dateindex_to_first_height, - &indexes.time.dateindex_to_height_count, - exit, - )?; - - Ok(()) + self.distribution + .derive_from(indexer, indexes, starting_indexes, &self.txindex, exit) } } diff --git a/crates/brk_computer/src/internal/computed/tx/mod.rs b/crates/brk_computer/src/internal/computed/tx/mod.rs index b0d01da0e..79fe1f2d5 100644 --- a/crates/brk_computer/src/internal/computed/tx/mod.rs +++ b/crates/brk_computer/src/internal/computed/tx/mod.rs @@ -1,5 +1,3 @@ mod distribution; -mod full; pub use distribution::*; -pub use full::*; diff --git a/crates/brk_computer/src/internal/computed/derived_block/distribution.rs b/crates/brk_computer/src/internal/derived/block/distribution.rs similarity index 89% rename from crates/brk_computer/src/internal/computed/derived_block/distribution.rs rename to crates/brk_computer/src/internal/derived/block/distribution.rs index 2c24d4131..9ed564efe 100644 --- a/crates/brk_computer/src/internal/computed/derived_block/distribution.rs +++ b/crates/brk_computer/src/internal/derived/block/distribution.rs @@ -24,7 +24,6 @@ where pub dateindex: Distribution, #[deref] #[deref_mut] - #[traversable(flatten)] pub dates: DerivedDateDistribution, pub difficultyepoch: LazyDistribution, } @@ -60,10 +59,7 @@ where height_source.boxed_clone(), height_source.boxed_clone(), height_source, - indexes - .block - 
.difficultyepoch_to_difficultyepoch - .boxed_clone(), + indexes.difficultyepoch.identity.boxed_clone(), ); Ok(Self { @@ -83,8 +79,8 @@ where self.dateindex.compute( starting_indexes.dateindex, height_source, - &indexes.time.dateindex_to_first_height, - &indexes.time.dateindex_to_height_count, + &indexes.dateindex.first_height, + &indexes.dateindex.height_count, exit, )?; diff --git a/crates/brk_computer/src/internal/derived/block/first.rs b/crates/brk_computer/src/internal/derived/block/first.rs new file mode 100644 index 000000000..a0f0ade67 --- /dev/null +++ b/crates/brk_computer/src/internal/derived/block/first.rs @@ -0,0 +1,82 @@ +//! DerivedComputedBlockFirst - dateindex storage + difficultyepoch + lazy time periods (first value). + +use brk_error::Result; + +use brk_traversable::Traversable; +use brk_types::{DateIndex, DifficultyEpoch, Height, Version}; +use derive_more::{Deref, DerefMut}; +use schemars::JsonSchema; +use vecdb::{Database, Exit, IterableBoxedVec, IterableCloneableVec, IterableVec}; + +use crate::{ + ComputeIndexes, indexes, + internal::{ComputedVecValue, DerivedDateFirst, FirstVec, LazyFirst, NumericValue}, +}; + +#[derive(Clone, Deref, DerefMut, Traversable)] +#[traversable(merge)] +pub struct DerivedComputedBlockFirst +where + T: ComputedVecValue + PartialOrd + JsonSchema, +{ + pub dateindex: FirstVec, + #[deref] + #[deref_mut] + #[traversable(flatten)] + pub dates: DerivedDateFirst, + pub difficultyepoch: LazyFirst, +} + +const VERSION: Version = Version::ZERO; + +impl DerivedComputedBlockFirst +where + T: NumericValue + JsonSchema, +{ + pub fn forced_import( + db: &Database, + name: &str, + height_source: IterableBoxedVec, + version: Version, + indexes: &indexes::Vecs, + ) -> Result { + let dateindex = FirstVec::forced_import(db, name, version + VERSION)?; + let v = version + VERSION; + + Ok(Self { + dates: DerivedDateFirst::from_source(name, v, dateindex.0.boxed_clone(), indexes), + difficultyepoch: LazyFirst::from_source( + name, + v, + 
height_source, + indexes.difficultyepoch.identity.boxed_clone(), + ), + dateindex, + }) + } + + pub fn derive_from( + &mut self, + indexes: &indexes::Vecs, + starting_indexes: &ComputeIndexes, + height_source: &impl IterableVec, + exit: &Exit, + ) -> Result<()> { + self.dateindex.compute_first( + starting_indexes.dateindex, + height_source, + &indexes.dateindex.first_height, + &indexes.dateindex.height_count, + exit, + )?; + Ok(()) + } + + pub fn compute_all(&mut self, mut compute: F) -> Result<()> + where + F: FnMut(&mut FirstVec) -> Result<()>, + { + compute(&mut self.dateindex)?; + Ok(()) + } +} diff --git a/crates/brk_computer/src/internal/computed/derived_block/full.rs b/crates/brk_computer/src/internal/derived/block/full.rs similarity index 91% rename from crates/brk_computer/src/internal/computed/derived_block/full.rs rename to crates/brk_computer/src/internal/derived/block/full.rs index 9affef961..9509dcae9 100644 --- a/crates/brk_computer/src/internal/computed/derived_block/full.rs +++ b/crates/brk_computer/src/internal/derived/block/full.rs @@ -22,11 +22,11 @@ pub struct DerivedComputedBlockFull where T: ComputedVecValue + PartialOrd + JsonSchema, { + #[traversable(rename = "cumulative")] pub height_cumulative: CumulativeVec, pub dateindex: Full, #[deref] #[deref_mut] - #[traversable(flatten)] pub dates: DerivedDateFull, pub difficultyepoch: LazyFull, } @@ -67,10 +67,7 @@ where height_source.boxed_clone(), height_source.boxed_clone(), height_cumulative.0.boxed_clone(), - indexes - .block - .difficultyepoch_to_difficultyepoch - .boxed_clone(), + indexes.difficultyepoch.identity.boxed_clone(), ), height_cumulative, dateindex, @@ -91,8 +88,8 @@ where self.dateindex.compute( starting_indexes.dateindex, height_source, - &indexes.time.dateindex_to_first_height, - &indexes.time.dateindex_to_height_count, + &indexes.dateindex.first_height, + &indexes.dateindex.height_count, exit, )?; diff --git a/crates/brk_computer/src/internal/computed/derived_block/last.rs 
b/crates/brk_computer/src/internal/derived/block/last.rs similarity index 88% rename from crates/brk_computer/src/internal/computed/derived_block/last.rs rename to crates/brk_computer/src/internal/derived/block/last.rs index f10d06557..ad908f6de 100644 --- a/crates/brk_computer/src/internal/computed/derived_block/last.rs +++ b/crates/brk_computer/src/internal/derived/block/last.rs @@ -49,10 +49,7 @@ where name, v, height_source, - indexes - .block - .difficultyepoch_to_difficultyepoch - .boxed_clone(), + indexes.difficultyepoch.identity.boxed_clone(), ), dateindex, }) @@ -68,8 +65,8 @@ where self.dateindex.compute_last( starting_indexes.dateindex, height_source, - &indexes.time.dateindex_to_first_height, - &indexes.time.dateindex_to_height_count, + &indexes.dateindex.first_height, + &indexes.dateindex.height_count, exit, )?; Ok(()) diff --git a/crates/brk_computer/src/internal/computed/derived_block/mod.rs b/crates/brk_computer/src/internal/derived/block/mod.rs similarity index 84% rename from crates/brk_computer/src/internal/computed/derived_block/mod.rs rename to crates/brk_computer/src/internal/derived/block/mod.rs index 33607db76..d0fa0a4d0 100644 --- a/crates/brk_computer/src/internal/computed/derived_block/mod.rs +++ b/crates/brk_computer/src/internal/derived/block/mod.rs @@ -1,10 +1,12 @@ mod distribution; +mod first; mod full; mod last; mod sum; mod sum_cum; pub use distribution::*; +pub use first::*; pub use full::*; pub use last::*; pub use sum::*; diff --git a/crates/brk_computer/src/internal/computed/derived_block/sum.rs b/crates/brk_computer/src/internal/derived/block/sum.rs similarity index 88% rename from crates/brk_computer/src/internal/computed/derived_block/sum.rs rename to crates/brk_computer/src/internal/derived/block/sum.rs index a7ce93d90..db2df6fc1 100644 --- a/crates/brk_computer/src/internal/computed/derived_block/sum.rs +++ b/crates/brk_computer/src/internal/derived/block/sum.rs @@ -25,7 +25,6 @@ where pub dateindex: SumVec, #[deref] 
#[deref_mut] - #[traversable(flatten)] pub dates: DerivedDateSum, pub difficultyepoch: LazySum, } @@ -43,19 +42,16 @@ where version: Version, indexes: &indexes::Vecs, ) -> Result { - let dateindex = SumVec::forced_import(db, name, version + VERSION)?; + let dateindex = SumVec::forced_import_raw(db, name, version + VERSION)?; let v = version + VERSION; Ok(Self { dates: DerivedDateSum::from_source(name, v, dateindex.0.boxed_clone(), indexes), - difficultyepoch: LazySum::from_source( + difficultyepoch: LazySum::from_source_raw( name, v, height_source, - indexes - .block - .difficultyepoch_to_difficultyepoch - .boxed_clone(), + indexes.difficultyepoch.identity.boxed_clone(), ), dateindex, }) @@ -71,8 +67,8 @@ where self.compute_from( starting_indexes.dateindex, height_source, - &indexes.time.dateindex_to_first_height, - &indexes.time.dateindex_to_height_count, + &indexes.dateindex.first_height, + &indexes.dateindex.height_count, exit, ) } diff --git a/crates/brk_computer/src/internal/computed/derived_block/sum_cum.rs b/crates/brk_computer/src/internal/derived/block/sum_cum.rs similarity index 92% rename from crates/brk_computer/src/internal/computed/derived_block/sum_cum.rs rename to crates/brk_computer/src/internal/derived/block/sum_cum.rs index 66e755467..2a730019b 100644 --- a/crates/brk_computer/src/internal/computed/derived_block/sum_cum.rs +++ b/crates/brk_computer/src/internal/derived/block/sum_cum.rs @@ -25,11 +25,11 @@ pub struct DerivedComputedBlockSumCum where T: ComputedVecValue + PartialOrd + JsonSchema, { + #[traversable(rename = "cumulative")] pub height_cumulative: CumulativeVec, pub dateindex: SumCum, #[deref] #[deref_mut] - #[traversable(flatten)] pub dates: DerivedDateSumCum, pub difficultyepoch: LazySumCum, } @@ -50,7 +50,7 @@ where let v = version + VERSION; let height_cumulative = CumulativeVec::forced_import(db, name, v)?; - let dateindex = SumCum::forced_import(db, name, v)?; + let dateindex = SumCum::forced_import_sum_raw(db, name, v)?; let 
dates = DerivedDateSumCum::from_sources( name, @@ -60,15 +60,12 @@ where indexes, ); - let difficultyepoch = LazySumCum::from_sources( + let difficultyepoch = LazySumCum::from_sources_sum_raw( name, v, height_source.boxed_clone(), height_cumulative.0.boxed_clone(), - indexes - .block - .difficultyepoch_to_difficultyepoch - .boxed_clone(), + indexes.difficultyepoch.identity.boxed_clone(), ); Ok(Self { @@ -90,8 +87,8 @@ where self.compute_dateindex_sum_cum( starting_indexes.dateindex, height_source, - &indexes.time.dateindex_to_first_height, - &indexes.time.dateindex_to_height_count, + &indexes.dateindex.first_height, + &indexes.dateindex.height_count, exit, ) } diff --git a/crates/brk_computer/src/internal/derived/date/average.rs b/crates/brk_computer/src/internal/derived/date/average.rs index d2a226538..4b23cda40 100644 --- a/crates/brk_computer/src/internal/derived/date/average.rs +++ b/crates/brk_computer/src/internal/derived/date/average.rs @@ -38,43 +38,21 @@ where dateindex_source: IterableBoxedVec, indexes: &indexes::Vecs, ) -> Self { + let v = version + VERSION; + + macro_rules! 
period { + ($idx:ident) => { + LazyAverage::from_source_raw(name, v, dateindex_source.clone(), indexes.$idx.identity.boxed_clone()) + }; + } + Self { - weekindex: LazyAverage::from_source( - name, - version + VERSION, - dateindex_source.clone(), - indexes.time.weekindex_to_weekindex.boxed_clone(), - ), - monthindex: LazyAverage::from_source( - name, - version + VERSION, - dateindex_source.clone(), - indexes.time.monthindex_to_monthindex.boxed_clone(), - ), - quarterindex: LazyAverage::from_source( - name, - version + VERSION, - dateindex_source.clone(), - indexes.time.quarterindex_to_quarterindex.boxed_clone(), - ), - semesterindex: LazyAverage::from_source( - name, - version + VERSION, - dateindex_source.clone(), - indexes.time.semesterindex_to_semesterindex.boxed_clone(), - ), - yearindex: LazyAverage::from_source( - name, - version + VERSION, - dateindex_source.clone(), - indexes.time.yearindex_to_yearindex.boxed_clone(), - ), - decadeindex: LazyAverage::from_source( - name, - version + VERSION, - dateindex_source, - indexes.time.decadeindex_to_decadeindex.boxed_clone(), - ), + weekindex: period!(weekindex), + monthindex: period!(monthindex), + quarterindex: period!(quarterindex), + semesterindex: period!(semesterindex), + yearindex: period!(yearindex), + decadeindex: period!(decadeindex), } } } diff --git a/crates/brk_computer/src/internal/derived/date/distribution.rs b/crates/brk_computer/src/internal/derived/date/distribution.rs index 26fd5b263..1cae076c7 100644 --- a/crates/brk_computer/src/internal/derived/date/distribution.rs +++ b/crates/brk_computer/src/internal/derived/date/distribution.rs @@ -40,55 +40,24 @@ where max_source: IterableBoxedVec, indexes: &indexes::Vecs, ) -> Self { + let v = version + VERSION; + + macro_rules! 
period { + ($idx:ident) => { + LazyDistribution::from_distribution( + name, v, average_source.clone(), min_source.clone(), max_source.clone(), + indexes.$idx.identity.boxed_clone(), + ) + }; + } + Self { - weekindex: LazyDistribution::from_distribution( - name, - version + VERSION, - average_source.clone(), - min_source.clone(), - max_source.clone(), - indexes.time.weekindex_to_weekindex.boxed_clone(), - ), - monthindex: LazyDistribution::from_distribution( - name, - version + VERSION, - average_source.clone(), - min_source.clone(), - max_source.clone(), - indexes.time.monthindex_to_monthindex.boxed_clone(), - ), - quarterindex: LazyDistribution::from_distribution( - name, - version + VERSION, - average_source.clone(), - min_source.clone(), - max_source.clone(), - indexes.time.quarterindex_to_quarterindex.boxed_clone(), - ), - semesterindex: LazyDistribution::from_distribution( - name, - version + VERSION, - average_source.clone(), - min_source.clone(), - max_source.clone(), - indexes.time.semesterindex_to_semesterindex.boxed_clone(), - ), - yearindex: LazyDistribution::from_distribution( - name, - version + VERSION, - average_source.clone(), - min_source.clone(), - max_source.clone(), - indexes.time.yearindex_to_yearindex.boxed_clone(), - ), - decadeindex: LazyDistribution::from_distribution( - name, - version + VERSION, - average_source, - min_source, - max_source, - indexes.time.decadeindex_to_decadeindex.boxed_clone(), - ), + weekindex: period!(weekindex), + monthindex: period!(monthindex), + quarterindex: period!(quarterindex), + semesterindex: period!(semesterindex), + yearindex: period!(yearindex), + decadeindex: period!(decadeindex), } } } diff --git a/crates/brk_computer/src/internal/derived/date/first.rs b/crates/brk_computer/src/internal/derived/date/first.rs index 1d0adba6b..3c58e5949 100644 --- a/crates/brk_computer/src/internal/derived/date/first.rs +++ b/crates/brk_computer/src/internal/derived/date/first.rs @@ -38,43 +38,21 @@ where 
dateindex_source: IterableBoxedVec, indexes: &indexes::Vecs, ) -> Self { + let v = version + VERSION; + + macro_rules! period { + ($idx:ident) => { + LazyFirst::from_source(name, v, dateindex_source.clone(), indexes.$idx.identity.boxed_clone()) + }; + } + Self { - weekindex: LazyFirst::from_source( - name, - version + VERSION, - dateindex_source.clone(), - indexes.time.weekindex_to_weekindex.boxed_clone(), - ), - monthindex: LazyFirst::from_source( - name, - version + VERSION, - dateindex_source.clone(), - indexes.time.monthindex_to_monthindex.boxed_clone(), - ), - quarterindex: LazyFirst::from_source( - name, - version + VERSION, - dateindex_source.clone(), - indexes.time.quarterindex_to_quarterindex.boxed_clone(), - ), - semesterindex: LazyFirst::from_source( - name, - version + VERSION, - dateindex_source.clone(), - indexes.time.semesterindex_to_semesterindex.boxed_clone(), - ), - yearindex: LazyFirst::from_source( - name, - version + VERSION, - dateindex_source.clone(), - indexes.time.yearindex_to_yearindex.boxed_clone(), - ), - decadeindex: LazyFirst::from_source( - name, - version + VERSION, - dateindex_source, - indexes.time.decadeindex_to_decadeindex.boxed_clone(), - ), + weekindex: period!(weekindex), + monthindex: period!(monthindex), + quarterindex: period!(quarterindex), + semesterindex: period!(semesterindex), + yearindex: period!(yearindex), + decadeindex: period!(decadeindex), } } } diff --git a/crates/brk_computer/src/internal/derived/date/full.rs b/crates/brk_computer/src/internal/derived/date/full.rs index 8f1500141..80bb38e99 100644 --- a/crates/brk_computer/src/internal/derived/date/full.rs +++ b/crates/brk_computer/src/internal/derived/date/full.rs @@ -43,67 +43,26 @@ where cumulative_source: IterableBoxedVec, indexes: &indexes::Vecs, ) -> Self { + let v = version + VERSION; + + macro_rules! 
period { + ($idx:ident) => { + LazyFull::from_stats_aggregate( + name, v, + average_source.clone(), min_source.clone(), max_source.clone(), + sum_source.clone(), cumulative_source.clone(), + indexes.$idx.identity.boxed_clone(), + ) + }; + } + Self { - weekindex: LazyFull::from_stats_aggregate( - name, - version + VERSION, - average_source.clone(), - min_source.clone(), - max_source.clone(), - sum_source.clone(), - cumulative_source.clone(), - indexes.time.weekindex_to_weekindex.boxed_clone(), - ), - monthindex: LazyFull::from_stats_aggregate( - name, - version + VERSION, - average_source.clone(), - min_source.clone(), - max_source.clone(), - sum_source.clone(), - cumulative_source.clone(), - indexes.time.monthindex_to_monthindex.boxed_clone(), - ), - quarterindex: LazyFull::from_stats_aggregate( - name, - version + VERSION, - average_source.clone(), - min_source.clone(), - max_source.clone(), - sum_source.clone(), - cumulative_source.clone(), - indexes.time.quarterindex_to_quarterindex.boxed_clone(), - ), - semesterindex: LazyFull::from_stats_aggregate( - name, - version + VERSION, - average_source.clone(), - min_source.clone(), - max_source.clone(), - sum_source.clone(), - cumulative_source.clone(), - indexes.time.semesterindex_to_semesterindex.boxed_clone(), - ), - yearindex: LazyFull::from_stats_aggregate( - name, - version + VERSION, - average_source.clone(), - min_source.clone(), - max_source.clone(), - sum_source.clone(), - cumulative_source.clone(), - indexes.time.yearindex_to_yearindex.boxed_clone(), - ), - decadeindex: LazyFull::from_stats_aggregate( - name, - version + VERSION, - average_source, - min_source, - max_source, - sum_source, - cumulative_source, - indexes.time.decadeindex_to_decadeindex.boxed_clone(), - ), + weekindex: period!(weekindex), + monthindex: period!(monthindex), + quarterindex: period!(quarterindex), + semesterindex: period!(semesterindex), + yearindex: period!(yearindex), + decadeindex: period!(decadeindex), } } } diff --git 
a/crates/brk_computer/src/internal/derived/date/last.rs b/crates/brk_computer/src/internal/derived/date/last.rs index f73b4f9d7..6d0c1af8a 100644 --- a/crates/brk_computer/src/internal/derived/date/last.rs +++ b/crates/brk_computer/src/internal/derived/date/last.rs @@ -38,43 +38,21 @@ where dateindex_source: IterableBoxedVec, indexes: &indexes::Vecs, ) -> Self { + let v = version + VERSION; + + macro_rules! period { + ($idx:ident) => { + LazyLast::from_source(name, v, dateindex_source.clone(), indexes.$idx.identity.boxed_clone()) + }; + } + Self { - weekindex: LazyLast::from_source( - name, - version + VERSION, - dateindex_source.clone(), - indexes.time.weekindex_to_weekindex.boxed_clone(), - ), - monthindex: LazyLast::from_source( - name, - version + VERSION, - dateindex_source.clone(), - indexes.time.monthindex_to_monthindex.boxed_clone(), - ), - quarterindex: LazyLast::from_source( - name, - version + VERSION, - dateindex_source.clone(), - indexes.time.quarterindex_to_quarterindex.boxed_clone(), - ), - semesterindex: LazyLast::from_source( - name, - version + VERSION, - dateindex_source.clone(), - indexes.time.semesterindex_to_semesterindex.boxed_clone(), - ), - yearindex: LazyLast::from_source( - name, - version + VERSION, - dateindex_source.clone(), - indexes.time.yearindex_to_yearindex.boxed_clone(), - ), - decadeindex: LazyLast::from_source( - name, - version + VERSION, - dateindex_source, - indexes.time.decadeindex_to_decadeindex.boxed_clone(), - ), + weekindex: period!(weekindex), + monthindex: period!(monthindex), + quarterindex: period!(quarterindex), + semesterindex: period!(semesterindex), + yearindex: period!(yearindex), + decadeindex: period!(decadeindex), } } } diff --git a/crates/brk_computer/src/internal/derived/date/max.rs b/crates/brk_computer/src/internal/derived/date/max.rs index d28512f3b..66c88f8f0 100644 --- a/crates/brk_computer/src/internal/derived/date/max.rs +++ b/crates/brk_computer/src/internal/derived/date/max.rs @@ -38,43 +38,21 @@ 
where dateindex_source: IterableBoxedVec, indexes: &indexes::Vecs, ) -> Self { + let v = version + VERSION; + + macro_rules! period { + ($idx:ident) => { + LazyMax::from_source(name, v, dateindex_source.clone(), indexes.$idx.identity.boxed_clone()) + }; + } + Self { - weekindex: LazyMax::from_source( - name, - version + VERSION, - dateindex_source.clone(), - indexes.time.weekindex_to_weekindex.boxed_clone(), - ), - monthindex: LazyMax::from_source( - name, - version + VERSION, - dateindex_source.clone(), - indexes.time.monthindex_to_monthindex.boxed_clone(), - ), - quarterindex: LazyMax::from_source( - name, - version + VERSION, - dateindex_source.clone(), - indexes.time.quarterindex_to_quarterindex.boxed_clone(), - ), - semesterindex: LazyMax::from_source( - name, - version + VERSION, - dateindex_source.clone(), - indexes.time.semesterindex_to_semesterindex.boxed_clone(), - ), - yearindex: LazyMax::from_source( - name, - version + VERSION, - dateindex_source.clone(), - indexes.time.yearindex_to_yearindex.boxed_clone(), - ), - decadeindex: LazyMax::from_source( - name, - version + VERSION, - dateindex_source, - indexes.time.decadeindex_to_decadeindex.boxed_clone(), - ), + weekindex: period!(weekindex), + monthindex: period!(monthindex), + quarterindex: period!(quarterindex), + semesterindex: period!(semesterindex), + yearindex: period!(yearindex), + decadeindex: period!(decadeindex), } } } diff --git a/crates/brk_computer/src/internal/derived/date/min.rs b/crates/brk_computer/src/internal/derived/date/min.rs index 2af382b52..cb526dd8f 100644 --- a/crates/brk_computer/src/internal/derived/date/min.rs +++ b/crates/brk_computer/src/internal/derived/date/min.rs @@ -38,43 +38,21 @@ where dateindex_source: IterableBoxedVec, indexes: &indexes::Vecs, ) -> Self { + let v = version + VERSION; + + macro_rules! 
period { + ($idx:ident) => { + LazyMin::from_source(name, v, dateindex_source.clone(), indexes.$idx.identity.boxed_clone()) + }; + } + Self { - weekindex: LazyMin::from_source( - name, - version + VERSION, - dateindex_source.clone(), - indexes.time.weekindex_to_weekindex.boxed_clone(), - ), - monthindex: LazyMin::from_source( - name, - version + VERSION, - dateindex_source.clone(), - indexes.time.monthindex_to_monthindex.boxed_clone(), - ), - quarterindex: LazyMin::from_source( - name, - version + VERSION, - dateindex_source.clone(), - indexes.time.quarterindex_to_quarterindex.boxed_clone(), - ), - semesterindex: LazyMin::from_source( - name, - version + VERSION, - dateindex_source.clone(), - indexes.time.semesterindex_to_semesterindex.boxed_clone(), - ), - yearindex: LazyMin::from_source( - name, - version + VERSION, - dateindex_source.clone(), - indexes.time.yearindex_to_yearindex.boxed_clone(), - ), - decadeindex: LazyMin::from_source( - name, - version + VERSION, - dateindex_source, - indexes.time.decadeindex_to_decadeindex.boxed_clone(), - ), + weekindex: period!(weekindex), + monthindex: period!(monthindex), + quarterindex: period!(quarterindex), + semesterindex: period!(semesterindex), + yearindex: period!(yearindex), + decadeindex: period!(decadeindex), } } } diff --git a/crates/brk_computer/src/internal/derived/date/sum.rs b/crates/brk_computer/src/internal/derived/date/sum.rs index c31fa8f2a..728eb7da7 100644 --- a/crates/brk_computer/src/internal/derived/date/sum.rs +++ b/crates/brk_computer/src/internal/derived/date/sum.rs @@ -39,43 +39,20 @@ where indexes: &indexes::Vecs, ) -> Self { let v = version + VERSION; + + macro_rules! 
period { + ($idx:ident) => { + LazySum::from_source_raw(name, v, dateindex_source.clone(), indexes.$idx.identity.boxed_clone()) + }; + } + Self { - weekindex: LazySum::from_source( - name, - v, - dateindex_source.clone(), - indexes.time.weekindex_to_weekindex.boxed_clone(), - ), - monthindex: LazySum::from_source( - name, - v, - dateindex_source.clone(), - indexes.time.monthindex_to_monthindex.boxed_clone(), - ), - quarterindex: LazySum::from_source( - name, - v, - dateindex_source.clone(), - indexes.time.quarterindex_to_quarterindex.boxed_clone(), - ), - semesterindex: LazySum::from_source( - name, - v, - dateindex_source.clone(), - indexes.time.semesterindex_to_semesterindex.boxed_clone(), - ), - yearindex: LazySum::from_source( - name, - v, - dateindex_source.clone(), - indexes.time.yearindex_to_yearindex.boxed_clone(), - ), - decadeindex: LazySum::from_source( - name, - v, - dateindex_source, - indexes.time.decadeindex_to_decadeindex.boxed_clone(), - ), + weekindex: period!(weekindex), + monthindex: period!(monthindex), + quarterindex: period!(quarterindex), + semesterindex: period!(semesterindex), + yearindex: period!(yearindex), + decadeindex: period!(decadeindex), } } } diff --git a/crates/brk_computer/src/internal/derived/date/sum_cum.rs b/crates/brk_computer/src/internal/derived/date/sum_cum.rs index 712c2639c..314b08b1c 100644 --- a/crates/brk_computer/src/internal/derived/date/sum_cum.rs +++ b/crates/brk_computer/src/internal/derived/date/sum_cum.rs @@ -39,49 +39,24 @@ where cumulative_source: IterableBoxedVec, indexes: &indexes::Vecs, ) -> Self { + let v = version + VERSION; + + macro_rules! 
period { + ($idx:ident) => { + LazySumCum::from_sources_sum_raw( + name, v, sum_source.clone(), cumulative_source.clone(), + indexes.$idx.identity.boxed_clone(), + ) + }; + } + Self { - weekindex: LazySumCum::from_sources( - name, - version + VERSION, - sum_source.clone(), - cumulative_source.clone(), - indexes.time.weekindex_to_weekindex.boxed_clone(), - ), - monthindex: LazySumCum::from_sources( - name, - version + VERSION, - sum_source.clone(), - cumulative_source.clone(), - indexes.time.monthindex_to_monthindex.boxed_clone(), - ), - quarterindex: LazySumCum::from_sources( - name, - version + VERSION, - sum_source.clone(), - cumulative_source.clone(), - indexes.time.quarterindex_to_quarterindex.boxed_clone(), - ), - semesterindex: LazySumCum::from_sources( - name, - version + VERSION, - sum_source.clone(), - cumulative_source.clone(), - indexes.time.semesterindex_to_semesterindex.boxed_clone(), - ), - yearindex: LazySumCum::from_sources( - name, - version + VERSION, - sum_source.clone(), - cumulative_source.clone(), - indexes.time.yearindex_to_yearindex.boxed_clone(), - ), - decadeindex: LazySumCum::from_sources( - name, - version + VERSION, - sum_source, - cumulative_source, - indexes.time.decadeindex_to_decadeindex.boxed_clone(), - ), + weekindex: period!(weekindex), + monthindex: period!(monthindex), + quarterindex: period!(quarterindex), + semesterindex: period!(semesterindex), + yearindex: period!(yearindex), + decadeindex: period!(decadeindex), } } } diff --git a/crates/brk_computer/src/internal/derived/mod.rs b/crates/brk_computer/src/internal/derived/mod.rs index 477f89b19..6e0f85fb1 100644 --- a/crates/brk_computer/src/internal/derived/mod.rs +++ b/crates/brk_computer/src/internal/derived/mod.rs @@ -1,5 +1,7 @@ +mod block; mod date; +mod tx; +pub use block::*; pub use date::*; - -// tx derived types have been moved to computed/tx/ +pub use tx::*; diff --git a/crates/brk_computer/src/internal/derived/tx/distribution.rs 
b/crates/brk_computer/src/internal/derived/tx/distribution.rs new file mode 100644 index 000000000..76c69ffb9 --- /dev/null +++ b/crates/brk_computer/src/internal/derived/tx/distribution.rs @@ -0,0 +1,108 @@ +//! ComputedTxDistribution - computes TxIndex data to height Distribution + dateindex MinMaxAverage + lazy aggregations. +//! +//! Note: Percentiles are computed at height level only. DateIndex and coarser +//! periods only have average+min+max since computing percentiles across all +//! transactions per day would be expensive. + +use brk_error::Result; +use brk_indexer::Indexer; + +use brk_traversable::Traversable; +use brk_types::{DateIndex, DifficultyEpoch, Height, TxIndex, Version}; +use derive_more::{Deref, DerefMut}; +use schemars::JsonSchema; +use vecdb::{CollectableVec, Database, Exit, IterableCloneableVec}; + +use crate::{ + ComputeIndexes, indexes, + internal::{ + ComputedVecValue, DerivedDateDistribution, Distribution, LazyDistribution, MinMaxAverage, + NumericValue, + }, +}; + +#[derive(Clone, Deref, DerefMut, Traversable)] +#[traversable(merge)] +pub struct DerivedTxDistribution +where + T: ComputedVecValue + PartialOrd + JsonSchema, +{ + pub height: Distribution, + pub difficultyepoch: LazyDistribution, + pub dateindex: MinMaxAverage, + #[deref] + #[deref_mut] + #[traversable(flatten)] + pub dates: DerivedDateDistribution, +} + +const VERSION: Version = Version::ZERO; + +impl DerivedTxDistribution +where + T: NumericValue + JsonSchema, +{ + pub fn forced_import( + db: &Database, + name: &str, + version: Version, + indexes: &indexes::Vecs, + ) -> Result { + let height = Distribution::forced_import(db, name, version + VERSION)?; + let dateindex = MinMaxAverage::forced_import(db, name, version + VERSION)?; + let v = version + VERSION; + + let difficultyepoch = + LazyDistribution::::from_distribution( + name, + v, + height.average.0.boxed_clone(), + height.minmax.min.0.boxed_clone(), + height.minmax.max.0.boxed_clone(), + 
indexes.difficultyepoch.identity.boxed_clone(), + ); + + let dates = DerivedDateDistribution::from_sources( + name, + v, + dateindex.average.0.boxed_clone(), + dateindex.minmax.min.0.boxed_clone(), + dateindex.minmax.max.0.boxed_clone(), + indexes, + ); + + Ok(Self { + height, + difficultyepoch, + dateindex, + dates, + }) + } + + pub fn derive_from( + &mut self, + indexer: &Indexer, + indexes: &indexes::Vecs, + starting_indexes: &ComputeIndexes, + txindex_source: &impl CollectableVec, + exit: &Exit, + ) -> Result<()> { + self.height.compute( + starting_indexes.height, + txindex_source, + &indexer.vecs.transactions.first_txindex, + &indexes.height.txindex_count, + exit, + )?; + + self.dateindex.compute( + starting_indexes.dateindex, + &self.height.average.0, + &indexes.dateindex.first_height, + &indexes.dateindex.height_count, + exit, + )?; + + Ok(()) + } +} diff --git a/crates/brk_computer/src/internal/computed/tx/full.rs b/crates/brk_computer/src/internal/derived/tx/full.rs similarity index 91% rename from crates/brk_computer/src/internal/computed/tx/full.rs rename to crates/brk_computer/src/internal/derived/tx/full.rs index 20f8101ef..c99785ce3 100644 --- a/crates/brk_computer/src/internal/computed/tx/full.rs +++ b/crates/brk_computer/src/internal/derived/tx/full.rs @@ -55,7 +55,7 @@ where height.distribution.minmax.max.0.boxed_clone(), height.sum_cum.sum.0.boxed_clone(), height.sum_cum.cumulative.0.boxed_clone(), - indexes.block.difficultyepoch_to_difficultyepoch.boxed_clone(), + indexes.difficultyepoch.identity.boxed_clone(), ); let dates = DerivedDateFull::from_sources( @@ -88,16 +88,16 @@ where self.height.compute( starting_indexes.height, txindex_source, - &indexer.vecs.tx.height_to_first_txindex, - &indexes.block.height_to_txindex_count, + &indexer.vecs.transactions.first_txindex, + &indexes.height.txindex_count, exit, )?; self.dateindex.compute( starting_indexes.dateindex, &self.height.distribution.average.0, - &indexes.time.dateindex_to_first_height, - 
&indexes.time.dateindex_to_height_count, + &indexes.dateindex.first_height, + &indexes.dateindex.height_count, exit, )?; diff --git a/crates/brk_computer/src/internal/derived/tx/mod.rs b/crates/brk_computer/src/internal/derived/tx/mod.rs new file mode 100644 index 000000000..b0d01da0e --- /dev/null +++ b/crates/brk_computer/src/internal/derived/tx/mod.rs @@ -0,0 +1,5 @@ +mod distribution; +mod full; + +pub use distribution::*; +pub use full::*; diff --git a/crates/brk_computer/src/internal/group/full.rs b/crates/brk_computer/src/internal/group/full.rs index 8f65185a6..42da39b31 100644 --- a/crates/brk_computer/src/internal/group/full.rs +++ b/crates/brk_computer/src/internal/group/full.rs @@ -11,7 +11,9 @@ use super::{Distribution, SumCum}; /// Matches the common full_stats() pattern: average + minmax + percentiles + sum + cumulative #[derive(Clone, Traversable)] pub struct Full { + #[traversable(flatten)] pub distribution: Distribution, + #[traversable(flatten)] pub sum_cum: SumCum, } diff --git a/crates/brk_computer/src/internal/group/stats.rs b/crates/brk_computer/src/internal/group/stats.rs index fd796b2ff..b209846eb 100644 --- a/crates/brk_computer/src/internal/group/stats.rs +++ b/crates/brk_computer/src/internal/group/stats.rs @@ -11,8 +11,11 @@ use super::{MinMax, SumCum}; /// Sum + Cumulative + Average + Min + Max. Like `Full` but without percentiles. #[derive(Clone, Traversable)] pub struct Stats { + #[traversable(flatten)] pub sum_cum: SumCum, + #[traversable(flatten)] pub average: AverageVec, + #[traversable(flatten)] pub minmax: MinMax, } diff --git a/crates/brk_computer/src/internal/group/sum_cum.rs b/crates/brk_computer/src/internal/group/sum_cum.rs index 463cc0dc3..4a4f36743 100644 --- a/crates/brk_computer/src/internal/group/sum_cum.rs +++ b/crates/brk_computer/src/internal/group/sum_cum.rs @@ -23,6 +23,14 @@ impl SumCum { }) } + /// Import with raw sum name (no _sum suffix) for cases where sum should merge with base. 
+ pub fn forced_import_sum_raw(db: &Database, name: &str, version: Version) -> Result { + Ok(Self { + sum: SumVec::forced_import_raw(db, name, version)?, + cumulative: CumulativeVec::forced_import(db, name, version)?, + }) + } + /// Compute sum and cumulative from source data. pub fn compute( &mut self, diff --git a/crates/brk_computer/src/internal/lazy/block/binary_full.rs b/crates/brk_computer/src/internal/lazy/block/binary_full.rs index bb9e3d57a..160e96bd8 100644 --- a/crates/brk_computer/src/internal/lazy/block/binary_full.rs +++ b/crates/brk_computer/src/internal/lazy/block/binary_full.rs @@ -22,7 +22,6 @@ where pub height: LazyVecFrom2, #[deref] #[deref_mut] - #[traversable(flatten)] pub rest: LazyDerivedBlock2SumCum, } diff --git a/crates/brk_computer/src/internal/lazy/block/binary_last.rs b/crates/brk_computer/src/internal/lazy/block/binary_last.rs new file mode 100644 index 000000000..a8989e82c --- /dev/null +++ b/crates/brk_computer/src/internal/lazy/block/binary_last.rs @@ -0,0 +1,291 @@ +//! Lazy binary transform from two SumCum sources, producing Last (cumulative) ratios only. 
+ +use brk_traversable::Traversable; +use brk_types::{DateIndex, DifficultyEpoch, Height, Version}; +use derive_more::{Deref, DerefMut}; +use schemars::JsonSchema; +use vecdb::{BinaryTransform, IterableBoxedVec, IterableCloneableVec, LazyVecFrom2}; + +use crate::internal::{ + ComputedBlockLast, ComputedBlockSumCum, ComputedHeightDateLast, ComputedVecValue, + DerivedDateLast, LazyBinaryDateLast, LazyTransform2Last, NumericValue, +}; + +use super::super::derived_block::LazyDerivedBlock2Last; + +#[derive(Clone, Deref, DerefMut, Traversable)] +#[traversable(merge)] +pub struct LazyBinaryBlockLast +where + T: ComputedVecValue + PartialOrd + JsonSchema, + S1T: ComputedVecValue, + S2T: ComputedVecValue, +{ + pub height: LazyVecFrom2, + #[deref] + #[deref_mut] + pub rest: LazyDerivedBlock2Last, +} + +const VERSION: Version = Version::ZERO; + +impl LazyBinaryBlockLast +where + T: ComputedVecValue + JsonSchema + 'static, + S1T: ComputedVecValue + JsonSchema, + S2T: ComputedVecValue + JsonSchema, +{ + pub fn from_computed_sum_cum>( + name: &str, + version: Version, + source1: &ComputedBlockSumCum, + source2: &ComputedBlockSumCum, + ) -> Self + where + S1T: PartialOrd, + S2T: PartialOrd, + { + let v = version + VERSION; + + Self { + height: LazyVecFrom2::transformed::( + name, + v, + source1.height_cumulative.0.boxed_clone(), + source2.height_cumulative.0.boxed_clone(), + ), + rest: LazyDerivedBlock2Last::from_computed_sum_cum::(name, v, source1, source2), + } + } + + pub fn from_computed_last>( + name: &str, + version: Version, + source1: &ComputedBlockLast, + source2: &ComputedBlockLast, + ) -> Self + where + S1T: NumericValue, + S2T: NumericValue, + { + let v = version + VERSION; + + Self { + height: LazyVecFrom2::transformed::( + name, + v, + source1.height.boxed_clone(), + source2.height.boxed_clone(), + ), + rest: LazyDerivedBlock2Last::from_computed_last::(name, v, source1, source2), + } + } + + pub fn from_computed_height_date_last>( + name: &str, + version: Version, + 
source1: &ComputedHeightDateLast, + source2: &ComputedHeightDateLast, + ) -> Self + where + S1T: PartialOrd, + S2T: PartialOrd, + { + let v = version + VERSION; + + Self { + height: LazyVecFrom2::transformed::( + name, + v, + source1.height.boxed_clone(), + source2.height.boxed_clone(), + ), + rest: LazyDerivedBlock2Last::from_computed_height_date_last::( + name, v, source1, source2, + ), + } + } + + /// Create from a ComputedHeightDateLast and a LazyBinaryBlockLast. + pub fn from_computed_height_date_and_binary_block( + name: &str, + version: Version, + source1: &ComputedHeightDateLast, + source2: &LazyBinaryBlockLast, + ) -> Self + where + F: BinaryTransform, + S1T: PartialOrd, + S2aT: ComputedVecValue + JsonSchema, + S2bT: ComputedVecValue + JsonSchema, + { + let v = version + VERSION; + + Self { + height: LazyVecFrom2::transformed::( + name, + v, + source1.height.boxed_clone(), + source2.height.boxed_clone(), + ), + rest: LazyDerivedBlock2Last { + dates: LazyBinaryDateLast::from_computed_and_binary_last::( + name, + v, + &source1.rest, + &source2.rest.dates, + ), + difficultyepoch: LazyTransform2Last::from_vecs::( + name, + v, + source1.difficultyepoch.0.boxed_clone(), + source2.rest.difficultyepoch.boxed_clone(), + ), + }, + } + } + + /// Create from a ComputedHeightDateLast and a ComputedBlockLast. + pub fn from_computed_height_date_and_block_last>( + name: &str, + version: Version, + source1: &ComputedHeightDateLast, + source2: &ComputedBlockLast, + ) -> Self + where + S1T: PartialOrd, + S2T: NumericValue, + { + let v = version + VERSION; + + Self { + height: LazyVecFrom2::transformed::( + name, + v, + source1.height.boxed_clone(), + source2.height.boxed_clone(), + ), + rest: LazyDerivedBlock2Last::from_computed_height_date_and_block_last::( + name, v, source1, source2, + ), + } + } + + /// Create from a LazyBinaryBlockLast and a ComputedBlockLast. 
+ pub fn from_binary_block_and_computed_block_last( + name: &str, + version: Version, + source1: &LazyBinaryBlockLast, + source2: &ComputedBlockLast, + ) -> Self + where + F: BinaryTransform, + S1aT: ComputedVecValue + JsonSchema, + S1bT: ComputedVecValue + JsonSchema, + S2T: NumericValue, + { + let v = version + VERSION; + + Self { + height: LazyVecFrom2::transformed::( + name, + v, + source1.height.boxed_clone(), + source2.height.boxed_clone(), + ), + rest: LazyDerivedBlock2Last { + dates: LazyBinaryDateLast::from_binary_and_block_last::( + name, + v, + &source1.rest.dates, + source2, + ), + difficultyepoch: LazyTransform2Last::from_vecs::( + name, + v, + source1.rest.difficultyepoch.boxed_clone(), + source2.difficultyepoch.boxed_clone(), + ), + }, + } + } + + /// Create from two LazyBinaryBlockLast sources. + pub fn from_both_binary_block( + name: &str, + version: Version, + source1: &LazyBinaryBlockLast, + source2: &LazyBinaryBlockLast, + ) -> Self + where + F: BinaryTransform, + S1aT: ComputedVecValue + JsonSchema, + S1bT: ComputedVecValue + JsonSchema, + S2aT: ComputedVecValue + JsonSchema, + S2bT: ComputedVecValue + JsonSchema, + { + let v = version + VERSION; + + Self { + height: LazyVecFrom2::transformed::( + name, + v, + source1.height.boxed_clone(), + source2.height.boxed_clone(), + ), + rest: LazyDerivedBlock2Last { + dates: LazyBinaryDateLast::from_both_binary_last::( + name, + v, + &source1.rest.dates, + &source2.rest.dates, + ), + difficultyepoch: LazyTransform2Last::from_vecs::( + name, + v, + source1.rest.difficultyepoch.boxed_clone(), + source2.rest.difficultyepoch.boxed_clone(), + ), + }, + } + } + + /// Create from separate height, difficultyepoch, and date sources. + /// + /// Use when sources are split across different types (e.g., ValueBlockDateLast + ComputedBlockLast). 
+ #[allow(clippy::too_many_arguments)] + pub fn from_height_difficultyepoch_dates>( + name: &str, + version: Version, + height_source1: IterableBoxedVec, + height_source2: IterableBoxedVec, + difficultyepoch_source1: IterableBoxedVec, + difficultyepoch_source2: IterableBoxedVec, + dateindex_source1: IterableBoxedVec, + dates_source1: &DerivedDateLast, + dateindex_source2: IterableBoxedVec, + dates_source2: &DerivedDateLast, + ) -> Self { + let v = version + VERSION; + + Self { + height: LazyVecFrom2::transformed::(name, v, height_source1, height_source2), + rest: LazyDerivedBlock2Last { + dates: LazyBinaryDateLast::from_both_derived_last::( + name, + v, + dateindex_source1, + dates_source1, + dateindex_source2, + dates_source2, + ), + difficultyepoch: LazyTransform2Last::from_vecs::( + name, + v, + difficultyepoch_source1, + difficultyepoch_source2, + ), + }, + } + } +} diff --git a/crates/brk_computer/src/internal/lazy/block/binary_sum.rs b/crates/brk_computer/src/internal/lazy/block/binary_sum.rs index 41c03752b..7c8af0aaf 100644 --- a/crates/brk_computer/src/internal/lazy/block/binary_sum.rs +++ b/crates/brk_computer/src/internal/lazy/block/binary_sum.rs @@ -4,9 +4,9 @@ use brk_traversable::Traversable; use brk_types::{Height, Version}; use derive_more::{Deref, DerefMut}; use schemars::JsonSchema; -use vecdb::{BinaryTransform, IterableBoxedVec, LazyVecFrom2}; +use vecdb::{BinaryTransform, IterableBoxedVec, IterableCloneableVec, LazyVecFrom2}; -use crate::internal::{ComputedVecValue, DerivedComputedBlockSum, NumericValue}; +use crate::internal::{ComputedBlockSum, ComputedVecValue, DerivedComputedBlockSum, NumericValue}; use super::super::derived_block::LazyDerivedBlock2Sum; @@ -20,11 +20,10 @@ where S1T: ComputedVecValue, S2T: ComputedVecValue, { - #[traversable(wrap = "base")] + #[traversable(wrap = "sum")] pub height: LazyVecFrom2, #[deref] #[deref_mut] - #[traversable(flatten)] pub rest: LazyDerivedBlock2Sum, } @@ -49,4 +48,23 @@ where rest: 
LazyDerivedBlock2Sum::from_derived::(name, v, source1, source2), } } + + pub fn from_computed>( + name: &str, + version: Version, + source1: &ComputedBlockSum, + source2: &ComputedBlockSum, + ) -> Self { + let v = version + VERSION; + + Self { + height: LazyVecFrom2::transformed::( + name, + v, + source1.height.boxed_clone(), + source2.height.boxed_clone(), + ), + rest: LazyDerivedBlock2Sum::from_derived::(name, v, &source1.rest, &source2.rest), + } + } } diff --git a/crates/brk_computer/src/internal/lazy/block/binary_sum_cum.rs b/crates/brk_computer/src/internal/lazy/block/binary_sum_cum.rs index 65f45a61a..e44da0247 100644 --- a/crates/brk_computer/src/internal/lazy/block/binary_sum_cum.rs +++ b/crates/brk_computer/src/internal/lazy/block/binary_sum_cum.rs @@ -6,7 +6,10 @@ use derive_more::{Deref, DerefMut}; use schemars::JsonSchema; use vecdb::{BinaryTransform, IterableBoxedVec, IterableCloneableVec, LazyVecFrom2}; -use crate::internal::{ComputedBlockSumCum, ComputedVecValue, DerivedComputedBlockSumCum}; +use crate::internal::{ + ComputedBlockLast, ComputedBlockSumCum, ComputedVecValue, DerivedComputedBlockLast, + DerivedComputedBlockSumCum, NumericValue, +}; use super::super::derived_block::LazyDerivedBlock2SumCum; @@ -18,13 +21,12 @@ where S1T: ComputedVecValue, S2T: ComputedVecValue, { - #[traversable(wrap = "base")] + #[traversable(wrap = "sum")] pub height: LazyVecFrom2, - #[traversable(wrap = "cumulative")] + #[traversable(rename = "cumulative")] pub height_cumulative: LazyVecFrom2, #[deref] #[deref_mut] - #[traversable(flatten)] pub rest: LazyDerivedBlock2SumCum, } @@ -58,7 +60,7 @@ where source1.height_cumulative.0.boxed_clone(), source2.height_cumulative.0.boxed_clone(), ), - rest: LazyDerivedBlock2SumCum::from_computed::( + rest: LazyDerivedBlock2SumCum::from_computed_sum_raw::( name, v, &source1.dateindex, @@ -93,7 +95,7 @@ where source1.height_cumulative.0.boxed_clone(), source2.height_cumulative.0.boxed_clone(), ), - rest: 
LazyDerivedBlock2SumCum::from_computed::( + rest: LazyDerivedBlock2SumCum::from_computed_sum_raw::( name, v, &source1.dateindex, @@ -105,4 +107,106 @@ where ), } } + + // --- Methods accepting SumCum + Last sources --- + + pub fn from_computed_last>( + name: &str, + version: Version, + height_source1: IterableBoxedVec, + height_source2: IterableBoxedVec, + source1: &ComputedBlockSumCum, + source2: &ComputedBlockLast, + ) -> Self + where + S1T: PartialOrd, + S2T: NumericValue, + { + let v = version + VERSION; + Self { + height: LazyVecFrom2::transformed::(name, v, height_source1, height_source2), + height_cumulative: LazyVecFrom2::transformed::( + &format!("{name}_cumulative"), + v, + source1.height_cumulative.0.boxed_clone(), + source2.height.boxed_clone(), + ), + rest: LazyDerivedBlock2SumCum::from_computed_last::(name, v, source1, source2), + } + } + + pub fn from_derived_computed_last>( + name: &str, + version: Version, + height_source1: IterableBoxedVec, + height_source2: IterableBoxedVec, + source1: &DerivedComputedBlockSumCum, + source2: &ComputedBlockLast, + ) -> Self + where + S1T: NumericValue, + S2T: NumericValue, + { + let v = version + VERSION; + Self { + height: LazyVecFrom2::transformed::(name, v, height_source1, height_source2), + height_cumulative: LazyVecFrom2::transformed::( + &format!("{name}_cumulative"), + v, + source1.height_cumulative.0.boxed_clone(), + source2.height.boxed_clone(), + ), + rest: LazyDerivedBlock2SumCum::from_derived_computed_last::(name, v, source1, source2), + } + } + + pub fn from_derived_last>( + name: &str, + version: Version, + height_source1: IterableBoxedVec, + height_source2: IterableBoxedVec, + source1: &DerivedComputedBlockSumCum, + source2: &DerivedComputedBlockLast, + ) -> Self + where + S1T: NumericValue, + S2T: NumericValue, + { + let v = version + VERSION; + Self { + height: LazyVecFrom2::transformed::(name, v, height_source1.clone(), height_source2.clone()), + height_cumulative: LazyVecFrom2::transformed::( + 
&format!("{name}_cumulative"), + v, + source1.height_cumulative.0.boxed_clone(), + height_source2, + ), + rest: LazyDerivedBlock2SumCum::from_derived_last::(name, v, source1, source2), + } + } + + pub fn from_computed_derived_last>( + name: &str, + version: Version, + height_source1: IterableBoxedVec, + height_source2: IterableBoxedVec, + source1: &ComputedBlockSumCum, + source2: &DerivedComputedBlockLast, + ) -> Self + where + S1T: PartialOrd, + S2T: NumericValue, + { + let v = version + VERSION; + Self { + height: LazyVecFrom2::transformed::(name, v, height_source1.clone(), height_source2.clone()), + height_cumulative: LazyVecFrom2::transformed::( + &format!("{name}_cumulative"), + v, + source1.height_cumulative.0.boxed_clone(), + height_source2, + ), + rest: LazyDerivedBlock2SumCum::from_computed_derived_last::(name, v, source1, source2), + } + } } diff --git a/crates/brk_computer/src/internal/lazy/block/binary_sum_cum_last.rs b/crates/brk_computer/src/internal/lazy/block/binary_sum_cum_last.rs deleted file mode 100644 index 7411061dd..000000000 --- a/crates/brk_computer/src/internal/lazy/block/binary_sum_cum_last.rs +++ /dev/null @@ -1,121 +0,0 @@ -//! Lazy binary transform from SumCum + Last sources. 
- -use brk_traversable::Traversable; -use brk_types::{Height, Version}; -use derive_more::{Deref, DerefMut}; -use schemars::JsonSchema; -use vecdb::{BinaryTransform, IterableBoxedVec, LazyVecFrom2}; - -use crate::internal::{ - ComputedBlockLast, ComputedBlockSumCum, ComputedVecValue, DerivedComputedBlockLast, - DerivedComputedBlockSumCum, NumericValue, -}; - -use super::super::derived_block::LazyDerivedBlock2SumCumLast; - -#[derive(Clone, Deref, DerefMut, Traversable)] -#[traversable(merge)] -pub struct BinaryBlockSumCumLast -where - T: ComputedVecValue + PartialOrd + JsonSchema, - S1T: ComputedVecValue, - S2T: ComputedVecValue, -{ - #[traversable(wrap = "base")] - pub height: LazyVecFrom2, - #[deref] - #[deref_mut] - #[traversable(flatten)] - pub rest: LazyDerivedBlock2SumCumLast, -} - -const VERSION: Version = Version::ZERO; - -impl BinaryBlockSumCumLast -where - T: ComputedVecValue + JsonSchema + 'static, - S1T: ComputedVecValue + JsonSchema, - S2T: ComputedVecValue + JsonSchema, -{ - pub fn from_computed>( - name: &str, - version: Version, - height_source1: IterableBoxedVec, - height_source2: IterableBoxedVec, - source1: &ComputedBlockSumCum, - source2: &ComputedBlockLast, - ) -> Self - where - S1T: PartialOrd, - S2T: NumericValue, - { - let v = version + VERSION; - Self { - height: LazyVecFrom2::transformed::(name, v, height_source1, height_source2), - rest: LazyDerivedBlock2SumCumLast::from_computed::(name, v, source1, source2), - } - } - - pub fn from_derived_computed>( - name: &str, - version: Version, - height_source1: IterableBoxedVec, - height_source2: IterableBoxedVec, - source1: &DerivedComputedBlockSumCum, - source2: &ComputedBlockLast, - ) -> Self - where - S1T: NumericValue, - S2T: NumericValue, - { - let v = version + VERSION; - Self { - height: LazyVecFrom2::transformed::(name, v, height_source1, height_source2), - rest: LazyDerivedBlock2SumCumLast::from_derived_computed_full::( - name, v, source1, source2, - ), - } - } - - pub fn from_derived>( - 
name: &str, - version: Version, - height_source1: IterableBoxedVec, - height_source2: IterableBoxedVec, - source1: &DerivedComputedBlockSumCum, - source2: &DerivedComputedBlockLast, - ) -> Self - where - S1T: NumericValue, - S2T: NumericValue, - { - let v = version + VERSION; - Self { - height: LazyVecFrom2::transformed::(name, v, height_source1, height_source2), - rest: LazyDerivedBlock2SumCumLast::from_derived_computed::( - name, v, source1, source2, - ), - } - } - - pub fn from_computed_derived>( - name: &str, - version: Version, - height_source1: IterableBoxedVec, - height_source2: IterableBoxedVec, - source1: &ComputedBlockSumCum, - source2: &DerivedComputedBlockLast, - ) -> Self - where - S1T: PartialOrd, - S2T: NumericValue, - { - let v = version + VERSION; - Self { - height: LazyVecFrom2::transformed::(name, v, height_source1, height_source2), - rest: LazyDerivedBlock2SumCumLast::from_computed_derived_computed::( - name, v, source1, source2, - ), - } - } -} diff --git a/crates/brk_computer/src/internal/lazy/block/computed_full.rs b/crates/brk_computer/src/internal/lazy/block/computed_full.rs new file mode 100644 index 000000000..5ab65e56a --- /dev/null +++ b/crates/brk_computer/src/internal/lazy/block/computed_full.rs @@ -0,0 +1,81 @@ +//! LazyComputedBlockFull - lazy height + DerivedComputedBlockFull. + +use brk_error::Result; +use brk_traversable::Traversable; +use brk_types::{Height, Version}; +use derive_more::{Deref, DerefMut}; +use schemars::JsonSchema; +use vecdb::{Database, Exit, IterableCloneableVec, LazyVecFrom1, UnaryTransform}; + +use crate::{ + ComputeIndexes, indexes, + internal::{ComputedVecValue, DerivedComputedBlockFull, NumericValue}, +}; + +const VERSION: Version = Version::ZERO; + +/// Lazy height transform + stored/computed derived indexes. 
+#[derive(Clone, Deref, DerefMut, Traversable)] +#[traversable(merge)] +pub struct LazyComputedBlockFull +where + T: ComputedVecValue + PartialOrd + JsonSchema, + S: ComputedVecValue, +{ + #[traversable(rename = "base")] + pub height: LazyVecFrom1, + #[deref] + #[deref_mut] + pub rest: DerivedComputedBlockFull, +} + +impl LazyComputedBlockFull +where + T: NumericValue + JsonSchema, + S: ComputedVecValue + JsonSchema, +{ + pub fn forced_import>( + db: &Database, + name: &str, + version: Version, + source: impl IterableCloneableVec, + indexes: &indexes::Vecs, + ) -> Result { + let v = version + VERSION; + + let height = LazyVecFrom1::transformed::(name, v, source.boxed_clone()); + + let rest = + DerivedComputedBlockFull::forced_import(db, name, height.boxed_clone(), v, indexes)?; + + Ok(Self { height, rest }) + } + + pub fn forced_import_with_init( + db: &Database, + name: &str, + version: Version, + source: impl IterableCloneableVec, + indexes: &indexes::Vecs, + init_fn: vecdb::ComputeFrom1, + ) -> Result { + let v = version + VERSION; + + let height = LazyVecFrom1::init(name, v, source.boxed_clone(), init_fn); + + let rest = + DerivedComputedBlockFull::forced_import(db, name, height.boxed_clone(), v, indexes)?; + + Ok(Self { height, rest }) + } + + pub fn derive_from( + &mut self, + indexes: &indexes::Vecs, + starting_indexes: &ComputeIndexes, + exit: &Exit, + ) -> Result<()> { + self.rest + .derive_from(indexes, starting_indexes, &self.height, exit) + } +} diff --git a/crates/brk_computer/src/internal/lazy/block/distribution.rs b/crates/brk_computer/src/internal/lazy/block/distribution.rs new file mode 100644 index 000000000..7bd3ef3c1 --- /dev/null +++ b/crates/brk_computer/src/internal/lazy/block/distribution.rs @@ -0,0 +1,95 @@ +//! LazyBlockDistribution - lazy height + derived distribution (avg/min/max) for indexes. 
+ +use brk_error::Result; +use brk_traversable::Traversable; +use brk_types::{Height, Version}; +use derive_more::{Deref, DerefMut}; +use schemars::JsonSchema; +use vecdb::{ComputeFrom1, Database, Exit, IterableCloneableVec, LazyVecFrom1, UnaryTransform}; + +use crate::{ + ComputeIndexes, indexes, + internal::{ComputedVecValue, DerivedComputedBlockDistribution, NumericValue}, +}; + +const VERSION: Version = Version::ZERO; + +/// Lazy height + derived distribution for indexes. +/// +/// Height is a lazy transform from a source. +/// Indexes (dateindex + periods + difficultyepoch) store distribution stats (avg/min/max). +#[derive(Clone, Deref, DerefMut, Traversable)] +#[traversable(merge)] +pub struct LazyBlockDistribution +where + T: ComputedVecValue + PartialOrd + JsonSchema, + S: ComputedVecValue, +{ + #[traversable(rename = "base")] + pub height: LazyVecFrom1, + #[deref] + #[deref_mut] + #[traversable(flatten)] + pub rest: DerivedComputedBlockDistribution, +} + +impl LazyBlockDistribution +where + T: NumericValue + JsonSchema, + S: ComputedVecValue + JsonSchema, +{ + pub fn forced_import>( + db: &Database, + name: &str, + version: Version, + source: impl IterableCloneableVec, + indexes: &indexes::Vecs, + ) -> Result { + let v = version + VERSION; + + let height = LazyVecFrom1::transformed::(name, v, source.boxed_clone()); + + let rest = DerivedComputedBlockDistribution::forced_import( + db, + name, + height.boxed_clone(), + v, + indexes, + )?; + + Ok(Self { height, rest }) + } + + pub fn forced_import_with_init( + db: &Database, + name: &str, + version: Version, + source: impl IterableCloneableVec, + indexes: &indexes::Vecs, + init_fn: ComputeFrom1, + ) -> Result { + let v = version + VERSION; + + let height = LazyVecFrom1::init(name, v, source.boxed_clone(), init_fn); + + let rest = DerivedComputedBlockDistribution::forced_import( + db, + name, + height.boxed_clone(), + v, + indexes, + )?; + + Ok(Self { height, rest }) + } + + pub fn derive_from( + &mut self, + 
indexes: &indexes::Vecs, + starting_indexes: &ComputeIndexes, + exit: &Exit, + ) -> Result<()> { + self.rest + .derive_from(indexes, starting_indexes, &self.height, exit) + } +} diff --git a/crates/brk_computer/src/internal/lazy/block/full.rs b/crates/brk_computer/src/internal/lazy/block/full.rs index d45bb8a7d..752915e74 100644 --- a/crates/brk_computer/src/internal/lazy/block/full.rs +++ b/crates/brk_computer/src/internal/lazy/block/full.rs @@ -22,7 +22,6 @@ where pub height: LazyVecFrom1, #[deref] #[deref_mut] - #[traversable(flatten)] pub rest: LazyDerivedBlockFull, } diff --git a/crates/brk_computer/src/internal/lazy/block/last.rs b/crates/brk_computer/src/internal/lazy/block/last.rs index 24c6f25c1..46cc078cc 100644 --- a/crates/brk_computer/src/internal/lazy/block/last.rs +++ b/crates/brk_computer/src/internal/lazy/block/last.rs @@ -4,10 +4,11 @@ use brk_traversable::Traversable; use brk_types::{Height, Version}; use derive_more::{Deref, DerefMut}; use schemars::JsonSchema; -use vecdb::{IterableBoxedVec, LazyVecFrom1, UnaryTransform}; +use vecdb::{IterableBoxedVec, IterableCloneableVec, LazyVecFrom1, UnaryTransform}; use crate::internal::{ - ComputedBlockLast, ComputedVecValue, DerivedComputedBlockLast, NumericValue, + ComputedBlockLast, ComputedHeightDateLast, ComputedVecValue, DerivedComputedBlockLast, + NumericValue, }; use super::super::derived_block::LazyDerivedBlockLast; @@ -21,7 +22,6 @@ where pub height: LazyVecFrom1, #[deref] #[deref_mut] - #[traversable(flatten)] pub rest: LazyDerivedBlockLast, } @@ -63,4 +63,19 @@ where rest: LazyDerivedBlockLast::from_derived_computed::(name, v, source), } } + + pub fn from_computed_height_date>( + name: &str, + version: Version, + source: &ComputedHeightDateLast, + ) -> Self + where + S1T: PartialOrd, + { + let v = version + VERSION; + Self { + height: LazyVecFrom1::transformed::(name, v, source.height.boxed_clone()), + rest: LazyDerivedBlockLast::from_computed_height_date::(name, v, source), + } + } } diff 
--git a/crates/brk_computer/src/internal/lazy/block/mod.rs b/crates/brk_computer/src/internal/lazy/block/mod.rs index 710b64349..a71d694c3 100644 --- a/crates/brk_computer/src/internal/lazy/block/mod.rs +++ b/crates/brk_computer/src/internal/lazy/block/mod.rs @@ -1,16 +1,20 @@ mod binary_full; +mod binary_last; mod binary_sum; mod binary_sum_cum; -mod binary_sum_cum_last; +mod computed_full; +mod distribution; mod full; mod last; mod sum; mod sum_cum; pub use binary_full::*; +pub use binary_last::*; pub use binary_sum::*; pub use binary_sum_cum::*; -pub use binary_sum_cum_last::*; +pub use computed_full::*; +pub use distribution::*; pub use full::*; pub use last::*; pub use sum::*; diff --git a/crates/brk_computer/src/internal/lazy/block/sum.rs b/crates/brk_computer/src/internal/lazy/block/sum.rs index 655289428..c2368d63d 100644 --- a/crates/brk_computer/src/internal/lazy/block/sum.rs +++ b/crates/brk_computer/src/internal/lazy/block/sum.rs @@ -16,7 +16,6 @@ where T: ComputedVecValue + PartialOrd + JsonSchema, S1T: ComputedVecValue, { - #[traversable(wrap = "base")] pub height: LazyVecFrom1, #[deref] #[deref_mut] diff --git a/crates/brk_computer/src/internal/lazy/block/sum_cum.rs b/crates/brk_computer/src/internal/lazy/block/sum_cum.rs index 6b88f0d0f..35b1bbcc6 100644 --- a/crates/brk_computer/src/internal/lazy/block/sum_cum.rs +++ b/crates/brk_computer/src/internal/lazy/block/sum_cum.rs @@ -18,11 +18,10 @@ where T: ComputedVecValue + PartialOrd + JsonSchema, S1T: ComputedVecValue, { - #[traversable(wrap = "base")] + #[traversable(wrap = "sum")] pub height: LazyVecFrom1, #[deref] #[deref_mut] - #[traversable(flatten)] pub rest: LazyDerivedBlockSumCum, } diff --git a/crates/brk_computer/src/internal/lazy/date/binary_height_date_last.rs b/crates/brk_computer/src/internal/lazy/date/binary_height_date_last.rs new file mode 100644 index 000000000..d726b78e1 --- /dev/null +++ b/crates/brk_computer/src/internal/lazy/date/binary_height_date_last.rs @@ -0,0 +1,76 @@ +//! 
BinaryHeightDateLast - height storage + binary transform lazy date periods. +//! +//! Use this when height is stored as EagerVec and date periods are lazy binary transforms. + +use brk_traversable::Traversable; +use brk_types::{Height, Version}; +use derive_more::{Deref, DerefMut}; +use schemars::JsonSchema; +use vecdb::{BinaryTransform, EagerVec, PcoVec}; + +use crate::internal::{ + ComputedDateLast, ComputedHeightDateLast, ComputedVecValue, LazyBinaryDateLast, +}; + +const VERSION: Version = Version::ZERO; + +#[derive(Clone, Deref, DerefMut, Traversable)] +#[traversable(merge)] +pub struct BinaryHeightDateLast +where + T: ComputedVecValue + PartialOrd + JsonSchema, + S1T: ComputedVecValue, + S2T: ComputedVecValue, +{ + pub height: EagerVec>, + #[deref] + #[deref_mut] + #[traversable(flatten)] + pub rest: LazyBinaryDateLast, +} + +impl BinaryHeightDateLast +where + T: ComputedVecValue + JsonSchema + 'static, + S1T: ComputedVecValue + JsonSchema, + S2T: ComputedVecValue + JsonSchema, +{ + pub fn from_computed_both_last>( + name: &str, + version: Version, + height: EagerVec>, + source1: &ComputedDateLast, + source2: &ComputedDateLast, + ) -> Self { + let v = version + VERSION; + + Self { + height, + rest: LazyBinaryDateLast::from_computed_both_last::(name, v, source1, source2), + } + } + + pub fn from_computed_height_date_last>( + name: &str, + version: Version, + height: EagerVec>, + source1: &ComputedHeightDateLast, + source2: &ComputedHeightDateLast, + ) -> Self + where + S1T: JsonSchema + 'static, + S2T: JsonSchema + 'static, + { + let v = version + VERSION; + + Self { + height, + rest: LazyBinaryDateLast::from_computed_both_last::( + name, + v, + &source1.rest, + &source2.rest, + ), + } + } +} diff --git a/crates/brk_computer/src/internal/lazy/date/binary_last.rs b/crates/brk_computer/src/internal/lazy/date/binary_last.rs index 35543fa93..6109a15c3 100644 --- a/crates/brk_computer/src/internal/lazy/date/binary_last.rs +++ 
b/crates/brk_computer/src/internal/lazy/date/binary_last.rs @@ -9,7 +9,7 @@ use vecdb::{BinaryTransform, IterableBoxedVec, IterableCloneableVec, LazyVecFrom use crate::internal::{ ComputedBlockLast, ComputedBlockSum, ComputedDateLast, ComputedVecValue, DerivedDateLast, - NumericValue, + DerivedDateSumCum, NumericValue, }; use super::super::transform::LazyTransform2Last; @@ -18,7 +18,7 @@ const VERSION: Version = Version::ZERO; #[derive(Clone, Traversable)] #[traversable(merge)] -pub struct BinaryDateLast +pub struct LazyBinaryDateLast where T: ComputedVecValue + PartialOrd + JsonSchema, S1T: ComputedVecValue, @@ -33,7 +33,7 @@ where pub decadeindex: LazyTransform2Last, } -impl BinaryDateLast +impl LazyBinaryDateLast where T: ComputedVecValue + JsonSchema + 'static, S1T: ComputedVecValue + JsonSchema, @@ -47,6 +47,17 @@ where ) -> Self { let v = version + VERSION; + macro_rules! period { + ($p:ident) => { + LazyTransform2Last::from_lazy_last::( + name, + v, + &source1.$p, + &source2.$p, + ) + }; + } + Self { dateindex: LazyVecFrom2::transformed::( name, @@ -54,42 +65,12 @@ where source1.dateindex.boxed_clone(), source2.dateindex.boxed_clone(), ), - weekindex: LazyTransform2Last::from_lazy_last::( - name, - v, - &source1.weekindex, - &source2.weekindex, - ), - monthindex: LazyTransform2Last::from_lazy_last::( - name, - v, - &source1.monthindex, - &source2.monthindex, - ), - quarterindex: LazyTransform2Last::from_lazy_last::( - name, - v, - &source1.quarterindex, - &source2.quarterindex, - ), - semesterindex: LazyTransform2Last::from_lazy_last::( - name, - v, - &source1.semesterindex, - &source2.semesterindex, - ), - yearindex: LazyTransform2Last::from_lazy_last::( - name, - v, - &source1.yearindex, - &source2.yearindex, - ), - decadeindex: LazyTransform2Last::from_lazy_last::( - name, - v, - &source1.decadeindex, - &source2.decadeindex, - ), + weekindex: period!(weekindex), + monthindex: period!(monthindex), + quarterindex: period!(quarterindex), + semesterindex: 
period!(semesterindex), + yearindex: period!(yearindex), + decadeindex: period!(decadeindex), } } @@ -102,6 +83,17 @@ where ) -> Self { let v = version + VERSION; + macro_rules! period { + ($p:ident) => { + LazyTransform2Last::from_lazy_last::( + name, + v, + &source1.$p, + &source2.$p, + ) + }; + } + Self { dateindex: LazyVecFrom2::transformed::( name, @@ -109,42 +101,51 @@ where dateindex_source1, source2.dateindex.boxed_clone(), ), - weekindex: LazyTransform2Last::from_lazy_last::( + weekindex: period!(weekindex), + monthindex: period!(monthindex), + quarterindex: period!(quarterindex), + semesterindex: period!(semesterindex), + yearindex: period!(yearindex), + decadeindex: period!(decadeindex), + } + } + + pub fn from_derived_last_and_block_last>( + name: &str, + version: Version, + dateindex_source1: IterableBoxedVec, + source1: &DerivedDateLast, + source2: &ComputedBlockLast, + ) -> Self + where + S2T: NumericValue, + { + let v = version + VERSION; + + macro_rules! period { + ($p:ident) => { + LazyTransform2Last::from_lazy_last::( + name, + v, + &source1.$p, + &source2.$p, + ) + }; + } + + Self { + dateindex: LazyVecFrom2::transformed::( name, v, - &source1.weekindex, - &source2.weekindex, - ), - monthindex: LazyTransform2Last::from_lazy_last::( - name, - v, - &source1.monthindex, - &source2.monthindex, - ), - quarterindex: LazyTransform2Last::from_lazy_last::( - name, - v, - &source1.quarterindex, - &source2.quarterindex, - ), - semesterindex: LazyTransform2Last::from_lazy_last::( - name, - v, - &source1.semesterindex, - &source2.semesterindex, - ), - yearindex: LazyTransform2Last::from_lazy_last::( - name, - v, - &source1.yearindex, - &source2.yearindex, - ), - decadeindex: LazyTransform2Last::from_lazy_last::( - name, - v, - &source1.decadeindex, - &source2.decadeindex, + dateindex_source1, + source2.dateindex.0.boxed_clone(), ), + weekindex: period!(weekindex), + monthindex: period!(monthindex), + quarterindex: period!(quarterindex), + semesterindex: 
period!(semesterindex), + yearindex: period!(yearindex), + decadeindex: period!(decadeindex), } } @@ -158,6 +159,17 @@ where ) -> Self { let v = version + VERSION; + macro_rules! period { + ($p:ident) => { + LazyTransform2Last::from_lazy_last::( + name, + v, + &source1.$p, + &source2.$p, + ) + }; + } + Self { dateindex: LazyVecFrom2::transformed::( name, @@ -165,42 +177,12 @@ where dateindex_source1, dateindex_source2, ), - weekindex: LazyTransform2Last::from_lazy_last::( - name, - v, - &source1.weekindex, - &source2.weekindex, - ), - monthindex: LazyTransform2Last::from_lazy_last::( - name, - v, - &source1.monthindex, - &source2.monthindex, - ), - quarterindex: LazyTransform2Last::from_lazy_last::( - name, - v, - &source1.quarterindex, - &source2.quarterindex, - ), - semesterindex: LazyTransform2Last::from_lazy_last::( - name, - v, - &source1.semesterindex, - &source2.semesterindex, - ), - yearindex: LazyTransform2Last::from_lazy_last::( - name, - v, - &source1.yearindex, - &source2.yearindex, - ), - decadeindex: LazyTransform2Last::from_lazy_last::( - name, - v, - &source1.decadeindex, - &source2.decadeindex, - ), + weekindex: period!(weekindex), + monthindex: period!(monthindex), + quarterindex: period!(quarterindex), + semesterindex: period!(semesterindex), + yearindex: period!(yearindex), + decadeindex: period!(decadeindex), } } @@ -215,6 +197,17 @@ where { let v = version + VERSION; + macro_rules! 
period { + ($p:ident) => { + LazyTransform2Last::from_lazy_last::( + name, + v, + &source1.$p, + &source2.$p, + ) + }; + } + Self { dateindex: LazyVecFrom2::transformed::( name, @@ -222,42 +215,89 @@ where source1.dateindex.0.boxed_clone(), source2.dateindex.boxed_clone(), ), - weekindex: LazyTransform2Last::from_lazy_last::( + weekindex: period!(weekindex), + monthindex: period!(monthindex), + quarterindex: period!(quarterindex), + semesterindex: period!(semesterindex), + yearindex: period!(yearindex), + decadeindex: period!(decadeindex), + } + } + + pub fn from_dateindex_and_height_last>( + name: &str, + version: Version, + source1: &ComputedDateLast, + source2: &ComputedBlockLast, + ) -> Self + where + S2T: NumericValue, + { + let v = version + VERSION; + + macro_rules! period { + ($p:ident) => { + LazyTransform2Last::from_lazy_last::( + name, + v, + &source1.$p, + &source2.$p, + ) + }; + } + + Self { + dateindex: LazyVecFrom2::transformed::( name, v, - &source1.weekindex, - &source2.weekindex, + source1.dateindex.boxed_clone(), + source2.dateindex.0.boxed_clone(), ), - monthindex: LazyTransform2Last::from_lazy_last::( + weekindex: period!(weekindex), + monthindex: period!(monthindex), + quarterindex: period!(quarterindex), + semesterindex: period!(semesterindex), + yearindex: period!(yearindex), + decadeindex: period!(decadeindex), + } + } + + pub fn from_both_block_last>( + name: &str, + version: Version, + source1: &ComputedBlockLast, + source2: &ComputedBlockLast, + ) -> Self + where + S1T: NumericValue, + S2T: NumericValue, + { + let v = version + VERSION; + + macro_rules! 
period { + ($p:ident) => { + LazyTransform2Last::from_lazy_last::( + name, + v, + &source1.$p, + &source2.$p, + ) + }; + } + + Self { + dateindex: LazyVecFrom2::transformed::( name, v, - &source1.monthindex, - &source2.monthindex, - ), - quarterindex: LazyTransform2Last::from_lazy_last::( - name, - v, - &source1.quarterindex, - &source2.quarterindex, - ), - semesterindex: LazyTransform2Last::from_lazy_last::( - name, - v, - &source1.semesterindex, - &source2.semesterindex, - ), - yearindex: LazyTransform2Last::from_lazy_last::( - name, - v, - &source1.yearindex, - &source2.yearindex, - ), - decadeindex: LazyTransform2Last::from_lazy_last::( - name, - v, - &source1.decadeindex, - &source2.decadeindex, + source1.dateindex.0.boxed_clone(), + source2.dateindex.0.boxed_clone(), ), + weekindex: period!(weekindex), + monthindex: period!(monthindex), + quarterindex: period!(quarterindex), + semesterindex: period!(semesterindex), + yearindex: period!(yearindex), + decadeindex: period!(decadeindex), } } @@ -272,6 +312,17 @@ where { let v = version + VERSION; + macro_rules! period { + ($p:ident) => { + LazyTransform2Last::from_vecs::( + name, + v, + source1.$p.boxed_clone(), + source2.$p.boxed_clone(), + ) + }; + } + Self { dateindex: LazyVecFrom2::transformed::( name, @@ -279,42 +330,301 @@ where source1.dateindex.boxed_clone(), source2.dateindex.0.boxed_clone(), ), - weekindex: LazyTransform2Last::from_vecs::( + weekindex: period!(weekindex), + monthindex: period!(monthindex), + quarterindex: period!(quarterindex), + semesterindex: period!(semesterindex), + yearindex: period!(yearindex), + decadeindex: period!(decadeindex), + } + } + + pub fn from_block_last_and_height_sum>( + name: &str, + version: Version, + source1: &ComputedBlockLast, + source2: &ComputedBlockSum, + ) -> Self + where + S1T: NumericValue, + S2T: NumericValue, + { + let v = version + VERSION; + + macro_rules! 
period { + ($p:ident) => { + LazyTransform2Last::from_vecs::( + name, + v, + source1.$p.boxed_clone(), + source2.$p.boxed_clone(), + ) + }; + } + + Self { + dateindex: LazyVecFrom2::transformed::( name, v, - source1.weekindex.boxed_clone(), - source2.weekindex.boxed_clone(), + source1.dateindex.0.boxed_clone(), + source2.dateindex.0.boxed_clone(), ), - monthindex: LazyTransform2Last::from_vecs::( + weekindex: period!(weekindex), + monthindex: period!(monthindex), + quarterindex: period!(quarterindex), + semesterindex: period!(semesterindex), + yearindex: period!(yearindex), + decadeindex: period!(decadeindex), + } + } + + pub fn from_both_sum_cum_cumulatives>( + name: &str, + version: Version, + dateindex_source1: IterableBoxedVec, + dates1: &DerivedDateSumCum, + dateindex_source2: IterableBoxedVec, + dates2: &DerivedDateSumCum, + ) -> Self + where + S1T: PartialOrd, + S2T: PartialOrd, + { + let v = version + VERSION; + + macro_rules! period { + ($p:ident) => { + LazyTransform2Last::from_vecs::( + name, + v, + dates1.$p.cumulative.boxed_clone(), + dates2.$p.cumulative.boxed_clone(), + ) + }; + } + + Self { + dateindex: LazyVecFrom2::transformed::( name, v, - source1.monthindex.boxed_clone(), - source2.monthindex.boxed_clone(), + dateindex_source1, + dateindex_source2, ), - quarterindex: LazyTransform2Last::from_vecs::( + weekindex: period!(weekindex), + monthindex: period!(monthindex), + quarterindex: period!(quarterindex), + semesterindex: period!(semesterindex), + yearindex: period!(yearindex), + decadeindex: period!(decadeindex), + } + } + + /// Create from a DerivedDateLast source and a BinaryDateLast source. + pub fn from_derived_last_and_binary_last( + name: &str, + version: Version, + dateindex_source1: IterableBoxedVec, + source1: &DerivedDateLast, + source2: &LazyBinaryDateLast, + ) -> Self + where + F: BinaryTransform, + S2aT: ComputedVecValue + JsonSchema, + S2bT: ComputedVecValue + JsonSchema, + { + let v = version + VERSION; + + macro_rules! 
period { + ($p:ident) => { + LazyTransform2Last::from_vecs::( + name, + v, + source1.$p.boxed_clone(), + source2.$p.boxed_clone(), + ) + }; + } + + Self { + dateindex: LazyVecFrom2::transformed::( name, v, - source1.quarterindex.boxed_clone(), - source2.quarterindex.boxed_clone(), + dateindex_source1, + source2.dateindex.boxed_clone(), ), - semesterindex: LazyTransform2Last::from_vecs::( + weekindex: period!(weekindex), + monthindex: period!(monthindex), + quarterindex: period!(quarterindex), + semesterindex: period!(semesterindex), + yearindex: period!(yearindex), + decadeindex: period!(decadeindex), + } + } + + /// Create from a BinaryDateLast source and a ComputedDateLast source. + pub fn from_binary_and_computed_last( + name: &str, + version: Version, + source1: &LazyBinaryDateLast, + source2: &ComputedDateLast, + ) -> Self + where + F: BinaryTransform, + S1aT: ComputedVecValue + JsonSchema, + S1bT: ComputedVecValue + JsonSchema, + { + let v = version + VERSION; + + macro_rules! period { + ($p:ident) => { + LazyTransform2Last::from_vecs::( + name, + v, + source1.$p.boxed_clone(), + source2.$p.boxed_clone(), + ) + }; + } + + Self { + dateindex: LazyVecFrom2::transformed::( name, v, - source1.semesterindex.boxed_clone(), - source2.semesterindex.boxed_clone(), + source1.dateindex.boxed_clone(), + source2.dateindex.boxed_clone(), ), - yearindex: LazyTransform2Last::from_vecs::( + weekindex: period!(weekindex), + monthindex: period!(monthindex), + quarterindex: period!(quarterindex), + semesterindex: period!(semesterindex), + yearindex: period!(yearindex), + decadeindex: period!(decadeindex), + } + } + + /// Create from a BinaryDateLast source and a ComputedBlockLast source. 
+ pub fn from_binary_and_block_last( + name: &str, + version: Version, + source1: &LazyBinaryDateLast, + source2: &ComputedBlockLast, + ) -> Self + where + F: BinaryTransform, + S1aT: ComputedVecValue + JsonSchema, + S1bT: ComputedVecValue + JsonSchema, + S2T: NumericValue, + { + let v = version + VERSION; + + macro_rules! period { + ($p:ident) => { + LazyTransform2Last::from_vecs::( + name, + v, + source1.$p.boxed_clone(), + source2.$p.boxed_clone(), + ) + }; + } + + Self { + dateindex: LazyVecFrom2::transformed::( name, v, - source1.yearindex.boxed_clone(), - source2.yearindex.boxed_clone(), + source1.dateindex.boxed_clone(), + source2.dateindex.0.boxed_clone(), ), - decadeindex: LazyTransform2Last::from_vecs::( + weekindex: period!(weekindex), + monthindex: period!(monthindex), + quarterindex: period!(quarterindex), + semesterindex: period!(semesterindex), + yearindex: period!(yearindex), + decadeindex: period!(decadeindex), + } + } + + /// Create from a ComputedDateLast source and a BinaryDateLast source. + pub fn from_computed_and_binary_last( + name: &str, + version: Version, + source1: &ComputedDateLast, + source2: &LazyBinaryDateLast, + ) -> Self + where + F: BinaryTransform, + S2aT: ComputedVecValue + JsonSchema, + S2bT: ComputedVecValue + JsonSchema, + { + let v = version + VERSION; + + macro_rules! period { + ($p:ident) => { + LazyTransform2Last::from_vecs::( + name, + v, + source1.$p.boxed_clone(), + source2.$p.boxed_clone(), + ) + }; + } + + Self { + dateindex: LazyVecFrom2::transformed::( name, v, - source1.decadeindex.boxed_clone(), - source2.decadeindex.boxed_clone(), + source1.dateindex.boxed_clone(), + source2.dateindex.boxed_clone(), ), + weekindex: period!(weekindex), + monthindex: period!(monthindex), + quarterindex: period!(quarterindex), + semesterindex: period!(semesterindex), + yearindex: period!(yearindex), + decadeindex: period!(decadeindex), + } + } + + /// Create from two BinaryDateLast sources. 
+ pub fn from_both_binary_last( + name: &str, + version: Version, + source1: &LazyBinaryDateLast, + source2: &LazyBinaryDateLast, + ) -> Self + where + F: BinaryTransform, + S1aT: ComputedVecValue + JsonSchema, + S1bT: ComputedVecValue + JsonSchema, + S2aT: ComputedVecValue + JsonSchema, + S2bT: ComputedVecValue + JsonSchema, + { + let v = version + VERSION; + + macro_rules! period { + ($p:ident) => { + LazyTransform2Last::from_vecs::( + name, + v, + source1.$p.boxed_clone(), + source2.$p.boxed_clone(), + ) + }; + } + + Self { + dateindex: LazyVecFrom2::transformed::( + name, + v, + source1.dateindex.boxed_clone(), + source2.dateindex.boxed_clone(), + ), + weekindex: period!(weekindex), + monthindex: period!(monthindex), + quarterindex: period!(quarterindex), + semesterindex: period!(semesterindex), + yearindex: period!(yearindex), + decadeindex: period!(decadeindex), } } } diff --git a/crates/brk_computer/src/internal/lazy/date/binary_sum.rs b/crates/brk_computer/src/internal/lazy/date/binary_sum.rs index 1c8befc8b..4e0035609 100644 --- a/crates/brk_computer/src/internal/lazy/date/binary_sum.rs +++ b/crates/brk_computer/src/internal/lazy/date/binary_sum.rs @@ -42,49 +42,20 @@ where ) -> Self { let v = version + VERSION; + macro_rules! 
period { + ($p:ident) => { + LazyTransform2Sum::from_boxed::(name, v, source1.$p.boxed_clone(), source2.$p.boxed_clone()) + }; + } + Self { - dateindex: LazyTransform2Sum::from_sum::( - name, - v, - &source1.dateindex, - &source2.dateindex, - ), - weekindex: LazyTransform2Sum::from_boxed::( - name, - v, - source1.weekindex.boxed_clone(), - source2.weekindex.boxed_clone(), - ), - monthindex: LazyTransform2Sum::from_boxed::( - name, - v, - source1.monthindex.boxed_clone(), - source2.monthindex.boxed_clone(), - ), - quarterindex: LazyTransform2Sum::from_boxed::( - name, - v, - source1.quarterindex.boxed_clone(), - source2.quarterindex.boxed_clone(), - ), - semesterindex: LazyTransform2Sum::from_boxed::( - name, - v, - source1.semesterindex.boxed_clone(), - source2.semesterindex.boxed_clone(), - ), - yearindex: LazyTransform2Sum::from_boxed::( - name, - v, - source1.yearindex.boxed_clone(), - source2.yearindex.boxed_clone(), - ), - decadeindex: LazyTransform2Sum::from_boxed::( - name, - v, - source1.decadeindex.boxed_clone(), - source2.decadeindex.boxed_clone(), - ), + dateindex: LazyTransform2Sum::from_sum::(name, v, &source1.dateindex, &source2.dateindex), + weekindex: period!(weekindex), + monthindex: period!(monthindex), + quarterindex: period!(quarterindex), + semesterindex: period!(semesterindex), + yearindex: period!(yearindex), + decadeindex: period!(decadeindex), } } } diff --git a/crates/brk_computer/src/internal/lazy/date/binary_sum_cum.rs b/crates/brk_computer/src/internal/lazy/date/binary_sum_cum.rs index a31026d6f..802af2f41 100644 --- a/crates/brk_computer/src/internal/lazy/date/binary_sum_cum.rs +++ b/crates/brk_computer/src/internal/lazy/date/binary_sum_cum.rs @@ -7,7 +7,10 @@ use brk_types::{ use schemars::JsonSchema; use vecdb::{BinaryTransform, IterableCloneableVec}; -use crate::internal::{ComputedVecValue, DerivedDateFull, DerivedDateSumCum, SumCum}; +use crate::internal::{ + ComputedBlockLast, ComputedBlockSumCum, ComputedVecValue, 
DerivedComputedBlockLast, + DerivedComputedBlockSumCum, DerivedDateFull, DerivedDateSumCum, NumericValue, SumCum, +}; use super::super::transform::LazyTransform2SumCum; @@ -47,56 +50,24 @@ where ) -> Self { let v = version + VERSION; + macro_rules! period { + ($p:ident) => { + LazyTransform2SumCum::from_sources::( + name, v, + periods1.$p.sum.boxed_clone(), periods2.$p.sum.boxed_clone(), + periods1.$p.cumulative.boxed_clone(), periods2.$p.cumulative.boxed_clone(), + ) + }; + } + Self { dateindex: LazyTransform2SumCum::from_sum_cum::(name, v, dateindex1, dateindex2), - weekindex: LazyTransform2SumCum::from_sources::( - name, - v, - periods1.weekindex.sum.boxed_clone(), - periods2.weekindex.sum.boxed_clone(), - periods1.weekindex.cumulative.boxed_clone(), - periods2.weekindex.cumulative.boxed_clone(), - ), - monthindex: LazyTransform2SumCum::from_sources::( - name, - v, - periods1.monthindex.sum.boxed_clone(), - periods2.monthindex.sum.boxed_clone(), - periods1.monthindex.cumulative.boxed_clone(), - periods2.monthindex.cumulative.boxed_clone(), - ), - quarterindex: LazyTransform2SumCum::from_sources::( - name, - v, - periods1.quarterindex.sum.boxed_clone(), - periods2.quarterindex.sum.boxed_clone(), - periods1.quarterindex.cumulative.boxed_clone(), - periods2.quarterindex.cumulative.boxed_clone(), - ), - semesterindex: LazyTransform2SumCum::from_sources::( - name, - v, - periods1.semesterindex.sum.boxed_clone(), - periods2.semesterindex.sum.boxed_clone(), - periods1.semesterindex.cumulative.boxed_clone(), - periods2.semesterindex.cumulative.boxed_clone(), - ), - yearindex: LazyTransform2SumCum::from_sources::( - name, - v, - periods1.yearindex.sum.boxed_clone(), - periods2.yearindex.sum.boxed_clone(), - periods1.yearindex.cumulative.boxed_clone(), - periods2.yearindex.cumulative.boxed_clone(), - ), - decadeindex: LazyTransform2SumCum::from_sources::( - name, - v, - periods1.decadeindex.sum.boxed_clone(), - periods2.decadeindex.sum.boxed_clone(), - 
periods1.decadeindex.cumulative.boxed_clone(), - periods2.decadeindex.cumulative.boxed_clone(), - ), + weekindex: period!(weekindex), + monthindex: period!(monthindex), + quarterindex: period!(quarterindex), + semesterindex: period!(semesterindex), + yearindex: period!(yearindex), + decadeindex: period!(decadeindex), } } @@ -110,44 +81,202 @@ where ) -> Self { let v = version + VERSION; + macro_rules! period { + ($p:ident) => { + LazyTransform2SumCum::from_lazy_stats_aggregate::( + name, v, &dates1.$p, &dates2.$p, + ) + }; + } + Self { dateindex: LazyTransform2SumCum::from_sum_cum::(name, v, dateindex1, dateindex2), - weekindex: LazyTransform2SumCum::from_lazy_stats_aggregate::( - name, - v, - &dates1.weekindex, - &dates2.weekindex, + weekindex: period!(weekindex), + monthindex: period!(monthindex), + quarterindex: period!(quarterindex), + semesterindex: period!(semesterindex), + yearindex: period!(yearindex), + decadeindex: period!(decadeindex), + } + } + + // --- Raw variants (no _sum suffix) for pure SumCum types --- + + #[allow(clippy::too_many_arguments)] + pub fn from_computed_sum_raw>( + name: &str, + version: Version, + dateindex1: &SumCum, + periods1: &DerivedDateSumCum, + dateindex2: &SumCum, + periods2: &DerivedDateSumCum, + ) -> Self { + let v = version + VERSION; + + macro_rules! 
period { + ($p:ident) => { + LazyTransform2SumCum::from_sources_sum_raw::( + name, v, + periods1.$p.sum.boxed_clone(), periods2.$p.sum.boxed_clone(), + periods1.$p.cumulative.boxed_clone(), periods2.$p.cumulative.boxed_clone(), + ) + }; + } + + Self { + dateindex: LazyTransform2SumCum::from_sum_cum_sum_raw::(name, v, dateindex1, dateindex2), + weekindex: period!(weekindex), + monthindex: period!(monthindex), + quarterindex: period!(quarterindex), + semesterindex: period!(semesterindex), + yearindex: period!(yearindex), + decadeindex: period!(decadeindex), + } + } + + // --- Methods accepting SumCum + Last sources --- + + pub fn from_computed_last>( + name: &str, + version: Version, + source1: &ComputedBlockSumCum, + source2: &ComputedBlockLast, + ) -> Self + where + S1T: PartialOrd, + S2T: NumericValue, + { + let v = version + VERSION; + + macro_rules! period { + ($p:ident) => { + LazyTransform2SumCum::from_sources_last_sum_raw::( + name, v, + source1.rest.$p.sum.boxed_clone(), + source1.rest.$p.cumulative.boxed_clone(), + source2.rest.$p.boxed_clone(), + ) + }; + } + + Self { + dateindex: LazyTransform2SumCum::from_sum_cum_last_sum_raw::( + name, v, &source1.dateindex, &source2.dateindex, ), - monthindex: LazyTransform2SumCum::from_lazy_stats_aggregate::( - name, - v, - &dates1.monthindex, - &dates2.monthindex, + weekindex: period!(weekindex), + monthindex: period!(monthindex), + quarterindex: period!(quarterindex), + semesterindex: period!(semesterindex), + yearindex: period!(yearindex), + decadeindex: period!(decadeindex), + } + } + + pub fn from_derived_computed_last>( + name: &str, + version: Version, + source1: &DerivedComputedBlockSumCum, + source2: &ComputedBlockLast, + ) -> Self + where + S1T: NumericValue, + S2T: NumericValue, + { + let v = version + VERSION; + + macro_rules! 
period { + ($p:ident) => { + LazyTransform2SumCum::from_sources_last_sum_raw::( + name, v, + source1.$p.sum.boxed_clone(), + source1.$p.cumulative.boxed_clone(), + source2.rest.$p.boxed_clone(), + ) + }; + } + + Self { + dateindex: LazyTransform2SumCum::from_sum_cum_last_sum_raw::( + name, v, &source1.dateindex, &source2.dateindex, ), - quarterindex: LazyTransform2SumCum::from_lazy_stats_aggregate::( - name, - v, - &dates1.quarterindex, - &dates2.quarterindex, + weekindex: period!(weekindex), + monthindex: period!(monthindex), + quarterindex: period!(quarterindex), + semesterindex: period!(semesterindex), + yearindex: period!(yearindex), + decadeindex: period!(decadeindex), + } + } + + pub fn from_computed_derived_last>( + name: &str, + version: Version, + source1: &ComputedBlockSumCum, + source2: &DerivedComputedBlockLast, + ) -> Self + where + S1T: PartialOrd, + S2T: NumericValue, + { + let v = version + VERSION; + + macro_rules! period { + ($p:ident) => { + LazyTransform2SumCum::from_sources_last_sum_raw::( + name, v, + source1.rest.$p.sum.boxed_clone(), + source1.rest.$p.cumulative.boxed_clone(), + source2.$p.boxed_clone(), + ) + }; + } + + Self { + dateindex: LazyTransform2SumCum::from_sum_cum_last_sum_raw::( + name, v, &source1.dateindex, &source2.dateindex, ), - semesterindex: LazyTransform2SumCum::from_lazy_stats_aggregate::( - name, - v, - &dates1.semesterindex, - &dates2.semesterindex, - ), - yearindex: LazyTransform2SumCum::from_lazy_stats_aggregate::( - name, - v, - &dates1.yearindex, - &dates2.yearindex, - ), - decadeindex: LazyTransform2SumCum::from_lazy_stats_aggregate::( - name, - v, - &dates1.decadeindex, - &dates2.decadeindex, + weekindex: period!(weekindex), + monthindex: period!(monthindex), + quarterindex: period!(quarterindex), + semesterindex: period!(semesterindex), + yearindex: period!(yearindex), + decadeindex: period!(decadeindex), + } + } + + pub fn from_derived_last>( + name: &str, + version: Version, + source1: 
&DerivedComputedBlockSumCum, + source2: &DerivedComputedBlockLast, + ) -> Self + where + S1T: NumericValue, + S2T: NumericValue, + { + let v = version + VERSION; + + macro_rules! period { + ($p:ident) => { + LazyTransform2SumCum::from_sources_last_sum_raw::( + name, v, + source1.$p.sum.boxed_clone(), + source1.$p.cumulative.boxed_clone(), + source2.$p.boxed_clone(), + ) + }; + } + + Self { + dateindex: LazyTransform2SumCum::from_sum_cum_last_sum_raw::( + name, v, &source1.dateindex, &source2.dateindex, ), + weekindex: period!(weekindex), + monthindex: period!(monthindex), + quarterindex: period!(quarterindex), + semesterindex: period!(semesterindex), + yearindex: period!(yearindex), + decadeindex: period!(decadeindex), } } } diff --git a/crates/brk_computer/src/internal/lazy/date/binary_sum_cum_last.rs b/crates/brk_computer/src/internal/lazy/date/binary_sum_cum_last.rs deleted file mode 100644 index e82b456b6..000000000 --- a/crates/brk_computer/src/internal/lazy/date/binary_sum_cum_last.rs +++ /dev/null @@ -1,297 +0,0 @@ -//! Binary transform for SumCum + Last pattern across date periods. 
- -use brk_traversable::Traversable; -use brk_types::{ - DateIndex, DecadeIndex, MonthIndex, QuarterIndex, SemesterIndex, Version, WeekIndex, YearIndex, -}; -use schemars::JsonSchema; -use vecdb::{BinaryTransform, IterableCloneableVec}; - -use crate::internal::{ - ComputedBlockLast, ComputedBlockSumCum, ComputedVecValue, DerivedComputedBlockLast, - DerivedComputedBlockSumCum, NumericValue, -}; - -use super::super::transform::LazyTransform2SumCumLast; - -const VERSION: Version = Version::ZERO; - -#[derive(Clone, Traversable)] -#[traversable(merge)] -pub struct LazyDate2SumCumLast -where - T: ComputedVecValue + PartialOrd + JsonSchema, - S1T: ComputedVecValue, - S2T: ComputedVecValue, -{ - pub dateindex: LazyTransform2SumCumLast, - pub weekindex: LazyTransform2SumCumLast, - pub monthindex: LazyTransform2SumCumLast, - pub quarterindex: LazyTransform2SumCumLast, - pub semesterindex: LazyTransform2SumCumLast, - pub yearindex: LazyTransform2SumCumLast, - pub decadeindex: LazyTransform2SumCumLast, -} - -impl LazyDate2SumCumLast -where - T: ComputedVecValue + JsonSchema + 'static, - S1T: ComputedVecValue + JsonSchema, - S2T: ComputedVecValue + JsonSchema, -{ - pub fn from_computed>( - name: &str, - version: Version, - source1: &ComputedBlockSumCum, - source2: &ComputedBlockLast, - ) -> Self - where - S1T: PartialOrd, - S2T: NumericValue, - { - let v = version + VERSION; - - Self { - dateindex: LazyTransform2SumCumLast::from_sources::( - name, - v, - &source1.dateindex, - &source2.dateindex, - ), - weekindex: LazyTransform2SumCumLast::from_boxed::( - name, - v, - source1.rest.weekindex.sum.boxed_clone(), - source1.rest.weekindex.cumulative.boxed_clone(), - source2.rest.weekindex.boxed_clone(), - ), - monthindex: LazyTransform2SumCumLast::from_boxed::( - name, - v, - source1.rest.monthindex.sum.boxed_clone(), - source1.rest.monthindex.cumulative.boxed_clone(), - source2.rest.monthindex.boxed_clone(), - ), - quarterindex: LazyTransform2SumCumLast::from_boxed::( - name, - v, - 
source1.rest.quarterindex.sum.boxed_clone(), - source1.rest.quarterindex.cumulative.boxed_clone(), - source2.rest.quarterindex.boxed_clone(), - ), - semesterindex: LazyTransform2SumCumLast::from_boxed::( - name, - v, - source1.rest.semesterindex.sum.boxed_clone(), - source1.rest.semesterindex.cumulative.boxed_clone(), - source2.rest.semesterindex.boxed_clone(), - ), - yearindex: LazyTransform2SumCumLast::from_boxed::( - name, - v, - source1.rest.yearindex.sum.boxed_clone(), - source1.rest.yearindex.cumulative.boxed_clone(), - source2.rest.yearindex.boxed_clone(), - ), - decadeindex: LazyTransform2SumCumLast::from_boxed::( - name, - v, - source1.rest.decadeindex.sum.boxed_clone(), - source1.rest.decadeindex.cumulative.boxed_clone(), - source2.rest.decadeindex.boxed_clone(), - ), - } - } - - pub fn from_derived_computed_full>( - name: &str, - version: Version, - source1: &DerivedComputedBlockSumCum, - source2: &ComputedBlockLast, - ) -> Self - where - S1T: NumericValue, - S2T: NumericValue, - { - let v = version + VERSION; - - Self { - dateindex: LazyTransform2SumCumLast::from_sources::( - name, - v, - &source1.dateindex, - &source2.dateindex, - ), - weekindex: LazyTransform2SumCumLast::from_boxed::( - name, - v, - source1.weekindex.sum.boxed_clone(), - source1.weekindex.cumulative.boxed_clone(), - source2.rest.weekindex.boxed_clone(), - ), - monthindex: LazyTransform2SumCumLast::from_boxed::( - name, - v, - source1.monthindex.sum.boxed_clone(), - source1.monthindex.cumulative.boxed_clone(), - source2.rest.monthindex.boxed_clone(), - ), - quarterindex: LazyTransform2SumCumLast::from_boxed::( - name, - v, - source1.quarterindex.sum.boxed_clone(), - source1.quarterindex.cumulative.boxed_clone(), - source2.rest.quarterindex.boxed_clone(), - ), - semesterindex: LazyTransform2SumCumLast::from_boxed::( - name, - v, - source1.semesterindex.sum.boxed_clone(), - source1.semesterindex.cumulative.boxed_clone(), - source2.rest.semesterindex.boxed_clone(), - ), - yearindex: 
LazyTransform2SumCumLast::from_boxed::( - name, - v, - source1.yearindex.sum.boxed_clone(), - source1.yearindex.cumulative.boxed_clone(), - source2.rest.yearindex.boxed_clone(), - ), - decadeindex: LazyTransform2SumCumLast::from_boxed::( - name, - v, - source1.decadeindex.sum.boxed_clone(), - source1.decadeindex.cumulative.boxed_clone(), - source2.rest.decadeindex.boxed_clone(), - ), - } - } - - pub fn from_computed_derived_computed>( - name: &str, - version: Version, - source1: &ComputedBlockSumCum, - source2: &DerivedComputedBlockLast, - ) -> Self - where - S1T: PartialOrd, - S2T: NumericValue, - { - let v = version + VERSION; - - Self { - dateindex: LazyTransform2SumCumLast::from_sources::( - name, - v, - &source1.dateindex, - &source2.dateindex, - ), - weekindex: LazyTransform2SumCumLast::from_boxed::( - name, - v, - source1.rest.weekindex.sum.boxed_clone(), - source1.rest.weekindex.cumulative.boxed_clone(), - source2.weekindex.boxed_clone(), - ), - monthindex: LazyTransform2SumCumLast::from_boxed::( - name, - v, - source1.rest.monthindex.sum.boxed_clone(), - source1.rest.monthindex.cumulative.boxed_clone(), - source2.monthindex.boxed_clone(), - ), - quarterindex: LazyTransform2SumCumLast::from_boxed::( - name, - v, - source1.rest.quarterindex.sum.boxed_clone(), - source1.rest.quarterindex.cumulative.boxed_clone(), - source2.quarterindex.boxed_clone(), - ), - semesterindex: LazyTransform2SumCumLast::from_boxed::( - name, - v, - source1.rest.semesterindex.sum.boxed_clone(), - source1.rest.semesterindex.cumulative.boxed_clone(), - source2.semesterindex.boxed_clone(), - ), - yearindex: LazyTransform2SumCumLast::from_boxed::( - name, - v, - source1.rest.yearindex.sum.boxed_clone(), - source1.rest.yearindex.cumulative.boxed_clone(), - source2.yearindex.boxed_clone(), - ), - decadeindex: LazyTransform2SumCumLast::from_boxed::( - name, - v, - source1.rest.decadeindex.sum.boxed_clone(), - source1.rest.decadeindex.cumulative.boxed_clone(), - 
source2.decadeindex.boxed_clone(), - ), - } - } - - pub fn from_derived_computed>( - name: &str, - version: Version, - source1: &DerivedComputedBlockSumCum, - source2: &DerivedComputedBlockLast, - ) -> Self - where - S1T: NumericValue, - S2T: NumericValue, - { - let v = version + VERSION; - - Self { - dateindex: LazyTransform2SumCumLast::from_sources::( - name, - v, - &source1.dateindex, - &source2.dateindex, - ), - weekindex: LazyTransform2SumCumLast::from_boxed::( - name, - v, - source1.weekindex.sum.boxed_clone(), - source1.weekindex.cumulative.boxed_clone(), - source2.weekindex.boxed_clone(), - ), - monthindex: LazyTransform2SumCumLast::from_boxed::( - name, - v, - source1.monthindex.sum.boxed_clone(), - source1.monthindex.cumulative.boxed_clone(), - source2.monthindex.boxed_clone(), - ), - quarterindex: LazyTransform2SumCumLast::from_boxed::( - name, - v, - source1.quarterindex.sum.boxed_clone(), - source1.quarterindex.cumulative.boxed_clone(), - source2.quarterindex.boxed_clone(), - ), - semesterindex: LazyTransform2SumCumLast::from_boxed::( - name, - v, - source1.semesterindex.sum.boxed_clone(), - source1.semesterindex.cumulative.boxed_clone(), - source2.semesterindex.boxed_clone(), - ), - yearindex: LazyTransform2SumCumLast::from_boxed::( - name, - v, - source1.yearindex.sum.boxed_clone(), - source1.yearindex.cumulative.boxed_clone(), - source2.yearindex.boxed_clone(), - ), - decadeindex: LazyTransform2SumCumLast::from_boxed::( - name, - v, - source1.decadeindex.sum.boxed_clone(), - source1.decadeindex.cumulative.boxed_clone(), - source2.decadeindex.boxed_clone(), - ), - } - } -} diff --git a/crates/brk_computer/src/internal/lazy/date/full.rs b/crates/brk_computer/src/internal/lazy/date/full.rs index 99062ae20..78a3a16c8 100644 --- a/crates/brk_computer/src/internal/lazy/date/full.rs +++ b/crates/brk_computer/src/internal/lazy/date/full.rs @@ -7,7 +7,7 @@ use vecdb::{IterableCloneableVec, UnaryTransform}; use crate::internal::{ComputedVecValue, 
DerivedDateFull, Full}; -use super::super::transform::LazyTransformFull; +use super::super::transform::{LazyTransformFull, LazyTransformStats}; const VERSION: Version = Version::ZERO; @@ -19,12 +19,12 @@ where S1T: ComputedVecValue, { pub dateindex: LazyTransformFull, - pub weekindex: LazyTransformFull, - pub monthindex: LazyTransformFull, - pub quarterindex: LazyTransformFull, - pub semesterindex: LazyTransformFull, - pub yearindex: LazyTransformFull, - pub decadeindex: LazyTransformFull, + pub weekindex: LazyTransformStats, + pub monthindex: LazyTransformStats, + pub quarterindex: LazyTransformStats, + pub semesterindex: LazyTransformStats, + pub yearindex: LazyTransformStats, + pub decadeindex: LazyTransformStats, } impl LazyDateFull @@ -39,63 +39,26 @@ where source: &DerivedDateFull, ) -> Self { let v = version + VERSION; + + macro_rules! period { + ($p:ident) => { + LazyTransformStats::from_boxed::( + name, v, + source.$p.average.boxed_clone(), source.$p.min.boxed_clone(), + source.$p.max.boxed_clone(), source.$p.sum.boxed_clone(), + source.$p.cumulative.boxed_clone(), + ) + }; + } + Self { dateindex: LazyTransformFull::from_stats_aggregate::(name, v, dateindex), - weekindex: LazyTransformFull::from_boxed::( - name, - v, - source.weekindex.average.boxed_clone(), - source.weekindex.min.boxed_clone(), - source.weekindex.max.boxed_clone(), - source.weekindex.sum.boxed_clone(), - source.weekindex.cumulative.boxed_clone(), - ), - monthindex: LazyTransformFull::from_boxed::( - name, - v, - source.monthindex.average.boxed_clone(), - source.monthindex.min.boxed_clone(), - source.monthindex.max.boxed_clone(), - source.monthindex.sum.boxed_clone(), - source.monthindex.cumulative.boxed_clone(), - ), - quarterindex: LazyTransformFull::from_boxed::( - name, - v, - source.quarterindex.average.boxed_clone(), - source.quarterindex.min.boxed_clone(), - source.quarterindex.max.boxed_clone(), - source.quarterindex.sum.boxed_clone(), - 
source.quarterindex.cumulative.boxed_clone(), - ), - semesterindex: LazyTransformFull::from_boxed::( - name, - v, - source.semesterindex.average.boxed_clone(), - source.semesterindex.min.boxed_clone(), - source.semesterindex.max.boxed_clone(), - source.semesterindex.sum.boxed_clone(), - source.semesterindex.cumulative.boxed_clone(), - ), - yearindex: LazyTransformFull::from_boxed::( - name, - v, - source.yearindex.average.boxed_clone(), - source.yearindex.min.boxed_clone(), - source.yearindex.max.boxed_clone(), - source.yearindex.sum.boxed_clone(), - source.yearindex.cumulative.boxed_clone(), - ), - decadeindex: LazyTransformFull::from_boxed::( - name, - v, - source.decadeindex.average.boxed_clone(), - source.decadeindex.min.boxed_clone(), - source.decadeindex.max.boxed_clone(), - source.decadeindex.sum.boxed_clone(), - source.decadeindex.cumulative.boxed_clone(), - ), + weekindex: period!(weekindex), + monthindex: period!(monthindex), + quarterindex: period!(quarterindex), + semesterindex: period!(semesterindex), + yearindex: period!(yearindex), + decadeindex: period!(decadeindex), } } - } diff --git a/crates/brk_computer/src/internal/lazy/date/last.rs b/crates/brk_computer/src/internal/lazy/date/last.rs index 89bfab06d..85ffab409 100644 --- a/crates/brk_computer/src/internal/lazy/date/last.rs +++ b/crates/brk_computer/src/internal/lazy/date/last.rs @@ -7,7 +7,7 @@ use brk_types::{ use schemars::JsonSchema; use vecdb::{IterableBoxedVec, IterableCloneableVec, UnaryTransform}; -use crate::internal::{ComputedDateLast, ComputedVecValue, DerivedDateLast}; +use crate::internal::{ComputedBlockLast, ComputedDateLast, ComputedVecValue, DerivedDateLast, NumericValue}; use super::super::transform::LazyTransformLast; @@ -58,22 +58,32 @@ where source: &DerivedDateLast, ) -> Self { let v = version + VERSION; + + macro_rules! 
period { + ($p:ident) => { + LazyTransformLast::from_lazy_last::(name, v, &source.$p) + }; + } + Self { dateindex: LazyTransformLast::from_boxed::(name, v, dateindex_source), - weekindex: LazyTransformLast::from_lazy_last::(name, v, &source.weekindex), - monthindex: LazyTransformLast::from_lazy_last::(name, v, &source.monthindex), - quarterindex: LazyTransformLast::from_lazy_last::( - name, - v, - &source.quarterindex, - ), - semesterindex: LazyTransformLast::from_lazy_last::( - name, - v, - &source.semesterindex, - ), - yearindex: LazyTransformLast::from_lazy_last::(name, v, &source.yearindex), - decadeindex: LazyTransformLast::from_lazy_last::(name, v, &source.decadeindex), + weekindex: period!(weekindex), + monthindex: period!(monthindex), + quarterindex: period!(quarterindex), + semesterindex: period!(semesterindex), + yearindex: period!(yearindex), + decadeindex: period!(decadeindex), } } + + pub fn from_block_source>( + name: &str, + version: Version, + source: &ComputedBlockLast, + ) -> Self + where + S1T: NumericValue, + { + Self::from_derived::(name, version, source.dateindex.0.boxed_clone(), &source.dates) + } } diff --git a/crates/brk_computer/src/internal/lazy/date/mod.rs b/crates/brk_computer/src/internal/lazy/date/mod.rs index 2273f7385..caf157d67 100644 --- a/crates/brk_computer/src/internal/lazy/date/mod.rs +++ b/crates/brk_computer/src/internal/lazy/date/mod.rs @@ -1,16 +1,16 @@ +mod binary_height_date_last; mod binary_last; mod binary_sum; mod binary_sum_cum; -mod binary_sum_cum_last; mod full; mod last; mod sum; mod sum_cum; +pub use binary_height_date_last::*; pub use binary_last::*; pub use binary_sum::*; pub use binary_sum_cum::*; -pub use binary_sum_cum_last::*; pub use full::*; pub use last::*; pub use sum::*; diff --git a/crates/brk_computer/src/internal/lazy/date/sum.rs b/crates/brk_computer/src/internal/lazy/date/sum.rs index 340a2ccd2..e92ba98e5 100644 --- a/crates/brk_computer/src/internal/lazy/date/sum.rs +++ 
b/crates/brk_computer/src/internal/lazy/date/sum.rs @@ -39,15 +39,21 @@ where source: &DerivedDateSum, ) -> Self { let v = version + VERSION; + + macro_rules! period { + ($p:ident) => { + LazyTransformSum::from_boxed::(name, v, source.$p.boxed_clone()) + }; + } + Self { dateindex: LazyTransformSum::from_boxed::(name, v, dateindex_source), - weekindex: LazyTransformSum::from_boxed::(name, v, source.weekindex.boxed_clone()), - monthindex: LazyTransformSum::from_boxed::(name, v, source.monthindex.boxed_clone()), - quarterindex: LazyTransformSum::from_boxed::(name, v, source.quarterindex.boxed_clone()), - semesterindex: LazyTransformSum::from_boxed::(name, v, source.semesterindex.boxed_clone()), - yearindex: LazyTransformSum::from_boxed::(name, v, source.yearindex.boxed_clone()), - decadeindex: LazyTransformSum::from_boxed::(name, v, source.decadeindex.boxed_clone()), + weekindex: period!(weekindex), + monthindex: period!(monthindex), + quarterindex: period!(quarterindex), + semesterindex: period!(semesterindex), + yearindex: period!(yearindex), + decadeindex: period!(decadeindex), } } - } diff --git a/crates/brk_computer/src/internal/lazy/date/sum_cum.rs b/crates/brk_computer/src/internal/lazy/date/sum_cum.rs index b72cb7d9f..18cd997ad 100644 --- a/crates/brk_computer/src/internal/lazy/date/sum_cum.rs +++ b/crates/brk_computer/src/internal/lazy/date/sum_cum.rs @@ -39,44 +39,23 @@ where source: &DerivedDateSumCum, ) -> Self { let v = version + VERSION; + + macro_rules! 
period { + ($p:ident) => { + LazyTransformSumCum::from_boxed_sum_raw::( + name, v, source.$p.sum.boxed_clone(), source.$p.cumulative.boxed_clone(), + ) + }; + } + Self { - dateindex: LazyTransformSumCum::from_sum_cum::(name, v, dateindex), - weekindex: LazyTransformSumCum::from_boxed::( - name, - v, - source.weekindex.sum.boxed_clone(), - source.weekindex.cumulative.boxed_clone(), - ), - monthindex: LazyTransformSumCum::from_boxed::( - name, - v, - source.monthindex.sum.boxed_clone(), - source.monthindex.cumulative.boxed_clone(), - ), - quarterindex: LazyTransformSumCum::from_boxed::( - name, - v, - source.quarterindex.sum.boxed_clone(), - source.quarterindex.cumulative.boxed_clone(), - ), - semesterindex: LazyTransformSumCum::from_boxed::( - name, - v, - source.semesterindex.sum.boxed_clone(), - source.semesterindex.cumulative.boxed_clone(), - ), - yearindex: LazyTransformSumCum::from_boxed::( - name, - v, - source.yearindex.sum.boxed_clone(), - source.yearindex.cumulative.boxed_clone(), - ), - decadeindex: LazyTransformSumCum::from_boxed::( - name, - v, - source.decadeindex.sum.boxed_clone(), - source.decadeindex.cumulative.boxed_clone(), - ), + dateindex: LazyTransformSumCum::from_sum_cum_sum_raw::(name, v, dateindex), + weekindex: period!(weekindex), + monthindex: period!(monthindex), + quarterindex: period!(quarterindex), + semesterindex: period!(semesterindex), + yearindex: period!(yearindex), + decadeindex: period!(decadeindex), } } } diff --git a/crates/brk_computer/src/internal/lazy/derived_block/binary_last.rs b/crates/brk_computer/src/internal/lazy/derived_block/binary_last.rs new file mode 100644 index 000000000..6d47d9814 --- /dev/null +++ b/crates/brk_computer/src/internal/lazy/derived_block/binary_last.rs @@ -0,0 +1,144 @@ +//! Lazy binary transform for derived block with Last aggregation only. 
+ +use brk_traversable::Traversable; +use brk_types::{DifficultyEpoch, Version}; +use derive_more::{Deref, DerefMut}; +use schemars::JsonSchema; +use vecdb::{BinaryTransform, IterableCloneableVec}; + +use crate::internal::{ + ComputedBlockLast, ComputedBlockSumCum, ComputedHeightDateLast, ComputedVecValue, + LazyBinaryDateLast, LazyTransform2Last, NumericValue, +}; + +#[derive(Clone, Deref, DerefMut, Traversable)] +#[traversable(merge)] +pub struct LazyDerivedBlock2Last +where + T: ComputedVecValue + PartialOrd + JsonSchema, + S1T: ComputedVecValue, + S2T: ComputedVecValue, +{ + #[deref] + #[deref_mut] + pub dates: LazyBinaryDateLast, + pub difficultyepoch: LazyTransform2Last, +} + +const VERSION: Version = Version::ZERO; + +impl LazyDerivedBlock2Last +where + T: ComputedVecValue + JsonSchema + 'static, + S1T: ComputedVecValue + JsonSchema, + S2T: ComputedVecValue + JsonSchema, +{ + pub fn from_computed_sum_cum>( + name: &str, + version: Version, + source1: &ComputedBlockSumCum, + source2: &ComputedBlockSumCum, + ) -> Self + where + S1T: PartialOrd, + S2T: PartialOrd, + { + let v = version + VERSION; + + Self { + dates: LazyBinaryDateLast::from_both_sum_cum_cumulatives::( + name, + v, + source1.dateindex.cumulative.boxed_clone(), + &source1.dates, + source2.dateindex.cumulative.boxed_clone(), + &source2.dates, + ), + difficultyepoch: LazyTransform2Last::from_vecs::( + name, + v, + source1.difficultyepoch.cumulative.boxed_clone(), + source2.difficultyepoch.cumulative.boxed_clone(), + ), + } + } + + pub fn from_computed_last>( + name: &str, + version: Version, + source1: &ComputedBlockLast, + source2: &ComputedBlockLast, + ) -> Self + where + S1T: NumericValue, + S2T: NumericValue, + { + let v = version + VERSION; + + Self { + dates: LazyBinaryDateLast::from_both_block_last::(name, v, source1, source2), + difficultyepoch: LazyTransform2Last::from_vecs::( + name, + v, + source1.difficultyepoch.boxed_clone(), + source2.difficultyepoch.boxed_clone(), + ), + } + } + + 
pub fn from_computed_height_date_last>( + name: &str, + version: Version, + source1: &ComputedHeightDateLast, + source2: &ComputedHeightDateLast, + ) -> Self + where + S1T: PartialOrd, + S2T: PartialOrd, + { + let v = version + VERSION; + + Self { + dates: LazyBinaryDateLast::from_computed_both_last::( + name, + v, + &source1.rest, + &source2.rest, + ), + difficultyepoch: LazyTransform2Last::from_vecs::( + name, + v, + source1.difficultyepoch.0.boxed_clone(), + source2.difficultyepoch.0.boxed_clone(), + ), + } + } + + pub fn from_computed_height_date_and_block_last>( + name: &str, + version: Version, + source1: &ComputedHeightDateLast, + source2: &ComputedBlockLast, + ) -> Self + where + S1T: PartialOrd, + S2T: NumericValue, + { + let v = version + VERSION; + + Self { + dates: LazyBinaryDateLast::from_dateindex_and_height_last::( + name, + v, + &source1.rest, + source2, + ), + difficultyepoch: LazyTransform2Last::from_vecs::( + name, + v, + source1.difficultyepoch.0.boxed_clone(), + source2.difficultyepoch.boxed_clone(), + ), + } + } +} diff --git a/crates/brk_computer/src/internal/lazy/derived_block/binary_sum.rs b/crates/brk_computer/src/internal/lazy/derived_block/binary_sum.rs index 12aa4446d..10a2358f5 100644 --- a/crates/brk_computer/src/internal/lazy/derived_block/binary_sum.rs +++ b/crates/brk_computer/src/internal/lazy/derived_block/binary_sum.rs @@ -20,7 +20,6 @@ where { #[deref] #[deref_mut] - #[traversable(flatten)] pub dates: LazyDate2Sum, pub difficultyepoch: LazyTransform2Sum, } diff --git a/crates/brk_computer/src/internal/lazy/derived_block/binary_sum_cum.rs b/crates/brk_computer/src/internal/lazy/derived_block/binary_sum_cum.rs index 623d3b387..532eb2f02 100644 --- a/crates/brk_computer/src/internal/lazy/derived_block/binary_sum_cum.rs +++ b/crates/brk_computer/src/internal/lazy/derived_block/binary_sum_cum.rs @@ -7,8 +7,9 @@ use schemars::JsonSchema; use vecdb::{BinaryTransform, IterableCloneableVec}; use crate::internal::{ - ComputedVecValue, 
DerivedDateFull, DerivedDateSumCum, LazyDate2SumCum, LazyFull, LazySumCum, - SumCum, + ComputedBlockLast, ComputedBlockSumCum, ComputedVecValue, DerivedComputedBlockLast, + DerivedComputedBlockSumCum, DerivedDateFull, DerivedDateSumCum, LazyDate2SumCum, LazyFull, + LazySumCum, NumericValue, SumCum, }; use super::super::transform::LazyTransform2SumCum; @@ -25,7 +26,6 @@ where { #[deref] #[deref_mut] - #[traversable(flatten)] pub dates: LazyDate2SumCum, pub difficultyepoch: LazyTransform2SumCum, } @@ -96,4 +96,131 @@ where ), } } + + /// Without _sum suffix for pure SumCum types. + #[allow(clippy::too_many_arguments)] + pub fn from_computed_sum_raw>( + name: &str, + version: Version, + dateindex1: &SumCum, + periods1: &DerivedDateSumCum, + difficultyepoch1: &LazySumCum, + dateindex2: &SumCum, + periods2: &DerivedDateSumCum, + difficultyepoch2: &LazySumCum, + ) -> Self { + let v = version + VERSION; + + Self { + dates: LazyDate2SumCum::from_computed_sum_raw::( + name, v, dateindex1, periods1, dateindex2, periods2, + ), + difficultyepoch: LazyTransform2SumCum::from_sources_sum_raw::( + name, + v, + difficultyepoch1.sum.boxed_clone(), + difficultyepoch2.sum.boxed_clone(), + difficultyepoch1.cumulative.boxed_clone(), + difficultyepoch2.cumulative.boxed_clone(), + ), + } + } + + // --- Methods accepting SumCum + Last sources --- + + pub fn from_computed_last>( + name: &str, + version: Version, + source1: &ComputedBlockSumCum, + source2: &ComputedBlockLast, + ) -> Self + where + S1T: PartialOrd, + S2T: NumericValue, + { + let v = version + VERSION; + + Self { + dates: LazyDate2SumCum::from_computed_last::(name, v, source1, source2), + difficultyepoch: LazyTransform2SumCum::from_sources_last_sum_raw::( + name, + v, + source1.difficultyepoch.sum.boxed_clone(), + source1.difficultyepoch.cumulative.boxed_clone(), + source2.difficultyepoch.boxed_clone(), + ), + } + } + + pub fn from_derived_computed_last>( + name: &str, + version: Version, + source1: 
&DerivedComputedBlockSumCum, + source2: &ComputedBlockLast, + ) -> Self + where + S1T: NumericValue, + S2T: NumericValue, + { + let v = version + VERSION; + + Self { + dates: LazyDate2SumCum::from_derived_computed_last::(name, v, source1, source2), + difficultyepoch: LazyTransform2SumCum::from_sources_last_sum_raw::( + name, + v, + source1.difficultyepoch.sum.boxed_clone(), + source1.difficultyepoch.cumulative.boxed_clone(), + source2.difficultyepoch.boxed_clone(), + ), + } + } + + pub fn from_computed_derived_last>( + name: &str, + version: Version, + source1: &ComputedBlockSumCum, + source2: &DerivedComputedBlockLast, + ) -> Self + where + S1T: PartialOrd, + S2T: NumericValue, + { + let v = version + VERSION; + + Self { + dates: LazyDate2SumCum::from_computed_derived_last::(name, v, source1, source2), + difficultyepoch: LazyTransform2SumCum::from_sources_last_sum_raw::( + name, + v, + source1.difficultyepoch.sum.boxed_clone(), + source1.difficultyepoch.cumulative.boxed_clone(), + source2.difficultyepoch.boxed_clone(), + ), + } + } + + pub fn from_derived_last>( + name: &str, + version: Version, + source1: &DerivedComputedBlockSumCum, + source2: &DerivedComputedBlockLast, + ) -> Self + where + S1T: NumericValue, + S2T: NumericValue, + { + let v = version + VERSION; + + Self { + dates: LazyDate2SumCum::from_derived_last::(name, v, source1, source2), + difficultyepoch: LazyTransform2SumCum::from_sources_last_sum_raw::( + name, + v, + source1.difficultyepoch.sum.boxed_clone(), + source1.difficultyepoch.cumulative.boxed_clone(), + source2.difficultyepoch.boxed_clone(), + ), + } + } } diff --git a/crates/brk_computer/src/internal/lazy/derived_block/binary_sum_cum_last.rs b/crates/brk_computer/src/internal/lazy/derived_block/binary_sum_cum_last.rs deleted file mode 100644 index d97687ef3..000000000 --- a/crates/brk_computer/src/internal/lazy/derived_block/binary_sum_cum_last.rs +++ /dev/null @@ -1,136 +0,0 @@ -//! Lazy aggregated for SumCum + Last binary transform. 
- -use brk_traversable::Traversable; -use brk_types::{DifficultyEpoch, Version}; -use derive_more::{Deref, DerefMut}; -use schemars::JsonSchema; -use vecdb::{BinaryTransform, IterableCloneableVec}; - -use crate::internal::{ - ComputedBlockLast, ComputedBlockSumCum, ComputedVecValue, DerivedComputedBlockLast, - DerivedComputedBlockSumCum, LazyDate2SumCumLast, NumericValue, -}; - -use super::super::transform::LazyTransform2SumCumLast; - -const VERSION: Version = Version::ZERO; - -#[derive(Clone, Deref, DerefMut, Traversable)] -#[traversable(merge)] -pub struct LazyDerivedBlock2SumCumLast -where - T: ComputedVecValue + PartialOrd + JsonSchema, - S1T: ComputedVecValue, - S2T: ComputedVecValue, -{ - #[deref] - #[deref_mut] - #[traversable(flatten)] - pub dates: LazyDate2SumCumLast, - pub difficultyepoch: LazyTransform2SumCumLast, -} - -impl LazyDerivedBlock2SumCumLast -where - T: ComputedVecValue + JsonSchema + 'static, - S1T: ComputedVecValue + JsonSchema, - S2T: ComputedVecValue + JsonSchema, -{ - pub fn from_computed>( - name: &str, - version: Version, - source1: &ComputedBlockSumCum, - source2: &ComputedBlockLast, - ) -> Self - where - S1T: PartialOrd, - S2T: NumericValue, - { - let v = version + VERSION; - - Self { - dates: LazyDate2SumCumLast::from_computed::(name, v, source1, source2), - difficultyepoch: LazyTransform2SumCumLast::from_boxed::( - name, - v, - source1.difficultyepoch.sum.boxed_clone(), - source1.difficultyepoch.cumulative.boxed_clone(), - source2.difficultyepoch.boxed_clone(), - ), - } - } - - pub fn from_derived_computed_full>( - name: &str, - version: Version, - source1: &DerivedComputedBlockSumCum, - source2: &ComputedBlockLast, - ) -> Self - where - S1T: NumericValue, - S2T: NumericValue, - { - let v = version + VERSION; - - Self { - dates: LazyDate2SumCumLast::from_derived_computed_full::(name, v, source1, source2), - difficultyepoch: LazyTransform2SumCumLast::from_boxed::( - name, - v, - source1.difficultyepoch.sum.boxed_clone(), - 
source1.difficultyepoch.cumulative.boxed_clone(), - source2.difficultyepoch.boxed_clone(), - ), - } - } - - pub fn from_computed_derived_computed>( - name: &str, - version: Version, - source1: &ComputedBlockSumCum, - source2: &DerivedComputedBlockLast, - ) -> Self - where - S1T: PartialOrd, - S2T: NumericValue, - { - let v = version + VERSION; - - Self { - dates: LazyDate2SumCumLast::from_computed_derived_computed::( - name, v, source1, source2, - ), - difficultyepoch: LazyTransform2SumCumLast::from_boxed::( - name, - v, - source1.difficultyepoch.sum.boxed_clone(), - source1.difficultyepoch.cumulative.boxed_clone(), - source2.difficultyepoch.boxed_clone(), - ), - } - } - - pub fn from_derived_computed>( - name: &str, - version: Version, - source1: &DerivedComputedBlockSumCum, - source2: &DerivedComputedBlockLast, - ) -> Self - where - S1T: NumericValue, - S2T: NumericValue, - { - let v = version + VERSION; - - Self { - dates: LazyDate2SumCumLast::from_derived_computed::(name, v, source1, source2), - difficultyepoch: LazyTransform2SumCumLast::from_boxed::( - name, - v, - source1.difficultyepoch.sum.boxed_clone(), - source1.difficultyepoch.cumulative.boxed_clone(), - source2.difficultyepoch.boxed_clone(), - ), - } - } -} diff --git a/crates/brk_computer/src/internal/lazy/derived_block/full.rs b/crates/brk_computer/src/internal/lazy/derived_block/full.rs index 0d0f9ca3f..faf1539e6 100644 --- a/crates/brk_computer/src/internal/lazy/derived_block/full.rs +++ b/crates/brk_computer/src/internal/lazy/derived_block/full.rs @@ -10,7 +10,7 @@ use crate::internal::{ ComputedVecValue, DerivedComputedBlockFull, DerivedDateFull, Full, LazyDateFull, NumericValue, }; -use super::super::transform::LazyTransformFull; +use super::super::transform::LazyTransformStats; #[derive(Clone, Deref, DerefMut, Traversable)] #[traversable(merge)] @@ -21,9 +21,8 @@ where { #[deref] #[deref_mut] - #[traversable(flatten)] pub dates: LazyDateFull, - pub difficultyepoch: LazyTransformFull, + pub 
difficultyepoch: LazyTransformStats, } const VERSION: Version = Version::ZERO; @@ -49,7 +48,7 @@ where Self { dates: LazyDateFull::from_full::(name, v, dateindex, periods), - difficultyepoch: LazyTransformFull::from_boxed::( + difficultyepoch: LazyTransformStats::from_boxed::( name, v, difficultyepoch.average.boxed_clone(), @@ -73,7 +72,7 @@ where Self { dates: LazyDateFull::from_full::(name, v, &source.dateindex, &source.dates), - difficultyepoch: LazyTransformFull::from_boxed::( + difficultyepoch: LazyTransformStats::from_boxed::( name, v, source.difficultyepoch.average.boxed_clone(), diff --git a/crates/brk_computer/src/internal/lazy/derived_block/last.rs b/crates/brk_computer/src/internal/lazy/derived_block/last.rs index 6433f637d..ffb578275 100644 --- a/crates/brk_computer/src/internal/lazy/derived_block/last.rs +++ b/crates/brk_computer/src/internal/lazy/derived_block/last.rs @@ -7,7 +7,8 @@ use schemars::JsonSchema; use vecdb::{IterableCloneableVec, UnaryTransform}; use crate::internal::{ - ComputedBlockLast, ComputedVecValue, DerivedComputedBlockLast, LazyDateLast, NumericValue, + ComputedBlockLast, ComputedHeightDateLast, ComputedVecValue, DerivedComputedBlockLast, + LazyDateLast, NumericValue, }; use super::super::transform::LazyTransformLast; @@ -21,7 +22,6 @@ where { #[deref] #[deref_mut] - #[traversable(flatten)] pub dates: LazyDateLast, pub difficultyepoch: LazyTransformLast, } @@ -82,4 +82,29 @@ where ), } } + + pub fn from_computed_height_date>( + name: &str, + version: Version, + source: &ComputedHeightDateLast, + ) -> Self + where + S1T: PartialOrd, + { + let v = version + VERSION; + + Self { + dates: LazyDateLast::from_derived::( + name, + v, + source.dateindex.boxed_clone(), + &source.rest.rest, + ), + difficultyepoch: LazyTransformLast::from_boxed::( + name, + v, + source.difficultyepoch.0.boxed_clone(), + ), + } + } } diff --git a/crates/brk_computer/src/internal/lazy/derived_block/mod.rs 
b/crates/brk_computer/src/internal/lazy/derived_block/mod.rs index b5bc0d8cc..a53d333f9 100644 --- a/crates/brk_computer/src/internal/lazy/derived_block/mod.rs +++ b/crates/brk_computer/src/internal/lazy/derived_block/mod.rs @@ -1,14 +1,14 @@ +mod binary_last; mod binary_sum; mod binary_sum_cum; -mod binary_sum_cum_last; mod full; mod last; mod sum; mod sum_cum; +pub use binary_last::*; pub use binary_sum::*; pub use binary_sum_cum::*; -pub use binary_sum_cum_last::*; pub use full::*; pub use last::*; pub use sum::*; diff --git a/crates/brk_computer/src/internal/lazy/derived_block/sum.rs b/crates/brk_computer/src/internal/lazy/derived_block/sum.rs index 8d20fcead..4fdcde4cf 100644 --- a/crates/brk_computer/src/internal/lazy/derived_block/sum.rs +++ b/crates/brk_computer/src/internal/lazy/derived_block/sum.rs @@ -21,7 +21,6 @@ where { #[deref] #[deref_mut] - #[traversable(flatten)] pub dates: LazyDateSum, pub difficultyepoch: LazyTransformSum, } diff --git a/crates/brk_computer/src/internal/lazy/derived_block/sum_cum.rs b/crates/brk_computer/src/internal/lazy/derived_block/sum_cum.rs index 15ff9c0d3..cb2d36cfa 100644 --- a/crates/brk_computer/src/internal/lazy/derived_block/sum_cum.rs +++ b/crates/brk_computer/src/internal/lazy/derived_block/sum_cum.rs @@ -22,7 +22,6 @@ where { #[deref] #[deref_mut] - #[traversable(flatten)] pub dates: LazyDateSumCum, pub difficultyepoch: LazyTransformSumCum, } @@ -45,7 +44,7 @@ where Self { dates: LazyDateSumCum::from_sum_cum::(name, v, dateindex, periods), - difficultyepoch: LazyTransformSumCum::from_boxed::( + difficultyepoch: LazyTransformSumCum::from_boxed_sum_raw::( name, v, difficultyepoch.sum.boxed_clone(), @@ -66,7 +65,7 @@ where Self { dates: LazyDateSumCum::from_sum_cum::(name, v, &source.dateindex, &source.dates), - difficultyepoch: LazyTransformSumCum::from_boxed::( + difficultyepoch: LazyTransformSumCum::from_boxed_sum_raw::( name, v, source.difficultyepoch.sum.boxed_clone(), diff --git 
a/crates/brk_computer/src/internal/lazy/derived_tx/distribution.rs b/crates/brk_computer/src/internal/lazy/derived_tx/distribution.rs new file mode 100644 index 000000000..3fcc7b2c3 --- /dev/null +++ b/crates/brk_computer/src/internal/lazy/derived_tx/distribution.rs @@ -0,0 +1,67 @@ +//! LazyTxDistribution - lazy txindex source + computed distribution. + +use brk_error::Result; +use brk_indexer::Indexer; +use brk_traversable::Traversable; +use brk_types::{TxIndex, Version}; +use derive_more::{Deref, DerefMut}; +use schemars::JsonSchema; +use vecdb::{CollectableVec, Database, Exit, LazyVecFrom2}; + +use crate::{ + ComputeIndexes, indexes, + internal::{ComputedVecValue, DerivedTxDistribution, NumericValue}, +}; + +const VERSION: Version = Version::ZERO; + +#[derive(Clone, Deref, DerefMut, Traversable)] +#[traversable(merge)] +pub struct LazyTxDistribution +where + T: ComputedVecValue + PartialOrd + JsonSchema, + S1: ComputedVecValue, + S2: ComputedVecValue, +{ + pub txindex: LazyVecFrom2, + #[deref] + #[deref_mut] + #[traversable(flatten)] + pub distribution: DerivedTxDistribution, +} + +impl LazyTxDistribution +where + T: NumericValue + JsonSchema, + S1: ComputedVecValue + JsonSchema, + S2: ComputedVecValue + JsonSchema, +{ + pub fn forced_import( + db: &Database, + name: &str, + version: Version, + txindex: LazyVecFrom2, + indexes: &indexes::Vecs, + ) -> Result { + let v = version + VERSION; + let distribution = DerivedTxDistribution::forced_import(db, name, v, indexes)?; + Ok(Self { + txindex, + distribution, + }) + } + + pub fn derive_from( + &mut self, + indexer: &Indexer, + indexes: &indexes::Vecs, + starting_indexes: &ComputeIndexes, + exit: &Exit, + ) -> Result<()> + where + LazyVecFrom2: CollectableVec, + { + self.distribution + .derive_from(indexer, indexes, starting_indexes, &self.txindex, exit) + } +} diff --git a/crates/brk_computer/src/internal/lazy/derived_tx/full.rs b/crates/brk_computer/src/internal/lazy/derived_tx/full.rs index 218ae81a5..9b5a44f7b 
100644 --- a/crates/brk_computer/src/internal/lazy/derived_tx/full.rs +++ b/crates/brk_computer/src/internal/lazy/derived_tx/full.rs @@ -10,7 +10,7 @@ use vecdb::{IterableCloneableVec, UnaryTransform}; use crate::internal::{ComputedVecValue, DerivedTxFull}; -use super::super::transform::LazyTransformFull; +use super::super::transform::{LazyTransformFull, LazyTransformStats}; #[derive(Clone, Traversable)] #[traversable(merge)] @@ -20,14 +20,14 @@ where S1T: ComputedVecValue, { pub height: LazyTransformFull, - pub difficultyepoch: LazyTransformFull, - pub dateindex: LazyTransformFull, - pub weekindex: LazyTransformFull, - pub monthindex: LazyTransformFull, - pub quarterindex: LazyTransformFull, - pub semesterindex: LazyTransformFull, - pub yearindex: LazyTransformFull, - pub decadeindex: LazyTransformFull, + pub difficultyepoch: LazyTransformStats, + pub dateindex: LazyTransformStats, + pub weekindex: LazyTransformStats, + pub monthindex: LazyTransformStats, + pub quarterindex: LazyTransformStats, + pub semesterindex: LazyTransformStats, + pub yearindex: LazyTransformStats, + pub decadeindex: LazyTransformStats, } const VERSION: Version = Version::ZERO; @@ -44,80 +44,34 @@ where ) -> Self { let v = version + VERSION; + macro_rules! 
period { + ($p:ident) => { + LazyTransformStats::from_boxed::( + name, v, + source.$p.average.boxed_clone(), source.$p.min.boxed_clone(), + source.$p.max.boxed_clone(), source.$p.sum.boxed_clone(), + source.$p.cumulative.boxed_clone(), + ) + }; + } + Self { height: LazyTransformFull::from_stats_aggregate::(name, v, &source.height), - difficultyepoch: LazyTransformFull::from_boxed::( - name, - v, - source.difficultyepoch.average.boxed_clone(), - source.difficultyepoch.min.boxed_clone(), - source.difficultyepoch.max.boxed_clone(), - source.difficultyepoch.sum.boxed_clone(), - source.difficultyepoch.cumulative.boxed_clone(), - ), - dateindex: LazyTransformFull::from_boxed::( - name, - v, + difficultyepoch: period!(difficultyepoch), + dateindex: LazyTransformStats::from_boxed::( + name, v, source.dateindex.average.0.boxed_clone(), source.dateindex.minmax.min.0.boxed_clone(), source.dateindex.minmax.max.0.boxed_clone(), source.dateindex.sum_cum.sum.0.boxed_clone(), source.dateindex.sum_cum.cumulative.0.boxed_clone(), ), - weekindex: LazyTransformFull::from_boxed::( - name, - v, - source.weekindex.average.boxed_clone(), - source.weekindex.min.boxed_clone(), - source.weekindex.max.boxed_clone(), - source.weekindex.sum.boxed_clone(), - source.weekindex.cumulative.boxed_clone(), - ), - monthindex: LazyTransformFull::from_boxed::( - name, - v, - source.monthindex.average.boxed_clone(), - source.monthindex.min.boxed_clone(), - source.monthindex.max.boxed_clone(), - source.monthindex.sum.boxed_clone(), - source.monthindex.cumulative.boxed_clone(), - ), - quarterindex: LazyTransformFull::from_boxed::( - name, - v, - source.quarterindex.average.boxed_clone(), - source.quarterindex.min.boxed_clone(), - source.quarterindex.max.boxed_clone(), - source.quarterindex.sum.boxed_clone(), - source.quarterindex.cumulative.boxed_clone(), - ), - semesterindex: LazyTransformFull::from_boxed::( - name, - v, - source.semesterindex.average.boxed_clone(), - 
source.semesterindex.min.boxed_clone(), - source.semesterindex.max.boxed_clone(), - source.semesterindex.sum.boxed_clone(), - source.semesterindex.cumulative.boxed_clone(), - ), - yearindex: LazyTransformFull::from_boxed::( - name, - v, - source.yearindex.average.boxed_clone(), - source.yearindex.min.boxed_clone(), - source.yearindex.max.boxed_clone(), - source.yearindex.sum.boxed_clone(), - source.yearindex.cumulative.boxed_clone(), - ), - decadeindex: LazyTransformFull::from_boxed::( - name, - v, - source.decadeindex.average.boxed_clone(), - source.decadeindex.min.boxed_clone(), - source.decadeindex.max.boxed_clone(), - source.decadeindex.sum.boxed_clone(), - source.decadeindex.cumulative.boxed_clone(), - ), + weekindex: period!(weekindex), + monthindex: period!(monthindex), + quarterindex: period!(quarterindex), + semesterindex: period!(semesterindex), + yearindex: period!(yearindex), + decadeindex: period!(decadeindex), } } } diff --git a/crates/brk_computer/src/internal/lazy/derived_tx/mod.rs b/crates/brk_computer/src/internal/lazy/derived_tx/mod.rs index bbd1bdfa9..b0d01da0e 100644 --- a/crates/brk_computer/src/internal/lazy/derived_tx/mod.rs +++ b/crates/brk_computer/src/internal/lazy/derived_tx/mod.rs @@ -1,3 +1,5 @@ +mod distribution; mod full; +pub use distribution::*; pub use full::*; diff --git a/crates/brk_computer/src/internal/lazy/transform/binary_sum_cum.rs b/crates/brk_computer/src/internal/lazy/transform/binary_sum_cum.rs index 23d1e5849..48a1beb86 100644 --- a/crates/brk_computer/src/internal/lazy/transform/binary_sum_cum.rs +++ b/crates/brk_computer/src/internal/lazy/transform/binary_sum_cum.rs @@ -7,7 +7,7 @@ use vecdb::{ BinaryTransform, IterableBoxedVec, IterableCloneableVec, LazyVecFrom2, VecIndex, VecValue, }; -use crate::internal::{ComputedVecValue, LazyFull, SumCum}; +use crate::internal::{ComputedVecValue, LazyFull, LastVec, SumCum}; #[derive(Clone, Traversable)] pub struct LazyTransform2SumCum @@ -50,6 +50,29 @@ where } } + /// Create 
from SumCum without adding _sum suffix. + pub fn from_sum_cum_sum_raw>( + name: &str, + version: Version, + source1: &SumCum, + source2: &SumCum, + ) -> Self { + Self { + sum: LazyVecFrom2::transformed::( + name, + version, + source1.sum.0.boxed_clone(), + source2.sum.0.boxed_clone(), + ), + cumulative: LazyVecFrom2::transformed::( + &format!("{name}_cumulative"), + version, + source1.cumulative.0.boxed_clone(), + source2.cumulative.0.boxed_clone(), + ), + } + } + pub fn from_sources>( name: &str, version: Version, @@ -74,6 +97,26 @@ where } } + /// Create from sources without adding _sum suffix. + pub fn from_sources_sum_raw>( + name: &str, + version: Version, + sum_source1: IterableBoxedVec, + sum_source2: IterableBoxedVec, + cum_source1: IterableBoxedVec, + cum_source2: IterableBoxedVec, + ) -> Self { + Self { + sum: LazyVecFrom2::transformed::(name, version, sum_source1, sum_source2), + cumulative: LazyVecFrom2::transformed::( + &format!("{name}_cumulative"), + version, + cum_source1, + cum_source2, + ), + } + } + pub fn from_lazy_stats_aggregate( name: &str, version: Version, @@ -102,4 +145,123 @@ where ), } } + + /// Create from lazy stats aggregate without adding _sum suffix. 
+ pub fn from_lazy_stats_aggregate_sum_raw( + name: &str, + version: Version, + source1: &LazyFull, + source2: &LazyFull, + ) -> Self + where + F: BinaryTransform, + S1I: VecIndex + 'static, + S1L: VecValue, + S2I: VecIndex + 'static, + S2L: VecValue, + { + Self { + sum: LazyVecFrom2::transformed::( + name, + version, + source1.sum.boxed_clone(), + source2.sum.boxed_clone(), + ), + cumulative: LazyVecFrom2::transformed::( + &format!("{name}_cumulative"), + version, + source1.cumulative.boxed_clone(), + source2.cumulative.boxed_clone(), + ), + } + } + + // --- Methods accepting SumCum + Last sources --- + + pub fn from_sum_cum_last>( + name: &str, + version: Version, + source1: &SumCum, + source2: &LastVec, + ) -> Self { + Self { + sum: LazyVecFrom2::transformed::( + &format!("{name}_sum"), + version, + source1.sum.0.boxed_clone(), + source2.0.boxed_clone(), + ), + cumulative: LazyVecFrom2::transformed::( + &format!("{name}_cumulative"), + version, + source1.cumulative.0.boxed_clone(), + source2.0.boxed_clone(), + ), + } + } + + /// Create from SumCum + Last without adding _sum suffix. 
+ pub fn from_sum_cum_last_sum_raw>( + name: &str, + version: Version, + source1: &SumCum, + source2: &LastVec, + ) -> Self { + Self { + sum: LazyVecFrom2::transformed::( + name, + version, + source1.sum.0.boxed_clone(), + source2.0.boxed_clone(), + ), + cumulative: LazyVecFrom2::transformed::( + &format!("{name}_cumulative"), + version, + source1.cumulative.0.boxed_clone(), + source2.0.boxed_clone(), + ), + } + } + + pub fn from_sources_last>( + name: &str, + version: Version, + sum_source1: IterableBoxedVec, + cum_source1: IterableBoxedVec, + last_source: IterableBoxedVec, + ) -> Self { + Self { + sum: LazyVecFrom2::transformed::( + &format!("{name}_sum"), + version, + sum_source1, + last_source.clone(), + ), + cumulative: LazyVecFrom2::transformed::( + &format!("{name}_cumulative"), + version, + cum_source1, + last_source, + ), + } + } + + /// Create from boxed SumCum + Last sources without adding _sum suffix. + pub fn from_sources_last_sum_raw>( + name: &str, + version: Version, + sum_source1: IterableBoxedVec, + cum_source1: IterableBoxedVec, + last_source: IterableBoxedVec, + ) -> Self { + Self { + sum: LazyVecFrom2::transformed::(name, version, sum_source1, last_source.clone()), + cumulative: LazyVecFrom2::transformed::( + &format!("{name}_cumulative"), + version, + cum_source1, + last_source, + ), + } + } } diff --git a/crates/brk_computer/src/internal/lazy/transform/binary_sum_cum_last.rs b/crates/brk_computer/src/internal/lazy/transform/binary_sum_cum_last.rs deleted file mode 100644 index 9a2447feb..000000000 --- a/crates/brk_computer/src/internal/lazy/transform/binary_sum_cum_last.rs +++ /dev/null @@ -1,68 +0,0 @@ -//! Lazy binary transform for SumCum + Last → SumCum result. 
- -use brk_traversable::Traversable; -use brk_types::Version; -use schemars::JsonSchema; -use vecdb::{BinaryTransform, IterableBoxedVec, IterableCloneableVec, LazyVecFrom2, VecIndex}; - -use crate::internal::{ComputedVecValue, LastVec, SumCum}; - -#[derive(Clone, Traversable)] -pub struct LazyTransform2SumCumLast -where - I: VecIndex, - T: ComputedVecValue + PartialOrd + JsonSchema, - S1T: ComputedVecValue, - S2T: ComputedVecValue, -{ - pub sum: LazyVecFrom2, - pub cumulative: LazyVecFrom2, -} - -impl LazyTransform2SumCumLast -where - I: VecIndex, - T: ComputedVecValue + JsonSchema + 'static, - S1T: ComputedVecValue + JsonSchema, - S2T: ComputedVecValue + JsonSchema, -{ - pub fn from_sources>( - name: &str, - version: Version, - source1: &SumCum, - source2: &LastVec, - ) -> Self { - Self { - sum: LazyVecFrom2::transformed::( - name, - version, - source1.sum.0.boxed_clone(), - source2.0.boxed_clone(), - ), - cumulative: LazyVecFrom2::transformed::( - &format!("{name}_cumulative"), - version, - source1.cumulative.0.boxed_clone(), - source2.0.boxed_clone(), - ), - } - } - - pub fn from_boxed>( - name: &str, - version: Version, - sum_source: IterableBoxedVec, - cum_source: IterableBoxedVec, - last_source: IterableBoxedVec, - ) -> Self { - Self { - sum: LazyVecFrom2::transformed::(name, version, sum_source, last_source.clone()), - cumulative: LazyVecFrom2::transformed::( - &format!("{name}_cumulative"), - version, - cum_source, - last_source, - ), - } - } -} diff --git a/crates/brk_computer/src/internal/lazy/transform/full.rs b/crates/brk_computer/src/internal/lazy/transform/full.rs index 3a246e5cb..800f14388 100644 --- a/crates/brk_computer/src/internal/lazy/transform/full.rs +++ b/crates/brk_computer/src/internal/lazy/transform/full.rs @@ -3,10 +3,12 @@ use brk_traversable::Traversable; use brk_types::Version; use schemars::JsonSchema; -use vecdb::{IterableBoxedVec, IterableCloneableVec, LazyVecFrom1, UnaryTransform, VecIndex}; +use vecdb::{IterableCloneableVec, 
LazyVecFrom1, UnaryTransform, VecIndex}; use crate::internal::{ComputedVecValue, Full}; +use super::LazyPercentiles; + #[derive(Clone, Traversable)] pub struct LazyTransformFull where @@ -17,6 +19,7 @@ where pub average: LazyVecFrom1, pub min: LazyVecFrom1, pub max: LazyVecFrom1, + pub percentiles: LazyPercentiles, pub sum: LazyVecFrom1, pub cumulative: LazyVecFrom1, } @@ -48,42 +51,22 @@ where version, source.distribution.minmax.max.0.boxed_clone(), ), + percentiles: LazyPercentiles::from_percentiles::( + name, + version, + &source.distribution.percentiles, + ), sum: LazyVecFrom1::transformed::( &format!("{name}_sum"), version, source.sum_cum.sum.0.boxed_clone(), ), cumulative: LazyVecFrom1::transformed::( - &format!("{name}_cum"), + &format!("{name}_cumulative"), version, source.sum_cum.cumulative.0.boxed_clone(), ), } } - pub fn from_boxed>( - name: &str, - version: Version, - average_source: IterableBoxedVec, - min_source: IterableBoxedVec, - max_source: IterableBoxedVec, - sum_source: IterableBoxedVec, - cumulative_source: IterableBoxedVec, - ) -> Self { - Self { - average: LazyVecFrom1::transformed::( - &format!("{name}_average"), - version, - average_source, - ), - min: LazyVecFrom1::transformed::(&format!("{name}_min"), version, min_source), - max: LazyVecFrom1::transformed::(&format!("{name}_max"), version, max_source), - sum: LazyVecFrom1::transformed::(&format!("{name}_sum"), version, sum_source), - cumulative: LazyVecFrom1::transformed::( - &format!("{name}_cum"), - version, - cumulative_source, - ), - } - } } diff --git a/crates/brk_computer/src/internal/lazy/transform/mod.rs b/crates/brk_computer/src/internal/lazy/transform/mod.rs index 2273f7385..7cc7a6f78 100644 --- a/crates/brk_computer/src/internal/lazy/transform/mod.rs +++ b/crates/brk_computer/src/internal/lazy/transform/mod.rs @@ -1,17 +1,19 @@ mod binary_last; mod binary_sum; mod binary_sum_cum; -mod binary_sum_cum_last; mod full; mod last; +mod percentiles; +mod stats; mod sum; mod sum_cum; 
pub use binary_last::*; pub use binary_sum::*; pub use binary_sum_cum::*; -pub use binary_sum_cum_last::*; pub use full::*; pub use last::*; +pub use percentiles::*; +pub use stats::*; pub use sum::*; pub use sum_cum::*; diff --git a/crates/brk_computer/src/internal/lazy/transform/percentiles.rs b/crates/brk_computer/src/internal/lazy/transform/percentiles.rs new file mode 100644 index 000000000..952fd0014 --- /dev/null +++ b/crates/brk_computer/src/internal/lazy/transform/percentiles.rs @@ -0,0 +1,63 @@ +//! Lazy unary transform for Percentiles. + +use brk_traversable::Traversable; +use brk_types::Version; +use schemars::JsonSchema; +use vecdb::{IterableCloneableVec, LazyVecFrom1, UnaryTransform, VecIndex}; + +use crate::internal::{ComputedVecValue, Percentiles}; + +#[derive(Clone, Traversable)] +pub struct LazyPercentiles +where + I: VecIndex, + T: ComputedVecValue + PartialOrd + JsonSchema, + S1T: ComputedVecValue, +{ + pub pct10: LazyVecFrom1, + pub pct25: LazyVecFrom1, + pub median: LazyVecFrom1, + pub pct75: LazyVecFrom1, + pub pct90: LazyVecFrom1, +} + +impl LazyPercentiles +where + I: VecIndex, + T: ComputedVecValue + JsonSchema + 'static, + S1T: ComputedVecValue + JsonSchema, +{ + pub fn from_percentiles>( + name: &str, + version: Version, + source: &Percentiles, + ) -> Self { + Self { + pct10: LazyVecFrom1::transformed::( + &format!("{name}_pct10"), + version, + source.pct10.0.boxed_clone(), + ), + pct25: LazyVecFrom1::transformed::( + &format!("{name}_pct25"), + version, + source.pct25.0.boxed_clone(), + ), + median: LazyVecFrom1::transformed::( + &format!("{name}_median"), + version, + source.median.0.boxed_clone(), + ), + pct75: LazyVecFrom1::transformed::( + &format!("{name}_pct75"), + version, + source.pct75.0.boxed_clone(), + ), + pct90: LazyVecFrom1::transformed::( + &format!("{name}_pct90"), + version, + source.pct90.0.boxed_clone(), + ), + } + } +} diff --git a/crates/brk_computer/src/internal/lazy/transform/stats.rs 
b/crates/brk_computer/src/internal/lazy/transform/stats.rs new file mode 100644 index 000000000..ff0fec161 --- /dev/null +++ b/crates/brk_computer/src/internal/lazy/transform/stats.rs @@ -0,0 +1,56 @@ +//! Lazy unary transform for Stats (without percentiles). + +use brk_traversable::Traversable; +use brk_types::Version; +use schemars::JsonSchema; +use vecdb::{IterableBoxedVec, LazyVecFrom1, UnaryTransform, VecIndex}; + +use crate::internal::ComputedVecValue; + +/// Lazy transform matching Stats structure: average, min, max, sum, cumulative (no percentiles). +#[derive(Clone, Traversable)] +pub struct LazyTransformStats +where + I: VecIndex, + T: ComputedVecValue + PartialOrd + JsonSchema, + S1T: ComputedVecValue, +{ + pub average: LazyVecFrom1, + pub min: LazyVecFrom1, + pub max: LazyVecFrom1, + pub sum: LazyVecFrom1, + pub cumulative: LazyVecFrom1, +} + +impl LazyTransformStats +where + I: VecIndex, + T: ComputedVecValue + JsonSchema + 'static, + S1T: ComputedVecValue + JsonSchema, +{ + pub fn from_boxed>( + name: &str, + version: Version, + average_source: IterableBoxedVec, + min_source: IterableBoxedVec, + max_source: IterableBoxedVec, + sum_source: IterableBoxedVec, + cumulative_source: IterableBoxedVec, + ) -> Self { + Self { + average: LazyVecFrom1::transformed::( + &format!("{name}_average"), + version, + average_source, + ), + min: LazyVecFrom1::transformed::(&format!("{name}_min"), version, min_source), + max: LazyVecFrom1::transformed::(&format!("{name}_max"), version, max_source), + sum: LazyVecFrom1::transformed::(&format!("{name}_sum"), version, sum_source), + cumulative: LazyVecFrom1::transformed::( + &format!("{name}_cumulative"), + version, + cumulative_source, + ), + } + } +} diff --git a/crates/brk_computer/src/internal/lazy/transform/sum_cum.rs b/crates/brk_computer/src/internal/lazy/transform/sum_cum.rs index a3af204ee..f53e686a2 100644 --- a/crates/brk_computer/src/internal/lazy/transform/sum_cum.rs +++ 
b/crates/brk_computer/src/internal/lazy/transform/sum_cum.rs @@ -43,6 +43,22 @@ where } } + /// Create from SumCum without adding _sum suffix. + pub fn from_sum_cum_sum_raw>( + name: &str, + version: Version, + source: &SumCum, + ) -> Self { + Self { + sum: LazyVecFrom1::transformed::(name, version, source.sum.0.boxed_clone()), + cumulative: LazyVecFrom1::transformed::( + &format!("{name}_cumulative"), + version, + source.cumulative.0.boxed_clone(), + ), + } + } + pub fn from_boxed>( name: &str, version: Version, @@ -58,4 +74,21 @@ where ), } } + + /// Create from boxed sources without adding _sum suffix. + pub fn from_boxed_sum_raw>( + name: &str, + version: Version, + sum_source: IterableBoxedVec, + cumulative_source: IterableBoxedVec, + ) -> Self { + Self { + sum: LazyVecFrom1::transformed::(name, version, sum_source), + cumulative: LazyVecFrom1::transformed::( + &format!("{name}_cumulative"), + version, + cumulative_source, + ), + } + } } diff --git a/crates/brk_computer/src/internal/specialized/constant.rs b/crates/brk_computer/src/internal/specialized/constant.rs index 240c4faa1..7e9938020 100644 --- a/crates/brk_computer/src/internal/specialized/constant.rs +++ b/crates/brk_computer/src/internal/specialized/constant.rs @@ -44,42 +44,42 @@ impl ConstantVecs { height: LazyVecFrom1::transformed::( name, version, - indexes.block.height_to_height.boxed_clone(), + indexes.height.identity.boxed_clone(), ), dateindex: LazyVecFrom1::transformed::( name, version, - indexes.time.dateindex_to_dateindex.boxed_clone(), + indexes.dateindex.identity.boxed_clone(), ), weekindex: LazyVecFrom1::transformed::( name, version, - indexes.time.weekindex_to_weekindex.boxed_clone(), + indexes.weekindex.identity.boxed_clone(), ), monthindex: LazyVecFrom1::transformed::( name, version, - indexes.time.monthindex_to_monthindex.boxed_clone(), + indexes.monthindex.identity.boxed_clone(), ), quarterindex: LazyVecFrom1::transformed::( name, version, - 
indexes.time.quarterindex_to_quarterindex.boxed_clone(), + indexes.quarterindex.identity.boxed_clone(), ), semesterindex: LazyVecFrom1::transformed::( name, version, - indexes.time.semesterindex_to_semesterindex.boxed_clone(), + indexes.semesterindex.identity.boxed_clone(), ), yearindex: LazyVecFrom1::transformed::( name, version, - indexes.time.yearindex_to_yearindex.boxed_clone(), + indexes.yearindex.identity.boxed_clone(), ), decadeindex: LazyVecFrom1::transformed::( name, version, - indexes.time.decadeindex_to_decadeindex.boxed_clone(), + indexes.decadeindex.identity.boxed_clone(), ), } } diff --git a/crates/brk_computer/src/internal/specialized/lazy_period.rs b/crates/brk_computer/src/internal/specialized/lazy_period.rs new file mode 100644 index 000000000..a0fbd6c73 --- /dev/null +++ b/crates/brk_computer/src/internal/specialized/lazy_period.rs @@ -0,0 +1,88 @@ +//! Generic lazy vecs for all time period indexes. + +use brk_traversable::Traversable; +use brk_types::{ + DateIndex, DecadeIndex, MonthIndex, QuarterIndex, SemesterIndex, Version, WeekIndex, YearIndex, +}; +use schemars::JsonSchema; +use serde::Serialize; +use vecdb::{ComputeFrom1, Formattable, IterableCloneableVec, LazyVecFrom1, VecValue}; + +use crate::indexes; + +/// Lazy vecs for all time period indexes (no height). 
+#[derive(Clone, Traversable)] +#[traversable(merge)] +pub struct LazyPeriodVecs +where + T: VecValue + Formattable + Serialize + JsonSchema, +{ + pub dateindex: LazyVecFrom1, + pub weekindex: LazyVecFrom1, + pub monthindex: LazyVecFrom1, + pub quarterindex: LazyVecFrom1, + pub semesterindex: LazyVecFrom1, + pub yearindex: LazyVecFrom1, + pub decadeindex: LazyVecFrom1, +} + +impl LazyPeriodVecs { + #[allow(clippy::too_many_arguments)] + pub fn new( + name: &str, + version: Version, + indexes: &indexes::Vecs, + dateindex_fn: ComputeFrom1, + weekindex_fn: ComputeFrom1, + monthindex_fn: ComputeFrom1, + quarterindex_fn: ComputeFrom1, + semesterindex_fn: ComputeFrom1, + yearindex_fn: ComputeFrom1, + decadeindex_fn: ComputeFrom1, + ) -> Self { + Self { + dateindex: LazyVecFrom1::init( + name, + version, + indexes.dateindex.identity.boxed_clone(), + dateindex_fn, + ), + weekindex: LazyVecFrom1::init( + name, + version, + indexes.weekindex.identity.boxed_clone(), + weekindex_fn, + ), + monthindex: LazyVecFrom1::init( + name, + version, + indexes.monthindex.identity.boxed_clone(), + monthindex_fn, + ), + quarterindex: LazyVecFrom1::init( + name, + version, + indexes.quarterindex.identity.boxed_clone(), + quarterindex_fn, + ), + semesterindex: LazyVecFrom1::init( + name, + version, + indexes.semesterindex.identity.boxed_clone(), + semesterindex_fn, + ), + yearindex: LazyVecFrom1::init( + name, + version, + indexes.yearindex.identity.boxed_clone(), + yearindex_fn, + ), + decadeindex: LazyVecFrom1::init( + name, + version, + indexes.decadeindex.identity.boxed_clone(), + decadeindex_fn, + ), + } + } +} diff --git a/crates/brk_computer/src/internal/specialized/mod.rs b/crates/brk_computer/src/internal/specialized/mod.rs index f544732a5..9b807be0e 100644 --- a/crates/brk_computer/src/internal/specialized/mod.rs +++ b/crates/brk_computer/src/internal/specialized/mod.rs @@ -1,10 +1,14 @@ mod constant; +mod lazy_period; +mod ohlc; mod percentiles; mod ratio; mod stddev; mod value; 
pub use constant::*; +pub use lazy_period::*; +pub use ohlc::*; pub use percentiles::*; pub use ratio::*; pub use stddev::*; diff --git a/crates/brk_computer/src/internal/specialized/ohlc/computed.rs b/crates/brk_computer/src/internal/specialized/ohlc/computed.rs new file mode 100644 index 000000000..f27f28b18 --- /dev/null +++ b/crates/brk_computer/src/internal/specialized/ohlc/computed.rs @@ -0,0 +1,68 @@ +//! OHLC computed aggregations combining height, dateindex, and period indexes. + +use brk_error::Result; +use brk_traversable::Traversable; +use brk_types::{Close, High, Low, Open, Version}; +use schemars::JsonSchema; +use vecdb::Database; + +use crate::indexes; +use crate::internal::{ + ComputedHeightDateFirst, ComputedHeightDateLast, ComputedHeightDateMax, ComputedHeightDateMin, + ComputedVecValue, +}; + +/// Combined OHLC computed vecs with all indexes (height + dateindex + periods + difficultyepoch). +/// +/// Access pattern: `ohlc.{open,high,low,close}.{height,dateindex,weekindex,...,difficultyepoch}` +#[derive(Clone, Traversable)] +pub struct OHLCComputedVecs +where + T: ComputedVecValue + PartialOrd + JsonSchema + From, + f64: From, +{ + pub open: ComputedHeightDateFirst>, + pub high: ComputedHeightDateMax>, + pub low: ComputedHeightDateMin>, + pub close: ComputedHeightDateLast>, +} + +impl OHLCComputedVecs +where + T: ComputedVecValue + PartialOrd + JsonSchema + From + 'static, + f64: From, +{ + pub fn forced_import( + db: &Database, + name: &str, + version: Version, + indexes: &indexes::Vecs, + ) -> Result { + Ok(Self { + open: ComputedHeightDateFirst::forced_import( + db, + &format!("{name}_open"), + version, + indexes, + )?, + high: ComputedHeightDateMax::forced_import( + db, + &format!("{name}_high"), + version, + indexes, + )?, + low: ComputedHeightDateMin::forced_import( + db, + &format!("{name}_low"), + version, + indexes, + )?, + close: ComputedHeightDateLast::forced_import( + db, + &format!("{name}_close"), + version, + indexes, + )?, + }) + } 
+} diff --git a/crates/brk_computer/src/internal/specialized/ohlc/lazy.rs b/crates/brk_computer/src/internal/specialized/ohlc/lazy.rs new file mode 100644 index 000000000..4a56a9aad --- /dev/null +++ b/crates/brk_computer/src/internal/specialized/ohlc/lazy.rs @@ -0,0 +1,32 @@ +//! Lazy OHLC component extractors for height + dateindex. + +use brk_traversable::Traversable; +use brk_types::{Close, DateIndex, Height, High, Low, Open}; +use schemars::JsonSchema; +use serde::Serialize; +use vecdb::{BytesVecValue, Formattable, LazyVecFrom1, VecIndex}; + +/// Lazy OHLC component extractors for a single index type. +#[derive(Clone, Traversable)] +pub struct LazyOHLC +where + I: VecIndex + BytesVecValue + Formattable + Serialize + JsonSchema + 'static, + T: BytesVecValue + Formattable + Serialize + JsonSchema + 'static, + SourceT: BytesVecValue + Formattable + Serialize + JsonSchema + 'static, +{ + pub open: LazyVecFrom1, I, SourceT>, + pub high: LazyVecFrom1, I, SourceT>, + pub low: LazyVecFrom1, I, SourceT>, + pub close: LazyVecFrom1, I, SourceT>, +} + +/// Lazy OHLC component extractors for height + dateindex. +#[derive(Clone, Traversable)] +pub struct HeightDateLazyOHLC +where + T: BytesVecValue + Formattable + Serialize + JsonSchema + 'static, + SourceT: BytesVecValue + Formattable + Serialize + JsonSchema + 'static, +{ + pub height: LazyOHLC, + pub dateindex: LazyOHLC, +} diff --git a/crates/brk_computer/src/internal/specialized/ohlc/mod.rs b/crates/brk_computer/src/internal/specialized/ohlc/mod.rs new file mode 100644 index 000000000..ffc51ad6a --- /dev/null +++ b/crates/brk_computer/src/internal/specialized/ohlc/mod.rs @@ -0,0 +1,9 @@ +//! OHLC - Open/High/Low/Close specialized types. 
+ +mod computed; +mod lazy; +mod period; + +pub use computed::*; +pub use lazy::*; +pub use period::*; diff --git a/crates/brk_computer/src/internal/specialized/ohlc/period.rs b/crates/brk_computer/src/internal/specialized/ohlc/period.rs new file mode 100644 index 000000000..74537b66a --- /dev/null +++ b/crates/brk_computer/src/internal/specialized/ohlc/period.rs @@ -0,0 +1,29 @@ +//! OHLC period groupings for all time/chain periods. + +use brk_traversable::Traversable; +use brk_types::{ + DateIndex, DecadeIndex, DifficultyEpoch, Height, MonthIndex, QuarterIndex, SemesterIndex, + WeekIndex, YearIndex, +}; +use schemars::JsonSchema; +use serde::Serialize; +use vecdb::{BytesVec, BytesVecValue, EagerVec, Formattable}; + +/// Bundled OHLC vecs for all periods (time + chain based). +#[derive(Clone, Traversable)] +pub struct OHLCPeriodVecs +where + T: BytesVecValue + Formattable + Serialize + JsonSchema + 'static, +{ + // Time-based periods + pub dateindex: EagerVec>, + pub week: EagerVec>, + pub month: EagerVec>, + pub quarter: EagerVec>, + pub semester: EagerVec>, + pub year: EagerVec>, + pub decade: EagerVec>, + // Chain-based periods + pub height: EagerVec>, + pub difficultyepoch: EagerVec>, +} diff --git a/crates/brk_computer/src/internal/specialized/percentiles.rs b/crates/brk_computer/src/internal/specialized/percentiles.rs index 90a2b15d3..751026597 100644 --- a/crates/brk_computer/src/internal/specialized/percentiles.rs +++ b/crates/brk_computer/src/internal/specialized/percentiles.rs @@ -32,13 +32,13 @@ impl CostBasisPercentiles { ) -> Result { let vecs = PERCENTILES.map(|p| { compute.then(|| { - ComputedDateLast::forced_import( - db, - &format!("{name}_cost_basis_pct{p:02}"), - version + VERSION, - indexes, - ) - .unwrap() + let metric_name = if name.is_empty() { + format!("cost_basis_pct{p:02}") + } else { + format!("{name}_cost_basis_pct{p:02}") + }; + ComputedDateLast::forced_import(db, &metric_name, version + VERSION, indexes) + .unwrap() }) }); diff --git 
a/crates/brk_computer/src/internal/specialized/ratio.rs b/crates/brk_computer/src/internal/specialized/ratio.rs index 1290e58ce..17ba1c94c 100644 --- a/crates/brk_computer/src/internal/specialized/ratio.rs +++ b/crates/brk_computer/src/internal/specialized/ratio.rs @@ -9,7 +9,7 @@ use vecdb::{ use crate::{ ComputeIndexes, indexes, internal::{ - BinaryDateLast, ComputedStandardDeviationVecsDate, PriceTimesRatio, + ComputedStandardDeviationVecsDate, LazyBinaryDateLast, PriceTimesRatio, StandardDeviationVecsOptions, }, price, @@ -31,12 +31,12 @@ pub struct ComputedRatioVecsDate { pub ratio_pct5: Option>, pub ratio_pct2: Option>, pub ratio_pct1: Option>, - pub ratio_pct99_usd: Option>, - pub ratio_pct98_usd: Option>, - pub ratio_pct95_usd: Option>, - pub ratio_pct5_usd: Option>, - pub ratio_pct2_usd: Option>, - pub ratio_pct1_usd: Option>, + pub ratio_pct99_usd: Option>, + pub ratio_pct98_usd: Option>, + pub ratio_pct95_usd: Option>, + pub ratio_pct5_usd: Option>, + pub ratio_pct2_usd: Option>, + pub ratio_pct1_usd: Option>, pub ratio_sd: Option, pub ratio_4y_sd: Option, @@ -97,7 +97,7 @@ impl ComputedRatioVecsDate { ($ratio:expr, $suffix:expr) => { if let Some(mp) = metric_price { $ratio.as_ref().map(|r| { - BinaryDateLast::from_height_and_dateindex_last::( + LazyBinaryDateLast::from_height_and_dateindex_last::( &format!("{name}_{}", $suffix), v, mp, @@ -106,7 +106,7 @@ impl ComputedRatioVecsDate { }) } else { price.as_ref().zip($ratio.as_ref()).map(|(p, r)| { - BinaryDateLast::from_computed_both_last::( + LazyBinaryDateLast::from_computed_both_last::( &format!("{name}_{}", $suffix), v, p, @@ -167,7 +167,7 @@ impl ComputedRatioVecsDate { exit: &Exit, price_opt: Option<&impl IterableVec>, ) -> Result<()> { - let closes = &price.usd.timeindexes_to_price_close.dateindex; + let closes = &price.usd.split.close.dateindex; let price = price_opt.unwrap_or_else(|| unsafe { std::mem::transmute(&self.price.as_ref().unwrap().dateindex) @@ -292,81 +292,49 @@ impl 
ComputedRatioVecsDate { .try_for_each(|v| v.flush())?; } - self.ratio_pct1.as_mut().unwrap().compute_rest( - starting_indexes, - exit, - None as Option<&EagerVec>>, - )?; - self.ratio_pct2.as_mut().unwrap().compute_rest( - starting_indexes, - exit, - None as Option<&EagerVec>>, - )?; - self.ratio_pct5.as_mut().unwrap().compute_rest( - starting_indexes, - exit, - None as Option<&EagerVec>>, - )?; - self.ratio_pct95.as_mut().unwrap().compute_rest( - starting_indexes, - exit, - None as Option<&EagerVec>>, - )?; - self.ratio_pct98.as_mut().unwrap().compute_rest( - starting_indexes, - exit, - None as Option<&EagerVec>>, - )?; - self.ratio_pct99.as_mut().unwrap().compute_rest( - starting_indexes, - exit, - None as Option<&EagerVec>>, - )?; + macro_rules! compute_pct_rest { + ($($field:ident),*) => { + $(self.$field.as_mut().unwrap().compute_rest( + starting_indexes, exit, None as Option<&EagerVec>>, + )?;)* + }; + } + compute_pct_rest!( + ratio_pct1, + ratio_pct2, + ratio_pct5, + ratio_pct95, + ratio_pct98, + ratio_pct99 + ); - self.ratio_sd.as_mut().unwrap().compute_all( - starting_indexes, - exit, - &self.ratio.dateindex, - )?; - self.ratio_4y_sd.as_mut().unwrap().compute_all( - starting_indexes, - exit, - &self.ratio.dateindex, - )?; - self.ratio_2y_sd.as_mut().unwrap().compute_all( - starting_indexes, - exit, - &self.ratio.dateindex, - )?; - self.ratio_1y_sd.as_mut().unwrap().compute_all( - starting_indexes, - exit, - &self.ratio.dateindex, - )?; + macro_rules! compute_sd { + ($($field:ident),*) => { + $(self.$field.as_mut().unwrap().compute_all( + starting_indexes, exit, &self.ratio.dateindex, + )?;)* + }; + } + compute_sd!(ratio_sd, ratio_4y_sd, ratio_2y_sd, ratio_1y_sd); Ok(()) } fn mut_ratio_vecs(&mut self) -> Vec<&mut EagerVec>> { - let mut vecs = Vec::with_capacity(6); - if let Some(v) = self.ratio_pct1.as_mut() { - vecs.push(&mut v.dateindex); + macro_rules! 
collect_vecs { + ($($field:ident),*) => {{ + let mut vecs = Vec::with_capacity(6); + $(if let Some(v) = self.$field.as_mut() { vecs.push(&mut v.dateindex); })* + vecs + }}; } - if let Some(v) = self.ratio_pct2.as_mut() { - vecs.push(&mut v.dateindex); - } - if let Some(v) = self.ratio_pct5.as_mut() { - vecs.push(&mut v.dateindex); - } - if let Some(v) = self.ratio_pct95.as_mut() { - vecs.push(&mut v.dateindex); - } - if let Some(v) = self.ratio_pct98.as_mut() { - vecs.push(&mut v.dateindex); - } - if let Some(v) = self.ratio_pct99.as_mut() { - vecs.push(&mut v.dateindex); - } - vecs + collect_vecs!( + ratio_pct1, + ratio_pct2, + ratio_pct5, + ratio_pct95, + ratio_pct98, + ratio_pct99 + ) } } diff --git a/crates/brk_computer/src/internal/specialized/stddev.rs b/crates/brk_computer/src/internal/specialized/stddev.rs index 11c883603..3fd0d0864 100644 --- a/crates/brk_computer/src/internal/specialized/stddev.rs +++ b/crates/brk_computer/src/internal/specialized/stddev.rs @@ -10,7 +10,7 @@ use vecdb::{ use crate::{ComputeIndexes, indexes, price}; -use super::super::{BinaryDateLast, ClosePriceTimesRatio, ComputedDateLast}; +use super::super::{ClosePriceTimesRatio, ComputedDateLast, LazyBinaryDateLast}; #[derive(Clone, Traversable)] pub struct ComputedStandardDeviationVecsDate { @@ -35,19 +35,19 @@ pub struct ComputedStandardDeviationVecsDate { pub m2_5sd: Option>, pub m3sd: Option>, - pub _0sd_usd: Option, StoredF32>>, - pub p0_5sd_usd: Option, StoredF32>>, - pub p1sd_usd: Option, StoredF32>>, - pub p1_5sd_usd: Option, StoredF32>>, - pub p2sd_usd: Option, StoredF32>>, - pub p2_5sd_usd: Option, StoredF32>>, - pub p3sd_usd: Option, StoredF32>>, - pub m0_5sd_usd: Option, StoredF32>>, - pub m1sd_usd: Option, StoredF32>>, - pub m1_5sd_usd: Option, StoredF32>>, - pub m2sd_usd: Option, StoredF32>>, - pub m2_5sd_usd: Option, StoredF32>>, - pub m3sd_usd: Option, StoredF32>>, + pub _0sd_usd: Option, StoredF32>>, + pub p0_5sd_usd: Option, StoredF32>>, + pub p1sd_usd: Option, 
StoredF32>>, + pub p1_5sd_usd: Option, StoredF32>>, + pub p2sd_usd: Option, StoredF32>>, + pub p2_5sd_usd: Option, StoredF32>>, + pub p3sd_usd: Option, StoredF32>>, + pub m0_5sd_usd: Option, StoredF32>>, + pub m1sd_usd: Option, StoredF32>>, + pub m1_5sd_usd: Option, StoredF32>>, + pub m2sd_usd: Option, StoredF32>>, + pub m2_5sd_usd: Option, StoredF32>>, + pub m3sd_usd: Option, StoredF32>>, } #[derive(Debug, Default)] @@ -136,11 +136,11 @@ impl ComputedStandardDeviationVecsDate { macro_rules! lazy_usd { ($band:expr, $suffix:expr) => { price_vecs - .map(|p| &p.usd.timeindexes_to_price_close) + .map(|p| &p.usd.split.close) .zip($band.as_ref()) .filter(|_| options.price_bands()) .map(|(p, b)| { - BinaryDateLast::from_computed_both_last::( + LazyBinaryDateLast::from_computed_both_last::( &format!("{name}_{}", $suffix), version, p, @@ -245,18 +245,23 @@ impl ComputedStandardDeviationVecsDate { sorted.sort_unstable(); - let mut p0_5sd = self.p0_5sd.as_mut().map(|c| &mut c.dateindex); - let mut p1sd = self.p1sd.as_mut().map(|c| &mut c.dateindex); - let mut p1_5sd = self.p1_5sd.as_mut().map(|c| &mut c.dateindex); - let mut p2sd = self.p2sd.as_mut().map(|c| &mut c.dateindex); - let mut p2_5sd = self.p2_5sd.as_mut().map(|c| &mut c.dateindex); - let mut p3sd = self.p3sd.as_mut().map(|c| &mut c.dateindex); - let mut m0_5sd = self.m0_5sd.as_mut().map(|c| &mut c.dateindex); - let mut m1sd = self.m1sd.as_mut().map(|c| &mut c.dateindex); - let mut m1_5sd = self.m1_5sd.as_mut().map(|c| &mut c.dateindex); - let mut m2sd = self.m2sd.as_mut().map(|c| &mut c.dateindex); - let mut m2_5sd = self.m2_5sd.as_mut().map(|c| &mut c.dateindex); - let mut m3sd = self.m3sd.as_mut().map(|c| &mut c.dateindex); + macro_rules! 
band_ref { + ($field:ident) => { + self.$field.as_mut().map(|c| &mut c.dateindex) + }; + } + let mut p0_5sd = band_ref!(p0_5sd); + let mut p1sd = band_ref!(p1sd); + let mut p1_5sd = band_ref!(p1_5sd); + let mut p2sd = band_ref!(p2sd); + let mut p2_5sd = band_ref!(p2_5sd); + let mut p3sd = band_ref!(p3sd); + let mut m0_5sd = band_ref!(m0_5sd); + let mut m1sd = band_ref!(m1sd); + let mut m1_5sd = band_ref!(m1_5sd); + let mut m2sd = band_ref!(m2sd); + let mut m2_5sd = band_ref!(m2_5sd); + let mut m3sd = band_ref!(m3sd); let min_date_usize = min_date.to_usize(); let mut sma_iter = sma.iter().skip(starting_dateindex.to_usize()); @@ -269,42 +274,13 @@ impl ComputedStandardDeviationVecsDate { if index < min_date_usize { self.sd.dateindex.truncate_push_at(index, StoredF32::NAN)?; - if let Some(v) = p0_5sd.as_mut() { - v.truncate_push_at(index, StoredF32::NAN)? - } - if let Some(v) = p1sd.as_mut() { - v.truncate_push_at(index, StoredF32::NAN)? - } - if let Some(v) = p1_5sd.as_mut() { - v.truncate_push_at(index, StoredF32::NAN)? - } - if let Some(v) = p2sd.as_mut() { - v.truncate_push_at(index, StoredF32::NAN)? - } - if let Some(v) = p2_5sd.as_mut() { - v.truncate_push_at(index, StoredF32::NAN)? - } - if let Some(v) = p3sd.as_mut() { - v.truncate_push_at(index, StoredF32::NAN)? - } - if let Some(v) = m0_5sd.as_mut() { - v.truncate_push_at(index, StoredF32::NAN)? - } - if let Some(v) = m1sd.as_mut() { - v.truncate_push_at(index, StoredF32::NAN)? - } - if let Some(v) = m1_5sd.as_mut() { - v.truncate_push_at(index, StoredF32::NAN)? - } - if let Some(v) = m2sd.as_mut() { - v.truncate_push_at(index, StoredF32::NAN)? - } - if let Some(v) = m2_5sd.as_mut() { - v.truncate_push_at(index, StoredF32::NAN)? - } - if let Some(v) = m3sd.as_mut() { - v.truncate_push_at(index, StoredF32::NAN)? + macro_rules! push_nan { + ($($band:ident),*) => { + $(if let Some(v) = $band.as_mut() { v.truncate_push_at(index, StoredF32::NAN)? 
})* + }; } + push_nan!(p0_5sd, p1sd, p1_5sd, p2sd, p2_5sd, p3sd, m0_5sd, m1sd, m1_5sd, m2sd, m2_5sd, m3sd); + // Advance iterator to stay in sync sma_iter.next(); } else { diff --git a/crates/brk_computer/src/internal/specialized/value/block/binary.rs b/crates/brk_computer/src/internal/specialized/value/block/binary.rs index f8715473c..4cb051674 100644 --- a/crates/brk_computer/src/internal/specialized/value/block/binary.rs +++ b/crates/brk_computer/src/internal/specialized/value/block/binary.rs @@ -1,8 +1,12 @@ use brk_traversable::Traversable; use brk_types::{Bitcoin, Dollars, Height, Sats, Version}; +use schemars::JsonSchema; use vecdb::{BinaryTransform, IterableBoxedVec, IterableCloneableVec}; -use crate::internal::{BinaryBlockSumCum, DerivedValueBlockSumCum, ValueBlockSumCum}; +use crate::internal::{ + BinaryBlockSumCum, ComputedVecValue, DerivedValueBlockSumCum, LazyValueBlockSumCum, + ValueBlockSumCum, +}; /// Lazy value vecs computed from two ValueBlockSumCum sources via binary transforms. /// Used for computing coinbase = subsidy + fee. 
@@ -119,4 +123,57 @@ impl ValueBinaryBlock { dollars, } } + + pub fn from_lazy( + name: &str, + version: Version, + source1: &LazyValueBlockSumCum, + source2: &LazyValueBlockSumCum, + ) -> Self + where + SatsF: BinaryTransform, + BitcoinF: BinaryTransform, + DollarsF: BinaryTransform, + S1T: ComputedVecValue + JsonSchema, + S2T: ComputedVecValue + JsonSchema, + { + let sats = BinaryBlockSumCum::from_derived::( + name, + version, + source1.sats.height.boxed_clone(), + source2.sats.height.boxed_clone(), + &source1.sats.rest, + &source2.sats.rest, + ); + + let bitcoin = BinaryBlockSumCum::from_derived::( + &format!("{name}_btc"), + version, + source1.sats.height.boxed_clone(), + source2.sats.height.boxed_clone(), + &source1.sats.rest, + &source2.sats.rest, + ); + + let dollars = source1 + .dollars + .as_ref() + .zip(source2.dollars.as_ref()) + .map(|(d1, d2)| { + BinaryBlockSumCum::from_derived::( + &format!("{name}_usd"), + version, + d1.height.boxed_clone(), + d2.height.boxed_clone(), + &d1.rest, + &d2.rest, + ) + }); + + Self { + sats, + bitcoin, + dollars, + } + } } diff --git a/crates/brk_computer/src/internal/specialized/value/block/full.rs b/crates/brk_computer/src/internal/specialized/value/block/full.rs index a862b7e4d..e78ae1e4b 100644 --- a/crates/brk_computer/src/internal/specialized/value/block/full.rs +++ b/crates/brk_computer/src/internal/specialized/value/block/full.rs @@ -69,7 +69,7 @@ impl ValueBlockFull { // Compute dollars from bitcoin and price (if enabled) if let (Some(dollars), Some(price)) = (self.dollars.as_mut(), price) { let height_to_bitcoin = &self.bitcoin.height; - let height_to_price_close = &price.usd.chainindexes_to_price_close.height; + let height_to_price_close = &price.usd.split.close.height; dollars.compute_all(indexes, starting_indexes, exit, |v| { v.compute_from_bitcoin( diff --git a/crates/brk_computer/src/internal/specialized/value/block/height.rs b/crates/brk_computer/src/internal/specialized/value/block/height.rs new file 
mode 100644 index 000000000..f1d5ce006 --- /dev/null +++ b/crates/brk_computer/src/internal/specialized/value/block/height.rs @@ -0,0 +1,49 @@ +//! Value type for Height-only storage (no derived indexes). + +use brk_error::Result; +use brk_traversable::Traversable; +use brk_types::{Bitcoin, Dollars, Height, Sats, Version}; +use vecdb::{Database, EagerVec, ImportableVec, IterableCloneableVec, LazyVecFrom1, PcoVec}; + +use crate::internal::SatsToBitcoin; + +const VERSION: Version = Version::ZERO; + +/// Value type with only height indexing (no derived dateindex/periods). +/// +/// Used for metrics that are computed per height but don't need index aggregations. +#[derive(Clone, Traversable)] +pub struct ValueBlockHeight { + pub sats: EagerVec>, + pub bitcoin: LazyVecFrom1, + pub dollars: Option>>, +} + +impl ValueBlockHeight { + pub fn forced_import( + db: &Database, + name: &str, + version: Version, + compute_dollars: bool, + ) -> Result { + let v = version + VERSION; + + let sats = EagerVec::forced_import(db, name, v)?; + + let bitcoin = LazyVecFrom1::transformed::( + &format!("{name}_btc"), + v, + sats.boxed_clone(), + ); + + let dollars = compute_dollars + .then(|| EagerVec::forced_import(db, &format!("{name}_usd"), v)) + .transpose()?; + + Ok(Self { + sats, + bitcoin, + dollars, + }) + } +} diff --git a/crates/brk_computer/src/internal/specialized/value/block/last.rs b/crates/brk_computer/src/internal/specialized/value/block/last.rs index 65c48708d..ee3ec24eb 100644 --- a/crates/brk_computer/src/internal/specialized/value/block/last.rs +++ b/crates/brk_computer/src/internal/specialized/value/block/last.rs @@ -66,10 +66,31 @@ impl ValueBlockLast { self.sats .compute_all(indexes, starting_indexes, exit, |v| compute(v))?; - // Compute dollars from bitcoin and price (if enabled) + self.compute_dollars(indexes, price, starting_indexes, exit) + } + + /// Compute derived vecs from existing height data. 
+ pub fn compute_rest( + &mut self, + indexes: &indexes::Vecs, + price: Option<&price::Vecs>, + starting_indexes: &ComputeIndexes, + exit: &Exit, + ) -> Result<()> { + self.sats.compute_rest(indexes, starting_indexes, exit)?; + self.compute_dollars(indexes, price, starting_indexes, exit) + } + + fn compute_dollars( + &mut self, + indexes: &indexes::Vecs, + price: Option<&price::Vecs>, + starting_indexes: &ComputeIndexes, + exit: &Exit, + ) -> Result<()> { if let (Some(dollars), Some(price)) = (self.dollars.as_mut(), price) { let height_to_bitcoin = &self.bitcoin.height; - let height_to_price_close = &price.usd.chainindexes_to_price_close.height; + let height_to_price_close = &price.usd.split.close.height; dollars.compute_all(indexes, starting_indexes, exit, |v| { v.compute_from_bitcoin( diff --git a/crates/brk_computer/src/internal/specialized/value/block/lazy.rs b/crates/brk_computer/src/internal/specialized/value/block/lazy.rs index 87937fb57..d15f26ca4 100644 --- a/crates/brk_computer/src/internal/specialized/value/block/lazy.rs +++ b/crates/brk_computer/src/internal/specialized/value/block/lazy.rs @@ -8,12 +8,12 @@ use super::LazyDerivedBlockValue; const VERSION: Version = Version::ZERO; #[derive(Clone, Deref, DerefMut, Traversable)] +#[traversable(merge)] pub struct LazyBlockValue { #[traversable(rename = "sats")] pub sats: LazyVecFrom1, #[deref] #[deref_mut] - #[traversable(flatten)] pub rest: LazyDerivedBlockValue, } diff --git a/crates/brk_computer/src/internal/specialized/value/block/lazy_computed_sum_cum.rs b/crates/brk_computer/src/internal/specialized/value/block/lazy_computed_sum_cum.rs new file mode 100644 index 000000000..7edfaa7c1 --- /dev/null +++ b/crates/brk_computer/src/internal/specialized/value/block/lazy_computed_sum_cum.rs @@ -0,0 +1,97 @@ +//! Value type with stored height + lazy dollars for SumCum pattern. +//! +//! Use this when: +//! - Sats height is stored (primary source of truth) +//! - Sats indexes are derived from height +//! 
- Bitcoin is lazy (transform from sats) +//! - Dollars height is lazy (price × sats), with stored indexes + +use brk_error::Result; +use brk_traversable::Traversable; +use brk_types::{Bitcoin, Close, Dollars, Sats, Version}; +use vecdb::{Database, Exit, IterableCloneableVec, LazyVecFrom2}; + +use crate::{ + ComputeIndexes, indexes, + internal::{ + ClosePriceTimesSats, ComputedBlockSumCum, LazyBlockSumCum, LazyComputedBlockSumCum, + SatsToBitcoin, + }, + price, +}; + +/// Value wrapper with stored sats height + lazy dollars. +/// +/// Sats height is stored (computed directly or from stateful loop). +/// Dollars height is lazy (price × sats). +/// Cumulative and dateindex aggregates are stored for both. +#[derive(Clone, Traversable)] +pub struct LazyComputedValueBlockSumCum { + pub sats: ComputedBlockSumCum, + pub bitcoin: LazyBlockSumCum, + pub dollars: Option, Sats>>, +} + +const VERSION: Version = Version::ZERO; + +impl LazyComputedValueBlockSumCum { + pub fn forced_import( + db: &Database, + name: &str, + version: Version, + indexes: &indexes::Vecs, + price: Option<&price::Vecs>, + ) -> Result { + let v = version + VERSION; + + let sats = ComputedBlockSumCum::forced_import(db, name, v, indexes)?; + + let bitcoin = LazyBlockSumCum::from_computed::( + &format!("{name}_btc"), + v, + sats.height.boxed_clone(), + &sats, + ); + + let dollars = if let Some(price) = price { + let dollars_height = LazyVecFrom2::transformed::( + &format!("{name}_usd"), + v, + price.usd.split.close.height.boxed_clone(), + sats.height.boxed_clone(), + ); + + Some(LazyComputedBlockSumCum::forced_import( + db, + &format!("{name}_usd"), + v, + indexes, + dollars_height, + )?) + } else { + None + }; + + Ok(Self { + sats, + bitcoin, + dollars, + }) + } + + /// Compute rest (derived indexes) from already-computed height. 
+ pub fn compute_rest( + &mut self, + indexes: &indexes::Vecs, + starting_indexes: &ComputeIndexes, + exit: &Exit, + ) -> Result<()> { + self.sats.compute_rest(indexes, starting_indexes, exit)?; + + if let Some(dollars) = self.dollars.as_mut() { + dollars.derive_from(indexes, starting_indexes, exit)?; + } + + Ok(()) + } +} diff --git a/crates/brk_computer/src/internal/specialized/value/block/lazy_height.rs b/crates/brk_computer/src/internal/specialized/value/block/lazy_height.rs new file mode 100644 index 000000000..d10427a80 --- /dev/null +++ b/crates/brk_computer/src/internal/specialized/value/block/lazy_height.rs @@ -0,0 +1,52 @@ +//! Fully lazy value type for Height indexing. + +use brk_traversable::Traversable; +use brk_types::{Bitcoin, Dollars, Height, Sats, Version}; +use vecdb::{IterableCloneableVec, LazyVecFrom1, UnaryTransform}; + +use crate::internal::SatsToBitcoin; + +const VERSION: Version = Version::ZERO; + +/// Fully lazy value type at height level. +/// +/// All fields are lazy transforms from existing sources - no storage. 
+#[derive(Clone, Traversable)] +pub struct LazyValueBlockHeight { + pub sats: LazyVecFrom1, + pub bitcoin: LazyVecFrom1, + pub dollars: Option>, +} + +impl LazyValueBlockHeight { + pub fn from_block_source( + name: &str, + source: &super::ValueBlockLast, + version: Version, + ) -> Self + where + SatsTransform: UnaryTransform, + DollarsTransform: UnaryTransform, + { + let v = version + VERSION; + + let sats = + LazyVecFrom1::transformed::(name, v, source.sats.height.boxed_clone()); + + let bitcoin = LazyVecFrom1::transformed::( + &format!("{name}_btc"), + v, + source.sats.height.boxed_clone(), + ); + + let dollars = source.dollars.as_ref().map(|d| { + LazyVecFrom1::transformed::( + &format!("{name}_usd"), + v, + d.height.boxed_clone(), + ) + }); + + Self { sats, bitcoin, dollars } + } +} diff --git a/crates/brk_computer/src/internal/specialized/value/block/lazy_last.rs b/crates/brk_computer/src/internal/specialized/value/block/lazy_last.rs new file mode 100644 index 000000000..0ebd6e097 --- /dev/null +++ b/crates/brk_computer/src/internal/specialized/value/block/lazy_last.rs @@ -0,0 +1,46 @@ +//! Lazy value wrapper for ValueBlockLast - all transforms are lazy. + +use brk_traversable::Traversable; +use brk_types::{Dollars, Sats, Version}; +use derive_more::{Deref, DerefMut}; +use vecdb::UnaryTransform; + +use super::LazyValueBlockHeight; +use crate::internal::{LazyValueDateLast, SatsToBitcoin, ValueBlockLast}; + +const VERSION: Version = Version::ZERO; + +/// Lazy value wrapper with height + date last transforms from ValueBlockLast. 
+#[derive(Clone, Deref, DerefMut, Traversable)] +#[traversable(merge)] +pub struct LazyLastBlockValue { + #[traversable(flatten)] + pub height: LazyValueBlockHeight, + #[deref] + #[deref_mut] + #[traversable(flatten)] + pub dates: LazyValueDateLast, +} + +impl LazyLastBlockValue { + pub fn from_block_source( + name: &str, + source: &ValueBlockLast, + version: Version, + ) -> Self + where + SatsTransform: UnaryTransform, + DollarsTransform: UnaryTransform, + { + let v = version + VERSION; + + let height = + LazyValueBlockHeight::from_block_source::(name, source, v); + + let dates = LazyValueDateLast::from_block_source::( + name, source, v, + ); + + Self { height, dates } + } +} diff --git a/crates/brk_computer/src/internal/specialized/value/block/lazy_sum_cum.rs b/crates/brk_computer/src/internal/specialized/value/block/lazy_sum_cum.rs new file mode 100644 index 000000000..d9c00561e --- /dev/null +++ b/crates/brk_computer/src/internal/specialized/value/block/lazy_sum_cum.rs @@ -0,0 +1,110 @@ +//! Value type with lazy binary height + stored derived SumCum. +//! +//! Use this when the height-level sats is a lazy binary transform (e.g., mask × source). + +use brk_error::Result; +use brk_traversable::Traversable; +use brk_types::{Bitcoin, Close, Dollars, Height, Sats, Version}; +use schemars::JsonSchema; +use vecdb::{ + BinaryTransform, Database, Exit, IterableBoxedVec, IterableCloneableVec, LazyVecFrom2, +}; + +use crate::{ + ComputeIndexes, indexes, + internal::{ + ClosePriceTimesSats, ComputedVecValue, LazyBlockSumCum, LazyComputedBlockSumCum, + SatsToBitcoin, + }, + price, +}; + +/// Value wrapper with lazy binary height + stored derived SumCum. +/// +/// Sats height is a lazy binary transform (e.g., mask × source). +/// Dollars height is also lazy (price × sats). +/// Cumulative and dateindex are stored. 
+#[derive(Clone, Traversable)] +pub struct LazyValueBlockSumCum +where + S1T: ComputedVecValue + JsonSchema, + S2T: ComputedVecValue + JsonSchema, +{ + pub sats: LazyComputedBlockSumCum, + pub bitcoin: LazyBlockSumCum, + pub dollars: Option, Sats>>, +} + +const VERSION: Version = Version::ZERO; + +impl LazyValueBlockSumCum +where + S1T: ComputedVecValue + JsonSchema, + S2T: ComputedVecValue + JsonSchema, +{ + pub fn forced_import( + db: &Database, + name: &str, + version: Version, + indexes: &indexes::Vecs, + source1: IterableBoxedVec, + source2: IterableBoxedVec, + price: Option<&price::Vecs>, + ) -> Result + where + F: BinaryTransform, + { + let v = version + VERSION; + + let sats_height = LazyVecFrom2::transformed::(name, v, source1, source2); + let sats = LazyComputedBlockSumCum::forced_import(db, name, v, indexes, sats_height)?; + + let bitcoin = LazyBlockSumCum::from_derived::( + &format!("{name}_btc"), + v, + sats.height.boxed_clone(), + &sats.rest, + ); + + let dollars = if let Some(price) = price { + let dollars_height = LazyVecFrom2::transformed::( + &format!("{name}_usd"), + v, + price.usd.split.close.height.boxed_clone(), + sats.height.boxed_clone(), + ); + + Some(LazyComputedBlockSumCum::forced_import( + db, + &format!("{name}_usd"), + v, + indexes, + dollars_height, + )?) + } else { + None + }; + + Ok(Self { + sats, + bitcoin, + dollars, + }) + } + + /// Derive aggregates from the lazy sats height source. 
+ pub fn derive_from( + &mut self, + indexes: &indexes::Vecs, + starting_indexes: &ComputeIndexes, + exit: &Exit, + ) -> Result<()> { + self.sats.derive_from(indexes, starting_indexes, exit)?; + + if let Some(dollars) = self.dollars.as_mut() { + dollars.derive_from(indexes, starting_indexes, exit)?; + } + + Ok(()) + } +} diff --git a/crates/brk_computer/src/internal/specialized/value/block/mod.rs b/crates/brk_computer/src/internal/specialized/value/block/mod.rs index e44c232df..dfc09018d 100644 --- a/crates/brk_computer/src/internal/specialized/value/block/mod.rs +++ b/crates/brk_computer/src/internal/specialized/value/block/mod.rs @@ -1,15 +1,25 @@ mod binary; mod full; +mod height; mod last; -mod lazy_derived; mod lazy; +mod lazy_computed_sum_cum; +mod lazy_derived; +mod lazy_height; +mod lazy_last; +mod lazy_sum_cum; mod sum; mod sum_cum; pub use binary::*; pub use full::*; +pub use height::*; pub use last::*; pub use lazy::*; +pub use lazy_computed_sum_cum::*; pub use lazy_derived::*; +pub use lazy_height::*; +pub use lazy_last::*; +pub use lazy_sum_cum::*; pub use sum::*; pub use sum_cum::*; diff --git a/crates/brk_computer/src/internal/specialized/value/block/sum.rs b/crates/brk_computer/src/internal/specialized/value/block/sum.rs index 979431ede..3ad6143e1 100644 --- a/crates/brk_computer/src/internal/specialized/value/block/sum.rs +++ b/crates/brk_computer/src/internal/specialized/value/block/sum.rs @@ -69,7 +69,7 @@ impl ValueBlockSum { // Compute dollars from bitcoin and price (if enabled) if let (Some(dollars), Some(price)) = (self.dollars.as_mut(), price) { let height_to_bitcoin = &self.bitcoin.height; - let height_to_price_close = &price.usd.chainindexes_to_price_close.height; + let height_to_price_close = &price.usd.split.close.height; dollars.compute_all(indexes, starting_indexes, exit, |v| { v.compute_from_bitcoin( diff --git a/crates/brk_computer/src/internal/specialized/value/block/sum_cum.rs 
b/crates/brk_computer/src/internal/specialized/value/block/sum_cum.rs index 6e68861f8..08c5c08f2 100644 --- a/crates/brk_computer/src/internal/specialized/value/block/sum_cum.rs +++ b/crates/brk_computer/src/internal/specialized/value/block/sum_cum.rs @@ -67,22 +67,7 @@ impl ValueBlockSumCum { self.sats .compute_all(indexes, starting_indexes, exit, |v| compute(v))?; - // Compute dollars from bitcoin and price (if enabled) - if let (Some(dollars), Some(price)) = (self.dollars.as_mut(), price) { - let height_to_bitcoin = &self.bitcoin.height; - let height_to_price_close = &price.usd.chainindexes_to_price_close.height; - - dollars.compute_all(indexes, starting_indexes, exit, |v| { - v.compute_from_bitcoin( - starting_indexes.height, - height_to_bitcoin, - height_to_price_close, - exit, - ) - })?; - } - - Ok(()) + self.compute_dollars(indexes, price, starting_indexes, exit) } /// Derive from an external height source (e.g., a LazyVec). @@ -98,10 +83,32 @@ impl ValueBlockSumCum { self.sats .derive_from(indexes, starting_indexes, source, exit)?; + self.compute_dollars(indexes, price, starting_indexes, exit) + } + + /// Compute rest (derived indexes) from already-computed height. 
+ pub fn compute_rest( + &mut self, + indexes: &indexes::Vecs, + price: Option<&price::Vecs>, + starting_indexes: &ComputeIndexes, + exit: &Exit, + ) -> Result<()> { + self.sats.compute_rest(indexes, starting_indexes, exit)?; + self.compute_dollars(indexes, price, starting_indexes, exit) + } + + fn compute_dollars( + &mut self, + indexes: &indexes::Vecs, + price: Option<&price::Vecs>, + starting_indexes: &ComputeIndexes, + exit: &Exit, + ) -> Result<()> { // Compute dollars from bitcoin and price (if enabled) if let (Some(dollars), Some(price)) = (self.dollars.as_mut(), price) { let height_to_bitcoin = &self.bitcoin.height; - let height_to_price_close = &price.usd.chainindexes_to_price_close.height; + let height_to_price_close = &price.usd.split.close.height; dollars.compute_all(indexes, starting_indexes, exit, |v| { v.compute_from_bitcoin( diff --git a/crates/brk_computer/src/internal/specialized/value/date/block_date_last.rs b/crates/brk_computer/src/internal/specialized/value/date/block_date_last.rs new file mode 100644 index 000000000..dfff0d365 --- /dev/null +++ b/crates/brk_computer/src/internal/specialized/value/date/block_date_last.rs @@ -0,0 +1,81 @@ +//! Value type for stateful Last pattern - height and dateindex both stored independently. +//! +//! Use this when dateindex values are NOT derivable from height (e.g., unrealized metrics +//! where end-of-day state differs from last-block-of-day). + +use brk_error::Result; +use brk_traversable::Traversable; +use brk_types::{DifficultyEpoch, Height, Sats, Version}; +use derive_more::{Deref, DerefMut}; +use vecdb::{Database, EagerVec, Exit, ImportableVec, IterableCloneableVec, PcoVec}; + +use crate::{ComputeIndexes, indexes, price}; + +use super::super::block::LazyDerivedBlockValue; +use super::ValueDateLast; +use crate::internal::LazyLast; + +/// Value type where both height and dateindex are stored independently. +/// Dateindex values cannot be derived from height (e.g., unrealized P&L). 
+#[derive(Clone, Deref, DerefMut, Traversable)] +#[traversable(merge)] +pub struct ValueBlockDateLast { + #[traversable(wrap = "sats")] + pub height: EagerVec>, + #[traversable(flatten)] + pub height_value: LazyDerivedBlockValue, + pub difficultyepoch: LazyLast, + #[deref] + #[deref_mut] + #[traversable(flatten)] + pub indexes: ValueDateLast, +} + +const VERSION: Version = Version::ZERO; + +impl ValueBlockDateLast { + pub fn forced_import( + db: &Database, + name: &str, + version: Version, + compute_dollars: bool, + indexes: &indexes::Vecs, + price: Option<&price::Vecs>, + ) -> Result { + let v = version + VERSION; + + let height: EagerVec> = EagerVec::forced_import(db, name, v)?; + + let price_source = price.map(|p| p.usd.split.close.height.boxed_clone()); + + let height_value = + LazyDerivedBlockValue::from_source(name, height.boxed_clone(), v, price_source); + + let difficultyepoch = LazyLast::from_source( + name, + v, + height.boxed_clone(), + indexes.difficultyepoch.identity.boxed_clone(), + ); + + let indexes = ValueDateLast::forced_import(db, name, v, compute_dollars, indexes)?; + + Ok(Self { + height, + height_value, + difficultyepoch, + indexes, + }) + } + + /// Compute derived periods from dateindex. 
+ pub fn compute_dollars_from_price( + &mut self, + price: Option<&price::Vecs>, + starting_indexes: &ComputeIndexes, + exit: &Exit, + ) -> Result<()> { + self.indexes + .compute_dollars_from_price(price, starting_indexes, exit) + } +} diff --git a/crates/brk_computer/src/internal/specialized/value/date/derived_last.rs b/crates/brk_computer/src/internal/specialized/value/date/derived_last.rs index 4d230875d..5a5857257 100644 --- a/crates/brk_computer/src/internal/specialized/value/date/derived_last.rs +++ b/crates/brk_computer/src/internal/specialized/value/date/derived_last.rs @@ -52,26 +52,35 @@ impl ValueDerivedDateLast { }) } - pub fn compute_rest( + pub fn compute_dollars(&mut self, mut compute: F) -> Result<()> + where + F: FnMut(&mut ComputedDateLast) -> Result<()>, + { + if let Some(dollars) = self.dollars.as_mut() { + compute(dollars)?; + } + Ok(()) + } + + pub fn compute_dollars_from_price( &mut self, price: Option<&price::Vecs>, - _starting_indexes: &ComputeIndexes, + starting_indexes: &ComputeIndexes, exit: &Exit, ) -> Result<()> { - let dateindex_to_bitcoin = &*self.bitcoin.dateindex; - let dateindex_to_price_close = &price.u().usd.timeindexes_to_price_close.dateindex; - if let Some(dollars) = self.dollars.as_mut() { - dollars.compute_all(_starting_indexes, exit, |v| { + let dateindex_to_bitcoin = &*self.bitcoin.dateindex; + let dateindex_to_price_close = &price.u().usd.split.close.dateindex; + + dollars.compute_all(starting_indexes, exit, |v| { v.compute_from_bitcoin( - _starting_indexes.dateindex, + starting_indexes.dateindex, dateindex_to_bitcoin, dateindex_to_price_close, exit, ) })?; } - Ok(()) } } diff --git a/crates/brk_computer/src/internal/specialized/value/date/last.rs b/crates/brk_computer/src/internal/specialized/value/date/last.rs index b91815f79..c807737bc 100644 --- a/crates/brk_computer/src/internal/specialized/value/date/last.rs +++ b/crates/brk_computer/src/internal/specialized/value/date/last.rs @@ -11,12 +11,12 @@ use 
crate::{ComputeIndexes, indexes, price}; use super::ValueDerivedDateLast; #[derive(Clone, Deref, DerefMut, Traversable)] +#[traversable(merge)] pub struct ValueDateLast { #[traversable(rename = "sats")] pub sats_dateindex: EagerVec>, #[deref] #[deref_mut] - #[traversable(flatten)] pub rest: ValueDerivedDateLast, } @@ -47,27 +47,41 @@ impl ValueDateLast { }) } + pub fn compute_sats(&mut self, mut compute: F) -> Result<()> + where + F: FnMut(&mut EagerVec>) -> Result<()>, + { + compute(&mut self.sats_dateindex) + } + pub fn compute_all( &mut self, price: Option<&price::Vecs>, starting_indexes: &ComputeIndexes, exit: &Exit, - mut compute: F, + compute: F, ) -> Result<()> where F: FnMut(&mut EagerVec>) -> Result<()>, { - compute(&mut self.sats_dateindex)?; - self.rest.compute_rest(price, starting_indexes, exit)?; - Ok(()) + self.compute_sats(compute)?; + self.compute_dollars_from_price(price, starting_indexes, exit) } - pub fn compute_rest( + pub fn compute_dollars(&mut self, compute: F) -> Result<()> + where + F: FnMut(&mut crate::internal::ComputedDateLast) -> Result<()>, + { + self.rest.compute_dollars(compute) + } + + pub fn compute_dollars_from_price( &mut self, price: Option<&price::Vecs>, starting_indexes: &ComputeIndexes, exit: &Exit, ) -> Result<()> { - self.rest.compute_rest(price, starting_indexes, exit) + self.rest + .compute_dollars_from_price(price, starting_indexes, exit) } } diff --git a/crates/brk_computer/src/internal/specialized/value/date/lazy_last.rs b/crates/brk_computer/src/internal/specialized/value/date/lazy_last.rs index 7e5b95864..f610fcd01 100644 --- a/crates/brk_computer/src/internal/specialized/value/date/lazy_last.rs +++ b/crates/brk_computer/src/internal/specialized/value/date/lazy_last.rs @@ -4,7 +4,7 @@ use brk_traversable::Traversable; use brk_types::{Bitcoin, Dollars, Sats, Version}; use vecdb::{IterableCloneableVec, UnaryTransform}; -use crate::internal::{LazyDateLast, ValueDateLast}; +use crate::internal::{LazyDateLast, 
ValueBlockLast, ValueDateLast}; const VERSION: Version = Version::ZERO; @@ -53,4 +53,42 @@ impl LazyValueDateLast { Self { sats, bitcoin, dollars } } + + pub fn from_block_source( + name: &str, + source: &ValueBlockLast, + version: Version, + ) -> Self + where + SatsTransform: UnaryTransform, + BitcoinTransform: UnaryTransform, + DollarsTransform: UnaryTransform, + { + let v = version + VERSION; + + let sats = LazyDateLast::from_derived::( + name, + v, + source.sats.rest.dateindex.0.boxed_clone(), + &source.sats.rest.dates, + ); + + let bitcoin = LazyDateLast::from_derived::( + &format!("{name}_btc"), + v, + source.sats.rest.dateindex.0.boxed_clone(), + &source.sats.rest.dates, + ); + + let dollars = source.dollars.as_ref().map(|dollars_source| { + LazyDateLast::from_derived::( + &format!("{name}_usd"), + v, + dollars_source.rest.dateindex.0.boxed_clone(), + &dollars_source.rest.dates, + ) + }); + + Self { sats, bitcoin, dollars } + } } diff --git a/crates/brk_computer/src/internal/specialized/value/date/mod.rs b/crates/brk_computer/src/internal/specialized/value/date/mod.rs index b98939532..cdc966757 100644 --- a/crates/brk_computer/src/internal/specialized/value/date/mod.rs +++ b/crates/brk_computer/src/internal/specialized/value/date/mod.rs @@ -1,7 +1,9 @@ +mod block_date_last; mod derived_last; mod last; mod lazy_last; +pub use block_date_last::*; pub use derived_last::*; pub use last::*; pub use lazy_last::*; diff --git a/crates/brk_computer/src/internal/specialized/value/derived_block/sum_cum.rs b/crates/brk_computer/src/internal/specialized/value/derived_block/sum_cum.rs index 7bf228d79..fe8900235 100644 --- a/crates/brk_computer/src/internal/specialized/value/derived_block/sum_cum.rs +++ b/crates/brk_computer/src/internal/specialized/value/derived_block/sum_cum.rs @@ -7,19 +7,19 @@ use vecdb::{Database, Exit, IterableBoxedVec, IterableCloneableVec, IterableVec, use crate::{ ComputeIndexes, indexes, - internal::{ClosePriceTimesSats, 
DerivedComputedBlockSumCum, LazyBlockSumCum, SatsToBitcoin}, + internal::{ + ClosePriceTimesSats, DerivedComputedBlockSumCum, LazyBlockSumCum, LazyComputedBlockSumCum, + SatsToBitcoin, + }, price, }; -pub type LazyDollarsHeight = LazyVecFrom2, Height, Sats>; - /// Value wrapper for derived SumCum (derives from external height source). #[derive(Clone, Traversable)] pub struct DerivedValueBlockSumCum { pub sats: DerivedComputedBlockSumCum, pub bitcoin: LazyBlockSumCum, - pub dollars_source: Option, - pub dollars: Option>, + pub dollars: Option, Sats>>, } const VERSION: Version = Version::ZERO; @@ -50,31 +50,28 @@ impl DerivedValueBlockSumCum { &sats, ); - let (dollars_source, dollars) = if let Some(price) = price { - let dollars_source = LazyVecFrom2::transformed::( + let dollars = if let Some(price) = price { + let dollars_height = LazyVecFrom2::transformed::( &format!("{name}_usd"), v, - price.usd.chainindexes_to_price_close.height.boxed_clone(), + price.usd.split.close.height.boxed_clone(), sats_source.boxed_clone(), ); - let dollars = DerivedComputedBlockSumCum::forced_import( + Some(LazyComputedBlockSumCum::forced_import( db, &format!("{name}_usd"), - dollars_source.boxed_clone(), v, indexes, - )?; - - (Some(dollars_source), Some(dollars)) + dollars_height, + )?) 
} else { - (None, None) + None }; Ok(Self { sats, bitcoin, - dollars_source, dollars, }) } @@ -90,10 +87,8 @@ impl DerivedValueBlockSumCum { self.sats .derive_from(indexes, starting_indexes, sats_source, exit)?; - if let (Some(dollars), Some(dollars_source)) = - (self.dollars.as_mut(), self.dollars_source.as_ref()) - { - dollars.derive_from(indexes, starting_indexes, dollars_source, exit)?; + if let Some(dollars) = self.dollars.as_mut() { + dollars.derive_from(indexes, starting_indexes, exit)?; } Ok(()) diff --git a/crates/brk_computer/src/internal/specialized/value/tx/derived_full.rs b/crates/brk_computer/src/internal/specialized/value/tx/derived_full.rs new file mode 100644 index 000000000..907ac66ca --- /dev/null +++ b/crates/brk_computer/src/internal/specialized/value/tx/derived_full.rs @@ -0,0 +1,79 @@ +//! Value type for Full pattern from TxIndex. + +use brk_error::Result; +use brk_indexer::Indexer; +use brk_traversable::Traversable; +use brk_types::{Bitcoin, Sats, TxIndex, Version}; +use vecdb::{CollectableVec, Database, Exit, IterableCloneableVec}; + +use crate::{ + ComputeIndexes, indexes, + internal::{DerivedTxFull, DollarsTxFull, LazyDerivedTxFull, SatsToBitcoin}, + price, +}; + +#[derive(Clone, Traversable)] +pub struct ValueDerivedTxFull { + pub sats: DerivedTxFull, + pub bitcoin: LazyDerivedTxFull, + pub dollars: Option, +} + +const VERSION: Version = Version::ZERO; + +impl ValueDerivedTxFull { + pub fn forced_import( + db: &Database, + name: &str, + version: Version, + indexes: &indexes::Vecs, + indexer: &Indexer, + price: Option<&price::Vecs>, + sats_txindex: &impl IterableCloneableVec, + ) -> Result { + let v = version + VERSION; + + let sats = DerivedTxFull::forced_import(db, name, v, indexes)?; + + let bitcoin = + LazyDerivedTxFull::from_computed::(&format!("{name}_btc"), v, &sats); + + let dollars = price + .map(|price| { + DollarsTxFull::forced_import( + db, + &format!("{name}_usd"), + v, + indexes, + sats_txindex.boxed_clone(), + 
indexer.vecs.transactions.height.boxed_clone(), + price.usd.split.close.height.boxed_clone(), + ) + }) + .transpose()?; + + Ok(Self { + sats, + bitcoin, + dollars, + }) + } + + pub fn derive_from( + &mut self, + indexer: &Indexer, + indexes: &indexes::Vecs, + starting_indexes: &ComputeIndexes, + txindex_source: &impl CollectableVec, + exit: &Exit, + ) -> Result<()> { + self.sats + .derive_from(indexer, indexes, starting_indexes, txindex_source, exit)?; + + if let Some(dollars) = self.dollars.as_mut() { + dollars.derive_from(indexer, indexes, starting_indexes, exit)?; + } + + Ok(()) + } +} diff --git a/crates/brk_computer/src/internal/specialized/value/tx/dollars.rs b/crates/brk_computer/src/internal/specialized/value/tx/dollars.rs index 7146b9501..4aa8527dd 100644 --- a/crates/brk_computer/src/internal/specialized/value/tx/dollars.rs +++ b/crates/brk_computer/src/internal/specialized/value/tx/dollars.rs @@ -29,7 +29,6 @@ pub struct DollarsTxFull { pub dateindex: Stats, #[deref] #[deref_mut] - #[traversable(flatten)] pub dates: DerivedDateFull, } @@ -61,10 +60,7 @@ impl DollarsTxFull { height.distribution.minmax.max.0.boxed_clone(), height.sum_cum.sum.0.boxed_clone(), height.sum_cum.cumulative.0.boxed_clone(), - indexes - .block - .difficultyepoch_to_difficultyepoch - .boxed_clone(), + indexes.difficultyepoch.identity.boxed_clone(), ); let dates = DerivedDateFull::from_sources( @@ -97,16 +93,16 @@ impl DollarsTxFull { self.height.compute( starting_indexes.height, &self.txindex, - &indexer.vecs.tx.height_to_first_txindex, - &indexes.block.height_to_txindex_count, + &indexer.vecs.transactions.first_txindex, + &indexes.height.txindex_count, exit, )?; self.dateindex.compute( starting_indexes.dateindex, &self.height.distribution.average.0, - &indexes.time.dateindex_to_first_height, - &indexes.time.dateindex_to_height_count, + &indexes.dateindex.first_height, + &indexes.dateindex.height_count, exit, )?; diff --git 
a/crates/brk_computer/src/internal/specialized/value/tx/full.rs b/crates/brk_computer/src/internal/specialized/value/tx/full.rs index 6204763f6..775fac96e 100644 --- a/crates/brk_computer/src/internal/specialized/value/tx/full.rs +++ b/crates/brk_computer/src/internal/specialized/value/tx/full.rs @@ -1,27 +1,30 @@ -//! Value type for Full pattern from TxIndex. +//! ValueTxFull - eager txindex Sats source + ValueDerivedTxFull (sats/bitcoin/dollars). use brk_error::Result; use brk_indexer::Indexer; use brk_traversable::Traversable; -use brk_types::{Bitcoin, Sats, TxIndex, Version}; -use vecdb::{CollectableVec, Database, Exit, IterableCloneableVec}; +use brk_types::{Sats, TxIndex, Version}; +use derive_more::{Deref, DerefMut}; +use vecdb::{Database, EagerVec, Exit, ImportableVec, PcoVec}; -use crate::{ - ComputeIndexes, indexes, - internal::{DerivedTxFull, DollarsTxFull, LazyDerivedTxFull, SatsToBitcoin}, - price, -}; +use crate::{ComputeIndexes, indexes, price}; -#[derive(Clone, Traversable)] -pub struct ValueDerivedTxFull { - pub sats: DerivedTxFull, - pub bitcoin: LazyDerivedTxFull, - pub dollars: Option, -} +use super::ValueDerivedTxFull; const VERSION: Version = Version::ZERO; -impl ValueDerivedTxFull { +#[derive(Clone, Deref, DerefMut, Traversable)] +#[traversable(merge)] +pub struct ValueTxFull { + #[traversable(wrap = "sats")] + pub base: EagerVec>, + #[deref] + #[deref_mut] + #[traversable(flatten)] + pub indexes: ValueDerivedTxFull, +} + +impl ValueTxFull { pub fn forced_import( db: &Database, name: &str, @@ -29,33 +32,14 @@ impl ValueDerivedTxFull { indexes: &indexes::Vecs, indexer: &Indexer, price: Option<&price::Vecs>, - sats_txindex: &impl IterableCloneableVec, ) -> Result { let v = version + VERSION; - - let sats = DerivedTxFull::forced_import(db, name, v, indexes)?; - - let bitcoin = - LazyDerivedTxFull::from_computed::(&format!("{name}_btc"), v, &sats); - - let dollars = price - .map(|price| { - DollarsTxFull::forced_import( - db, - 
&format!("{name}_usd"), - v, - indexes, - sats_txindex.boxed_clone(), - indexer.vecs.tx.txindex_to_height.boxed_clone(), - price.usd.chainindexes_to_price_close.height.boxed_clone(), - ) - }) - .transpose()?; - + let txindex = EagerVec::forced_import(db, name, v)?; + let derived = + ValueDerivedTxFull::forced_import(db, name, v, indexes, indexer, price, &txindex)?; Ok(Self { - sats, - bitcoin, - dollars, + base: txindex, + indexes: derived, }) } @@ -64,16 +48,9 @@ impl ValueDerivedTxFull { indexer: &Indexer, indexes: &indexes::Vecs, starting_indexes: &ComputeIndexes, - txindex_source: &impl CollectableVec, exit: &Exit, ) -> Result<()> { - self.sats - .derive_from(indexer, indexes, starting_indexes, txindex_source, exit)?; - - if let Some(dollars) = self.dollars.as_mut() { - dollars.derive_from(indexer, indexes, starting_indexes, exit)?; - } - - Ok(()) + self.indexes + .derive_from(indexer, indexes, starting_indexes, &self.base, exit) } } diff --git a/crates/brk_computer/src/internal/specialized/value/tx/mod.rs b/crates/brk_computer/src/internal/specialized/value/tx/mod.rs index b3e24d62b..e1ec076b8 100644 --- a/crates/brk_computer/src/internal/specialized/value/tx/mod.rs +++ b/crates/brk_computer/src/internal/specialized/value/tx/mod.rs @@ -1,5 +1,7 @@ +mod derived_full; mod dollars; mod full; +pub use derived_full::*; pub use dollars::*; pub use full::*; diff --git a/crates/brk_computer/src/internal/transform/mod.rs b/crates/brk_computer/src/internal/transform/mod.rs index 8c7b8c18c..b0cb6d221 100644 --- a/crates/brk_computer/src/internal/transform/mod.rs +++ b/crates/brk_computer/src/internal/transform/mod.rs @@ -8,7 +8,6 @@ mod dollar_plus; mod dollar_times_tenths; mod f32_identity; mod half_close_price_times_sats; -mod percentage_btc_f64; mod percentage_diff_close_dollars; mod percentage_dollars_f32; mod percentage_dollars_f32_neg; @@ -46,7 +45,6 @@ pub use dollar_plus::*; pub use dollar_times_tenths::*; pub use f32_identity::*; pub use 
half_close_price_times_sats::*; -pub use percentage_btc_f64::*; pub use percentage_diff_close_dollars::*; pub use percentage_dollars_f32::*; pub use percentage_dollars_f32_neg::*; diff --git a/crates/brk_computer/src/internal/transform/percentage_btc_f64.rs b/crates/brk_computer/src/internal/transform/percentage_btc_f64.rs deleted file mode 100644 index 5393ab891..000000000 --- a/crates/brk_computer/src/internal/transform/percentage_btc_f64.rs +++ /dev/null @@ -1,14 +0,0 @@ -use brk_types::{Bitcoin, StoredF64}; -use vecdb::BinaryTransform; - -/// (Bitcoin, Bitcoin) -> StoredF64 percentage (a/b × 100) -/// Used for supply ratio calculations like supply_in_profit / total_supply × 100 -pub struct PercentageBtcF64; - -impl BinaryTransform for PercentageBtcF64 { - #[inline(always)] - fn apply(numerator: Bitcoin, denominator: Bitcoin) -> StoredF64 { - // Bitcoin / Bitcoin returns StoredF64, so dereference and multiply - StoredF64::from(*(numerator / denominator) * 100.0) - } -} diff --git a/crates/brk_computer/src/internal/vec/cumulative.rs b/crates/brk_computer/src/internal/vec/cumulative.rs index f7ef16b43..4eeba6d6d 100644 --- a/crates/brk_computer/src/internal/vec/cumulative.rs +++ b/crates/brk_computer/src/internal/vec/cumulative.rs @@ -8,7 +8,7 @@ use crate::internal::ComputedVecValue; /// Cumulative sum across aggregation periods #[derive(Clone, Deref, DerefMut, Traversable)] -#[traversable(wrap = "cumulative")] +#[traversable(transparent)] pub struct CumulativeVec( pub EagerVec>, ); diff --git a/crates/brk_computer/src/internal/vec/first.rs b/crates/brk_computer/src/internal/vec/first.rs new file mode 100644 index 000000000..b4c00d9e3 --- /dev/null +++ b/crates/brk_computer/src/internal/vec/first.rs @@ -0,0 +1,74 @@ +use brk_error::Result; +use brk_traversable::Traversable; +use brk_types::StoredU64; +use derive_more::{Deref, DerefMut}; +use schemars::JsonSchema; +use vecdb::{ + AnyStoredVec, AnyVec, Database, EagerVec, Exit, GenericStoredVec, ImportableVec, 
IterableVec, + PcoVec, VecIndex, VecValue, Version, +}; + +use crate::internal::ComputedVecValue; + +/// First value in an aggregation period +#[derive(Clone, Deref, DerefMut, Traversable)] +pub struct FirstVec(pub EagerVec>); + +impl FirstVec { + pub fn forced_import(db: &Database, name: &str, version: Version) -> Result { + Ok(Self(EagerVec::forced_import(db, name, version)?)) + } + + #[inline] + pub fn inner(&self) -> &EagerVec> { + &self.0 + } + + /// Compute first values from a source vec. + /// + /// For each output index I, takes the first value from the corresponding + /// range in the source vec (indexed by A). + pub fn compute_first( + &mut self, + max_from: I, + source: &impl IterableVec, + first_indexes: &impl IterableVec, + count_indexes: &impl IterableVec, + exit: &Exit, + ) -> Result<()> + where + A: VecIndex + VecValue, + { + self.0.validate_computed_version_or_reset( + source.version() + first_indexes.version() + count_indexes.version(), + )?; + + let index = max_from.min(I::from(self.0.len())); + + let mut source_iter = source.iter(); + let mut count_indexes_iter = count_indexes.iter().skip(index.to_usize()); + + first_indexes + .iter() + .enumerate() + .skip(index.to_usize()) + .try_for_each(|(i, first_index)| -> Result<()> { + let count_index = count_indexes_iter.next().unwrap(); + let count = *count_index as usize; + + if count == 0 { + panic!("should not compute first if count can be 0"); + } + + let v = source_iter.get_unwrap(first_index); + self.0.truncate_push_at(i, v)?; + + Ok(()) + })?; + + let _lock = exit.lock(); + self.0.write()?; + + Ok(()) + } +} diff --git a/crates/brk_computer/src/internal/vec/median.rs b/crates/brk_computer/src/internal/vec/median.rs deleted file mode 100644 index 51ab50881..000000000 --- a/crates/brk_computer/src/internal/vec/median.rs +++ /dev/null @@ -1,24 +0,0 @@ -use brk_error::Result; -use brk_traversable::Traversable; -use derive_more::{Deref, DerefMut}; -use schemars::JsonSchema; -use vecdb::{Database, 
EagerVec, ImportableVec, PcoVec, VecIndex, Version}; - -use crate::internal::ComputedVecValue; - -/// Median (50th percentile) in an aggregation period -#[derive(Clone, Deref, DerefMut, Traversable)] -pub struct MedianVec( - pub EagerVec>, -); - -impl MedianVec { - pub fn forced_import(db: &Database, name: &str, version: Version) -> Result { - Ok(Self(EagerVec::forced_import(db, &format!("{name}_median"), version)?)) - } - - #[inline] - pub fn inner(&self) -> &EagerVec> { - &self.0 - } -} diff --git a/crates/brk_computer/src/internal/vec/mod.rs b/crates/brk_computer/src/internal/vec/mod.rs index f1fbfcf97..0bfdbd286 100644 --- a/crates/brk_computer/src/internal/vec/mod.rs +++ b/crates/brk_computer/src/internal/vec/mod.rs @@ -1,23 +1,17 @@ mod average; mod cumulative; +mod first; mod last; mod max; -mod median; mod min; -mod pct10; -mod pct25; -mod pct75; -mod pct90; +mod percentiles; mod sum; pub use average::*; pub use cumulative::*; +pub use first::*; pub use last::*; pub use max::*; -pub use median::*; pub use min::*; -pub use pct10::*; -pub use pct25::*; -pub use pct75::*; -pub use pct90::*; +pub use percentiles::*; pub use sum::*; diff --git a/crates/brk_computer/src/internal/vec/pct10.rs b/crates/brk_computer/src/internal/vec/pct10.rs deleted file mode 100644 index 65d099ead..000000000 --- a/crates/brk_computer/src/internal/vec/pct10.rs +++ /dev/null @@ -1,24 +0,0 @@ -use brk_error::Result; -use brk_traversable::Traversable; -use derive_more::{Deref, DerefMut}; -use schemars::JsonSchema; -use vecdb::{Database, EagerVec, ImportableVec, PcoVec, VecIndex, Version}; - -use crate::internal::ComputedVecValue; - -/// 10th percentile in an aggregation period -#[derive(Clone, Deref, DerefMut, Traversable)] -pub struct Pct10Vec( - pub EagerVec>, -); - -impl Pct10Vec { - pub fn forced_import(db: &Database, name: &str, version: Version) -> Result { - Ok(Self(EagerVec::forced_import(db, &format!("{name}_pct10"), version)?)) - } - - #[inline] - pub fn inner(&self) -> 
&EagerVec> { - &self.0 - } -} diff --git a/crates/brk_computer/src/internal/vec/pct25.rs b/crates/brk_computer/src/internal/vec/pct25.rs deleted file mode 100644 index 0ca26d48b..000000000 --- a/crates/brk_computer/src/internal/vec/pct25.rs +++ /dev/null @@ -1,24 +0,0 @@ -use brk_error::Result; -use brk_traversable::Traversable; -use derive_more::{Deref, DerefMut}; -use schemars::JsonSchema; -use vecdb::{Database, EagerVec, ImportableVec, PcoVec, VecIndex, Version}; - -use crate::internal::ComputedVecValue; - -/// 25th percentile in an aggregation period -#[derive(Clone, Deref, DerefMut, Traversable)] -pub struct Pct25Vec( - pub EagerVec>, -); - -impl Pct25Vec { - pub fn forced_import(db: &Database, name: &str, version: Version) -> Result { - Ok(Self(EagerVec::forced_import(db, &format!("{name}_pct25"), version)?)) - } - - #[inline] - pub fn inner(&self) -> &EagerVec> { - &self.0 - } -} diff --git a/crates/brk_computer/src/internal/vec/pct75.rs b/crates/brk_computer/src/internal/vec/pct75.rs deleted file mode 100644 index 26e957b54..000000000 --- a/crates/brk_computer/src/internal/vec/pct75.rs +++ /dev/null @@ -1,24 +0,0 @@ -use brk_error::Result; -use brk_traversable::Traversable; -use derive_more::{Deref, DerefMut}; -use schemars::JsonSchema; -use vecdb::{Database, EagerVec, ImportableVec, PcoVec, VecIndex, Version}; - -use crate::internal::ComputedVecValue; - -/// 75th percentile in an aggregation period -#[derive(Clone, Deref, DerefMut, Traversable)] -pub struct Pct75Vec( - pub EagerVec>, -); - -impl Pct75Vec { - pub fn forced_import(db: &Database, name: &str, version: Version) -> Result { - Ok(Self(EagerVec::forced_import(db, &format!("{name}_pct75"), version)?)) - } - - #[inline] - pub fn inner(&self) -> &EagerVec> { - &self.0 - } -} diff --git a/crates/brk_computer/src/internal/vec/pct90.rs b/crates/brk_computer/src/internal/vec/pct90.rs deleted file mode 100644 index 25e15ef11..000000000 --- a/crates/brk_computer/src/internal/vec/pct90.rs +++ /dev/null @@ 
-1,24 +0,0 @@ -use brk_error::Result; -use brk_traversable::Traversable; -use derive_more::{Deref, DerefMut}; -use schemars::JsonSchema; -use vecdb::{Database, EagerVec, ImportableVec, PcoVec, VecIndex, Version}; - -use crate::internal::ComputedVecValue; - -/// 90th percentile in an aggregation period -#[derive(Clone, Deref, DerefMut, Traversable)] -pub struct Pct90Vec( - pub EagerVec>, -); - -impl Pct90Vec { - pub fn forced_import(db: &Database, name: &str, version: Version) -> Result { - Ok(Self(EagerVec::forced_import(db, &format!("{name}_pct90"), version)?)) - } - - #[inline] - pub fn inner(&self) -> &EagerVec> { - &self.0 - } -} diff --git a/crates/brk_computer/src/internal/vec/percentiles.rs b/crates/brk_computer/src/internal/vec/percentiles.rs new file mode 100644 index 000000000..be34bc264 --- /dev/null +++ b/crates/brk_computer/src/internal/vec/percentiles.rs @@ -0,0 +1,36 @@ +//! Percentile vec types for aggregation periods. + +use brk_error::Result; +use brk_traversable::Traversable; +use derive_more::{Deref, DerefMut}; +use schemars::JsonSchema; +use vecdb::{Database, EagerVec, ImportableVec, PcoVec, VecIndex, Version}; + +use crate::internal::ComputedVecValue; + +macro_rules! 
define_percentile_vec { + ($name:ident, $suffix:literal, $doc:literal) => { + #[doc = $doc] + #[derive(Clone, Deref, DerefMut, Traversable)] + pub struct $name( + pub EagerVec>, + ); + + impl $name { + pub fn forced_import(db: &Database, name: &str, version: Version) -> Result { + Ok(Self(EagerVec::forced_import(db, &format!("{name}_{}", $suffix), version)?)) + } + + #[inline] + pub fn inner(&self) -> &EagerVec> { + &self.0 + } + } + }; +} + +define_percentile_vec!(Pct10Vec, "pct10", "10th percentile in an aggregation period"); +define_percentile_vec!(Pct25Vec, "pct25", "25th percentile in an aggregation period"); +define_percentile_vec!(MedianVec, "median", "Median (50th percentile) in an aggregation period"); +define_percentile_vec!(Pct75Vec, "pct75", "75th percentile in an aggregation period"); +define_percentile_vec!(Pct90Vec, "pct90", "90th percentile in an aggregation period"); diff --git a/crates/brk_computer/src/internal/vec/sum.rs b/crates/brk_computer/src/internal/vec/sum.rs index 32c8bcf6a..5098bde3e 100644 --- a/crates/brk_computer/src/internal/vec/sum.rs +++ b/crates/brk_computer/src/internal/vec/sum.rs @@ -8,7 +8,7 @@ use crate::internal::ComputedVecValue; /// Sum of values in an aggregation period #[derive(Clone, Deref, DerefMut, Traversable)] -#[traversable(wrap = "sum")] +#[traversable(transparent)] pub struct SumVec( pub EagerVec>, ); diff --git a/crates/brk_computer/src/lib.rs b/crates/brk_computer/src/lib.rs index 2fefce45d..e25e0f2f4 100644 --- a/crates/brk_computer/src/lib.rs +++ b/crates/brk_computer/src/lib.rs @@ -8,7 +8,7 @@ use brk_indexer::Indexer; use brk_reader::Reader; use brk_traversable::Traversable; use brk_types::Version; -use log::info; +use tracing::info; use vecdb::Exit; mod blocks; @@ -375,6 +375,7 @@ impl Computer { self.pools.compute( indexer, &self.indexes, + &self.blocks, &starting_indexes_clone, exit, )?; diff --git a/crates/brk_computer/src/market/ath/compute.rs b/crates/brk_computer/src/market/ath/compute.rs index 
eb04d2b66..ec961a3c8 100644 --- a/crates/brk_computer/src/market/ath/compute.rs +++ b/crates/brk_computer/src/market/ath/compute.rs @@ -12,34 +12,35 @@ impl Vecs { starting_indexes: &ComputeIndexes, exit: &Exit, ) -> Result<()> { - self.height_to_price_ath.compute_all_time_high( + self.price_ath.height.compute_all_time_high( starting_indexes.height, - &price.usd.chainindexes_to_price_high.height, + &price.usd.split.high.height, exit, )?; - self.height_to_price_drawdown.compute_drawdown( + self.price_drawdown.height.compute_drawdown( starting_indexes.height, - &price.usd.chainindexes_to_price_close.height, - &self.height_to_price_ath, + &price.usd.split.close.height, + &self.price_ath.height, exit, )?; - self.indexes_to_price_ath.compute_all(starting_indexes, exit, |v| { - v.compute_all_time_high( - starting_indexes.dateindex, - &price.usd.timeindexes_to_price_high.dateindex, - exit, - )?; - Ok(()) - })?; + self.price_ath.compute_rest(starting_indexes, exit, |v| { + v.compute_all_time_high( + starting_indexes.dateindex, + &price.usd.split.high.dateindex, + exit, + )?; + Ok(()) + })?; - self.indexes_to_days_since_price_ath.compute_all(starting_indexes, exit, |v| { - let mut high_iter = price.usd.timeindexes_to_price_high.dateindex.into_iter(); + self.days_since_price_ath + .compute_all(starting_indexes, exit, |v| { + let mut high_iter = price.usd.split.high.dateindex.into_iter(); let mut prev = None; v.compute_transform( starting_indexes.dateindex, - &self.indexes_to_price_ath.dateindex, + &self.price_ath.dateindex, |(i, ath, slf)| { if prev.is_none() { let i = i.to_usize(); @@ -62,11 +63,12 @@ impl Vecs { Ok(()) })?; - self.indexes_to_max_days_between_price_aths.compute_all(starting_indexes, exit, |v| { + self.max_days_between_price_aths + .compute_all(starting_indexes, exit, |v| { let mut prev = None; v.compute_transform( starting_indexes.dateindex, - &self.indexes_to_days_since_price_ath.dateindex, + &self.days_since_price_ath.dateindex, |(i, days, slf)| { if 
prev.is_none() { let i = i.to_usize(); diff --git a/crates/brk_computer/src/market/ath/import.rs b/crates/brk_computer/src/market/ath/import.rs index 1bef9478d..69a7e7679 100644 --- a/crates/brk_computer/src/market/ath/import.rs +++ b/crates/brk_computer/src/market/ath/import.rs @@ -6,8 +6,8 @@ use super::Vecs; use crate::{ indexes, internal::{ - BinaryDateLast, ComputedDateLast, LazyDateLast, PercentageDiffCloseDollars, - StoredU16ToYears, + BinaryHeightDateLast, ComputedDateLast, ComputedHeightDateLast, LazyDateLast, + PercentageDiffCloseDollars, StoredU16ToYears, }, price, }; @@ -19,48 +19,44 @@ impl Vecs { indexes: &indexes::Vecs, price: &price::Vecs, ) -> Result { - let indexes_to_price_ath = - ComputedDateLast::forced_import(db, "price_ath", version, indexes)?; + let price_ath = ComputedHeightDateLast::forced_import(db, "price_ath", version, indexes)?; - let indexes_to_max_days_between_price_aths = + let max_days_between_price_aths = ComputedDateLast::forced_import(db, "max_days_between_price_aths", version, indexes)?; - let indexes_to_max_years_between_price_aths = LazyDateLast::from_computed::( + let max_years_between_price_aths = LazyDateLast::from_computed::( "max_years_between_price_aths", version, - indexes_to_max_days_between_price_aths - .dateindex - .boxed_clone(), - &indexes_to_max_days_between_price_aths, + max_days_between_price_aths.dateindex.boxed_clone(), + &max_days_between_price_aths, ); - let indexes_to_days_since_price_ath = + let days_since_price_ath = ComputedDateLast::forced_import(db, "days_since_price_ath", version, indexes)?; - let indexes_to_years_since_price_ath = LazyDateLast::from_computed::( + let years_since_price_ath = LazyDateLast::from_computed::( "years_since_price_ath", version, - indexes_to_days_since_price_ath.dateindex.boxed_clone(), - &indexes_to_days_since_price_ath, + days_since_price_ath.dateindex.boxed_clone(), + &days_since_price_ath, ); - let indexes_to_price_drawdown = - BinaryDateLast::from_computed_both_last::( 
+ let price_drawdown = + BinaryHeightDateLast::from_computed_both_last::( "price_drawdown", version, - &price.usd.timeindexes_to_price_close, - &indexes_to_price_ath, + EagerVec::forced_import(db, "price_drawdown", version)?, + &price.usd.split.close, + &price_ath.rest, ); Ok(Self { - height_to_price_ath: EagerVec::forced_import(db, "price_ath", version)?, - height_to_price_drawdown: EagerVec::forced_import(db, "price_drawdown", version)?, - indexes_to_price_ath, - indexes_to_price_drawdown, - indexes_to_days_since_price_ath, - indexes_to_years_since_price_ath, - indexes_to_max_days_between_price_aths, - indexes_to_max_years_between_price_aths, + price_ath, + price_drawdown, + days_since_price_ath, + years_since_price_ath, + max_days_between_price_aths, + max_years_between_price_aths, }) } } diff --git a/crates/brk_computer/src/market/ath/vecs.rs b/crates/brk_computer/src/market/ath/vecs.rs index e6dc3a056..e83cdcda5 100644 --- a/crates/brk_computer/src/market/ath/vecs.rs +++ b/crates/brk_computer/src/market/ath/vecs.rs @@ -1,19 +1,17 @@ use brk_traversable::Traversable; -use brk_types::{Close, Dollars, Height, StoredF32, StoredU16}; -use vecdb::{EagerVec, PcoVec}; +use brk_types::{Close, Dollars, StoredF32, StoredU16}; -use crate::internal::{BinaryDateLast, ComputedDateLast, LazyDateLast}; +use crate::internal::{ + BinaryHeightDateLast, ComputedDateLast, ComputedHeightDateLast, LazyDateLast, +}; /// All-time high related metrics #[derive(Clone, Traversable)] pub struct Vecs { - pub height_to_price_ath: EagerVec>, - pub height_to_price_drawdown: EagerVec>, - pub indexes_to_price_ath: ComputedDateLast, - // KISS: both sources are ComputedVecsDateLast - pub indexes_to_price_drawdown: BinaryDateLast, Dollars>, - pub indexes_to_days_since_price_ath: ComputedDateLast, - pub indexes_to_years_since_price_ath: LazyDateLast, - pub indexes_to_max_days_between_price_aths: ComputedDateLast, - pub indexes_to_max_years_between_price_aths: LazyDateLast, + pub price_ath: 
ComputedHeightDateLast, + pub price_drawdown: BinaryHeightDateLast, Dollars>, + pub days_since_price_ath: ComputedDateLast, + pub years_since_price_ath: LazyDateLast, + pub max_days_between_price_aths: ComputedDateLast, + pub max_years_between_price_aths: LazyDateLast, } diff --git a/crates/brk_computer/src/market/dca/compute.rs b/crates/brk_computer/src/market/dca/compute.rs index daaecaae9..75d039b50 100644 --- a/crates/brk_computer/src/market/dca/compute.rs +++ b/crates/brk_computer/src/market/dca/compute.rs @@ -17,7 +17,7 @@ impl Vecs { starting_indexes: &ComputeIndexes, exit: &Exit, ) -> Result<()> { - let close = &price.usd.timeindexes_to_price_close.dateindex; + let close = &price.usd.split.close.dateindex; // DCA by period - stack for (stack, days) in self.period_stack.iter_mut_with_days() { @@ -51,7 +51,6 @@ impl Vecs { // DCA by period - CAGR (computed from returns) for (cagr, returns, days) in self.period_cagr.zip_mut_with_period(&self.period_returns) { cagr.compute_all(starting_indexes, exit, |v| { - // KISS: dateindex is no longer Option v.compute_cagr( starting_indexes.dateindex, &returns.dateindex, diff --git a/crates/brk_computer/src/market/dca/import.rs b/crates/brk_computer/src/market/dca/import.rs index 3a7bb6bd6..27cc24cd1 100644 --- a/crates/brk_computer/src/market/dca/import.rs +++ b/crates/brk_computer/src/market/dca/import.rs @@ -5,7 +5,7 @@ use vecdb::Database; use super::{ByDcaCagr, ByDcaClass, ByDcaPeriod, DCA_CLASS_NAMES, DCA_PERIOD_NAMES, Vecs}; use crate::{ indexes, - internal::{BinaryDateLast, ComputedDateLast, PercentageDiffCloseDollars, ValueDateLast}, + internal::{ComputedDateLast, LazyBinaryDateLast, PercentageDiffCloseDollars, ValueDateLast}, price, }; @@ -31,15 +31,14 @@ impl Vecs { ) })?; - // KISS: DCA by period - returns (lazy, derived from price and average_price) let period_returns = DCA_PERIOD_NAMES .zip_ref(&period_average_price) .map(|(name, average_price)| { - BinaryDateLast::from_computed_both_last::( + 
LazyBinaryDateLast::from_computed_both_last::( &format!("{name}_dca_returns"), version, - &price.usd.timeindexes_to_price_close, + &price.usd.split.close, average_price, ) }); @@ -70,15 +69,14 @@ impl Vecs { ComputedDateLast::forced_import(db, &format!("{name}_average_price"), version, indexes) })?; - // KISS: DCA by year class - returns (lazy) let class_returns = DCA_CLASS_NAMES .zip_ref(&class_average_price) .map(|(name, average_price)| { - BinaryDateLast::from_computed_both_last::( + LazyBinaryDateLast::from_computed_both_last::( &format!("{name}_returns"), version, - &price.usd.timeindexes_to_price_close, + &price.usd.split.close, average_price, ) }); diff --git a/crates/brk_computer/src/market/dca/vecs.rs b/crates/brk_computer/src/market/dca/vecs.rs index 2c76a9bb8..82da55f42 100644 --- a/crates/brk_computer/src/market/dca/vecs.rs +++ b/crates/brk_computer/src/market/dca/vecs.rs @@ -2,7 +2,7 @@ use brk_traversable::Traversable; use brk_types::{Close, Dollars, StoredF32}; use super::{ByDcaCagr, ByDcaClass, ByDcaPeriod}; -use crate::internal::{BinaryDateLast, ComputedDateLast, ValueDateLast}; +use crate::internal::{ComputedDateLast, LazyBinaryDateLast, ValueDateLast}; /// Dollar-cost averaging metrics by time period and year class #[derive(Clone, Traversable)] @@ -10,7 +10,7 @@ pub struct Vecs { // DCA by period - KISS types pub period_stack: ByDcaPeriod, pub period_average_price: ByDcaPeriod>, - pub period_returns: ByDcaPeriod, Dollars>>, + pub period_returns: ByDcaPeriod, Dollars>>, pub period_cagr: ByDcaCagr>, // Lump sum by period (for comparison with DCA) - KISS types @@ -19,5 +19,5 @@ pub struct Vecs { // DCA by year class - KISS types pub class_stack: ByDcaClass, pub class_average_price: ByDcaClass>, - pub class_returns: ByDcaClass, Dollars>>, + pub class_returns: ByDcaClass, Dollars>>, } diff --git a/crates/brk_computer/src/market/indicators/compute.rs b/crates/brk_computer/src/market/indicators/compute.rs index a30e131cb..6427bc90d 100644 --- 
a/crates/brk_computer/src/market/indicators/compute.rs +++ b/crates/brk_computer/src/market/indicators/compute.rs @@ -22,11 +22,10 @@ impl Vecs { exit: &Exit, ) -> Result<()> { if let (Some(puell), Some(sma), Some(coinbase_dollars)) = ( - self.indexes_to_puell_multiple.as_mut(), - rewards.indexes_to_subsidy_usd_1y_sma.as_ref(), - rewards.indexes_to_coinbase.dollars.as_ref(), + self.puell_multiple.as_mut(), + rewards.subsidy_usd_1y_sma.as_ref(), + rewards.coinbase.dollars.as_ref(), ) { - // KISS: dateindex.sum is now a concrete field let date_to_coinbase_usd_sum = &coinbase_dollars.dateindex.sum_cum.sum.0; puell.compute_all(starting_indexes, exit, |v| { @@ -40,51 +39,50 @@ impl Vecs { })?; } - // KISS: dateindex is no longer Option let returns_dateindex = &returns.price_returns._1d.dateindex; - self.dateindex_to_rsi_gains.compute_transform( + self.rsi_gains.compute_transform( starting_indexes.dateindex, returns_dateindex, |(i, ret, ..)| (i, StoredF32::from((*ret).max(0.0))), exit, )?; - self.dateindex_to_rsi_losses.compute_transform( + self.rsi_losses.compute_transform( starting_indexes.dateindex, returns_dateindex, |(i, ret, ..)| (i, StoredF32::from((-*ret).max(0.0))), exit, )?; - self.dateindex_to_rsi_average_gain_14d.compute_rma( + self.rsi_average_gain_14d.compute_rma( starting_indexes.dateindex, - &self.dateindex_to_rsi_gains, + &self.rsi_gains, 14, exit, )?; - self.dateindex_to_rsi_average_loss_14d.compute_rma( + self.rsi_average_loss_14d.compute_rma( starting_indexes.dateindex, - &self.dateindex_to_rsi_losses, + &self.rsi_losses, 14, exit, )?; let ema12 = &moving_average - .indexes_to_price_12d_ema + .price_12d_ema .price .as_ref() .unwrap() .dateindex; let ema26 = &moving_average - .indexes_to_price_26d_ema + .price_26d_ema .price .as_ref() .unwrap() .dateindex; - self.dateindex_to_macd_line.compute_transform2( + self.macd_line.compute_transform2( starting_indexes.dateindex, ema12, ema26, @@ -92,33 +90,21 @@ impl Vecs { exit, )?; - 
self.dateindex_to_macd_signal.compute_ema( - starting_indexes.dateindex, - &self.dateindex_to_macd_line, - 9, - exit, - )?; + self.macd_signal + .compute_ema(starting_indexes.dateindex, &self.macd_line, 9, exit)?; // Stochastic RSI: StochRSI = (RSI - min) / (max - min) * 100 - self.dateindex_to_rsi_14d_min.compute_min( - starting_indexes.dateindex, - &self.dateindex_to_rsi_14d, - 14, - exit, - )?; + self.rsi_14d_min + .compute_min(starting_indexes.dateindex, &self.rsi_14d, 14, exit)?; - self.dateindex_to_rsi_14d_max.compute_max( - starting_indexes.dateindex, - &self.dateindex_to_rsi_14d, - 14, - exit, - )?; + self.rsi_14d_max + .compute_max(starting_indexes.dateindex, &self.rsi_14d, 14, exit)?; - self.dateindex_to_stoch_rsi.compute_transform3( + self.stoch_rsi.compute_transform3( starting_indexes.dateindex, - &self.dateindex_to_rsi_14d, - &self.dateindex_to_rsi_14d_min, - &self.dateindex_to_rsi_14d_max, + &self.rsi_14d, + &self.rsi_14d_min, + &self.rsi_14d_max, |(i, rsi, min, max, ..)| { let range = *max - *min; let stoch = if range == 0.0 { @@ -131,26 +117,18 @@ impl Vecs { exit, )?; - self.dateindex_to_stoch_rsi_k.compute_sma( - starting_indexes.dateindex, - &self.dateindex_to_stoch_rsi, - 3, - exit, - )?; + self.stoch_rsi_k + .compute_sma(starting_indexes.dateindex, &self.stoch_rsi, 3, exit)?; - self.dateindex_to_stoch_rsi_d.compute_sma( - starting_indexes.dateindex, - &self.dateindex_to_stoch_rsi_k, - 3, - exit, - )?; + self.stoch_rsi_d + .compute_sma(starting_indexes.dateindex, &self.stoch_rsi_k, 3, exit)?; // Stochastic Oscillator: K = (close - low_14) / (high_14 - low_14) * 100 { - let close = &price.usd.timeindexes_to_price_close.dateindex; - let low_2w = &range.indexes_to_price_2w_min.dateindex; - let high_2w = &range.indexes_to_price_2w_max.dateindex; - self.dateindex_to_stoch_k.compute_transform3( + let close = &price.usd.split.close.dateindex; + let low_2w = &range.price_2w_min.dateindex; + let high_2w = &range.price_2w_max.dateindex; + 
self.stoch_k.compute_transform3( starting_indexes.dateindex, close, low_2w, @@ -167,23 +145,19 @@ impl Vecs { exit, )?; - self.dateindex_to_stoch_d.compute_sma( - starting_indexes.dateindex, - &self.dateindex_to_stoch_k, - 3, - exit, - )?; + self.stoch_d + .compute_sma(starting_indexes.dateindex, &self.stoch_k, 3, exit)?; } let amount_range = &distribution.utxo_cohorts.amount_range; - // KISS: dateindex is now always present (not Option) + let supply_vecs: Vec<_> = amount_range .iter() - .map(|c| &c.metrics.supply.indexes_to_supply.sats_dateindex) + .map(|c| &c.metrics.supply.supply.sats.dateindex.0) .collect(); let count_vecs: Vec<_> = amount_range .iter() - .map(|c| &c.metrics.supply.indexes_to_utxo_count.dateindex) + .map(|c| &c.metrics.outputs.utxo_count.dateindex) .collect(); if let Some(first_supply) = supply_vecs.first() @@ -198,7 +172,7 @@ impl Vecs { let mut supply_iters: Vec<_> = supply_vecs.iter().map(|v| v.into_iter()).collect(); let mut count_iters: Vec<_> = count_vecs.iter().map(|v| v.into_iter()).collect(); - self.dateindex_to_gini.compute_to( + self.gini.compute_to( starting_indexes.dateindex, first_supply.len(), version, diff --git a/crates/brk_computer/src/market/indicators/import.rs b/crates/brk_computer/src/market/indicators/import.rs index 212f3faf3..bce0769bc 100644 --- a/crates/brk_computer/src/market/indicators/import.rs +++ b/crates/brk_computer/src/market/indicators/import.rs @@ -5,7 +5,7 @@ use vecdb::{Database, EagerVec, ImportableVec, IterableCloneableVec, LazyVecFrom use super::{super::moving_average, Vecs}; use crate::{ distribution, indexes, - internal::{BinaryDateLast, ComputedDateLast, DifferenceF32, Ratio32, RsiFormula}, + internal::{ComputedDateLast, DifferenceF32, LazyBinaryDateLast, Ratio32, RsiFormula}, transactions, }; @@ -24,96 +24,87 @@ impl Vecs { let v = version + VERSION; // NVT = Market Cap (KISS DateIndex) / Volume (Height) - let indexes_to_nvt = distribution + let nvt = distribution .utxo_cohorts .all .metrics .supply 
- .indexes_to_supply + .supply .dollars .as_ref() - .zip(transactions.volume.indexes_to_sent_sum.dollars.as_ref()) + .zip(transactions.volume.sent_sum.dollars.as_ref()) .map(|(market_cap, volume)| { - // KISS: market_cap is ComputedVecsDateLast, volume is ComputedBlockSum - BinaryDateLast::from_dateindex_last_and_height_sum::( + LazyBinaryDateLast::from_block_last_and_height_sum::( "nvt", v, market_cap, volume, ) }); - let dateindex_to_rsi_gains = EagerVec::forced_import(db, "rsi_gains", v)?; - let dateindex_to_rsi_losses = EagerVec::forced_import(db, "rsi_losses", v)?; + let rsi_gains = EagerVec::forced_import(db, "rsi_gains", v)?; + let rsi_losses = EagerVec::forced_import(db, "rsi_losses", v)?; // v1: Changed from SMA to RMA (Wilder's smoothing) - let dateindex_to_rsi_average_gain_14d = + let rsi_average_gain_14d = EagerVec::forced_import(db, "rsi_average_gain_14d", v + Version::ONE)?; - let dateindex_to_rsi_average_loss_14d = + let rsi_average_loss_14d = EagerVec::forced_import(db, "rsi_average_loss_14d", v + Version::ONE)?; - let dateindex_to_rsi_14d = LazyVecFrom2::transformed::( + let rsi_14d = LazyVecFrom2::transformed::( "rsi_14d", v, - dateindex_to_rsi_average_gain_14d.boxed_clone(), - dateindex_to_rsi_average_loss_14d.boxed_clone(), + rsi_average_gain_14d.boxed_clone(), + rsi_average_loss_14d.boxed_clone(), ); - let dateindex_to_macd_line = EagerVec::forced_import(db, "macd_line", v)?; - let dateindex_to_macd_signal = EagerVec::forced_import(db, "macd_signal", v)?; - let dateindex_to_macd_histogram = LazyVecFrom2::transformed::( + let macd_line = EagerVec::forced_import(db, "macd_line", v)?; + let macd_signal = EagerVec::forced_import(db, "macd_signal", v)?; + let macd_histogram = LazyVecFrom2::transformed::( "macd_histogram", v, - dateindex_to_macd_line.boxed_clone(), - dateindex_to_macd_signal.boxed_clone(), + macd_line.boxed_clone(), + macd_signal.boxed_clone(), ); - let dateindex_to_rsi_14d_min = EagerVec::forced_import(db, "rsi_14d_min", v)?; - let 
dateindex_to_rsi_14d_max = EagerVec::forced_import(db, "rsi_14d_max", v)?; - let dateindex_to_stoch_rsi = EagerVec::forced_import(db, "stoch_rsi", v)?; - let dateindex_to_stoch_rsi_k = EagerVec::forced_import(db, "stoch_rsi_k", v)?; - let dateindex_to_stoch_rsi_d = EagerVec::forced_import(db, "stoch_rsi_d", v)?; + let rsi_14d_min = EagerVec::forced_import(db, "rsi_14d_min", v)?; + let rsi_14d_max = EagerVec::forced_import(db, "rsi_14d_max", v)?; + let stoch_rsi = EagerVec::forced_import(db, "stoch_rsi", v)?; + let stoch_rsi_k = EagerVec::forced_import(db, "stoch_rsi_k", v)?; + let stoch_rsi_d = EagerVec::forced_import(db, "stoch_rsi_d", v)?; - let dateindex_to_stoch_k = EagerVec::forced_import(db, "stoch_k", v)?; - let dateindex_to_stoch_d = EagerVec::forced_import(db, "stoch_d", v)?; + let stoch_k = EagerVec::forced_import(db, "stoch_k", v)?; + let stoch_d = EagerVec::forced_import(db, "stoch_d", v)?; - let dateindex_to_gini = EagerVec::forced_import(db, "gini", v)?; + let gini = EagerVec::forced_import(db, "gini", v)?; // Pi Cycle Top: 111d SMA / (2 * 350d SMA) - signals top when > 1 - let dateindex_to_pi_cycle = - moving_average - .indexes_to_price_111d_sma - .price - .as_ref() - .map(|sma_111| { - LazyVecFrom2::transformed::( - "pi_cycle", - v, - sma_111.dateindex.boxed_clone(), - moving_average - .indexes_to_price_350d_sma_x2 - .dateindex - .boxed_clone(), - ) - }); + let pi_cycle = moving_average.price_111d_sma.price.as_ref().map(|sma_111| { + LazyVecFrom2::transformed::( + "pi_cycle", + v, + sma_111.dateindex.boxed_clone(), + moving_average.price_350d_sma_x2.dateindex.boxed_clone(), + ) + }); Ok(Self { - indexes_to_puell_multiple: compute_dollars + puell_multiple: compute_dollars .then(|| ComputedDateLast::forced_import(db, "puell_multiple", v, indexes)) .transpose()?, - indexes_to_nvt, - dateindex_to_rsi_gains, - dateindex_to_rsi_losses, - dateindex_to_rsi_average_gain_14d, - dateindex_to_rsi_average_loss_14d, - dateindex_to_rsi_14d, - 
dateindex_to_rsi_14d_min, - dateindex_to_rsi_14d_max, - dateindex_to_stoch_rsi, - dateindex_to_stoch_rsi_k, - dateindex_to_stoch_rsi_d, - dateindex_to_stoch_k, - dateindex_to_stoch_d, - dateindex_to_pi_cycle, - dateindex_to_macd_line, - dateindex_to_macd_signal, - dateindex_to_macd_histogram, - dateindex_to_gini, + nvt, + rsi_gains, + rsi_losses, + rsi_average_gain_14d, + rsi_average_loss_14d, + rsi_14d, + rsi_14d_min, + rsi_14d_max, + stoch_rsi, + stoch_rsi_k, + stoch_rsi_d, + stoch_k, + stoch_d, + pi_cycle, + macd_line, + macd_signal, + macd_histogram, + gini, }) } } diff --git a/crates/brk_computer/src/market/indicators/vecs.rs b/crates/brk_computer/src/market/indicators/vecs.rs index 0f87aa2d4..b56fe5c35 100644 --- a/crates/brk_computer/src/market/indicators/vecs.rs +++ b/crates/brk_computer/src/market/indicators/vecs.rs @@ -2,36 +2,34 @@ use brk_traversable::Traversable; use brk_types::{DateIndex, Dollars, StoredF32}; use vecdb::{EagerVec, LazyVecFrom2, PcoVec}; -use crate::internal::{BinaryDateLast, ComputedDateLast}; +use crate::internal::{ComputedDateLast, LazyBinaryDateLast}; #[derive(Clone, Traversable)] pub struct Vecs { - pub indexes_to_puell_multiple: Option>, - pub indexes_to_nvt: Option>, + pub puell_multiple: Option>, + pub nvt: Option>, - pub dateindex_to_rsi_gains: EagerVec>, - pub dateindex_to_rsi_losses: EagerVec>, - pub dateindex_to_rsi_average_gain_14d: EagerVec>, - pub dateindex_to_rsi_average_loss_14d: EagerVec>, - pub dateindex_to_rsi_14d: - LazyVecFrom2, + pub rsi_gains: EagerVec>, + pub rsi_losses: EagerVec>, + pub rsi_average_gain_14d: EagerVec>, + pub rsi_average_loss_14d: EagerVec>, + pub rsi_14d: LazyVecFrom2, + pub rsi_14d_min: EagerVec>, + pub rsi_14d_max: EagerVec>, + pub stoch_rsi: EagerVec>, + pub stoch_rsi_k: EagerVec>, + pub stoch_rsi_d: EagerVec>, - pub dateindex_to_rsi_14d_min: EagerVec>, - pub dateindex_to_rsi_14d_max: EagerVec>, - pub dateindex_to_stoch_rsi: EagerVec>, - pub dateindex_to_stoch_rsi_k: EagerVec>, - pub 
dateindex_to_stoch_rsi_d: EagerVec>, + pub stoch_k: EagerVec>, + pub stoch_d: EagerVec>, - pub dateindex_to_stoch_k: EagerVec>, - pub dateindex_to_stoch_d: EagerVec>, - - pub dateindex_to_pi_cycle: + pub pi_cycle: Option>, - pub dateindex_to_macd_line: EagerVec>, - pub dateindex_to_macd_signal: EagerVec>, - pub dateindex_to_macd_histogram: + pub macd_line: EagerVec>, + pub macd_signal: EagerVec>, + pub macd_histogram: LazyVecFrom2, - pub dateindex_to_gini: EagerVec>, + pub gini: EagerVec>, } diff --git a/crates/brk_computer/src/market/lookback/compute.rs b/crates/brk_computer/src/market/lookback/compute.rs index fec98bf1a..57315e9f8 100644 --- a/crates/brk_computer/src/market/lookback/compute.rs +++ b/crates/brk_computer/src/market/lookback/compute.rs @@ -2,7 +2,7 @@ use brk_error::Result; use vecdb::Exit; use super::Vecs; -use crate::{price, ComputeIndexes}; +use crate::{ComputeIndexes, price}; impl Vecs { pub fn compute( @@ -11,7 +11,7 @@ impl Vecs { starting_indexes: &ComputeIndexes, exit: &Exit, ) -> Result<()> { - let close = &price.usd.timeindexes_to_price_close.dateindex; + let close = &price.usd.split.close.dateindex; for (price_ago, days) in self.price_ago.iter_mut_with_days() { price_ago.compute_all(starting_indexes, exit, |v| { diff --git a/crates/brk_computer/src/market/moving_average/compute.rs b/crates/brk_computer/src/market/moving_average/compute.rs index 38e043113..0deee77b0 100644 --- a/crates/brk_computer/src/market/moving_average/compute.rs +++ b/crates/brk_computer/src/market/moving_average/compute.rs @@ -2,7 +2,7 @@ use brk_error::Result; use vecdb::Exit; use super::Vecs; -use crate::{price, ComputeIndexes}; +use crate::{ComputeIndexes, price}; impl Vecs { pub fn compute( @@ -11,25 +11,25 @@ impl Vecs { starting_indexes: &ComputeIndexes, exit: &Exit, ) -> Result<()> { - let close = &price.usd.timeindexes_to_price_close.dateindex; + let close = &price.usd.split.close.dateindex; for (sma, period) in [ - (&mut self.indexes_to_price_1w_sma, 7), - 
(&mut self.indexes_to_price_8d_sma, 8), - (&mut self.indexes_to_price_13d_sma, 13), - (&mut self.indexes_to_price_21d_sma, 21), - (&mut self.indexes_to_price_1m_sma, 30), - (&mut self.indexes_to_price_34d_sma, 34), - (&mut self.indexes_to_price_55d_sma, 55), - (&mut self.indexes_to_price_89d_sma, 89), - (&mut self.indexes_to_price_111d_sma, 111), - (&mut self.indexes_to_price_144d_sma, 144), - (&mut self.indexes_to_price_200d_sma, 200), - (&mut self.indexes_to_price_350d_sma, 350), - (&mut self.indexes_to_price_1y_sma, 365), - (&mut self.indexes_to_price_2y_sma, 2 * 365), - (&mut self.indexes_to_price_200w_sma, 200 * 7), - (&mut self.indexes_to_price_4y_sma, 4 * 365), + (&mut self.price_1w_sma, 7), + (&mut self.price_8d_sma, 8), + (&mut self.price_13d_sma, 13), + (&mut self.price_21d_sma, 21), + (&mut self.price_1m_sma, 30), + (&mut self.price_34d_sma, 34), + (&mut self.price_55d_sma, 55), + (&mut self.price_89d_sma, 89), + (&mut self.price_111d_sma, 111), + (&mut self.price_144d_sma, 144), + (&mut self.price_200d_sma, 200), + (&mut self.price_350d_sma, 350), + (&mut self.price_1y_sma, 365), + (&mut self.price_2y_sma, 2 * 365), + (&mut self.price_200w_sma, 200 * 7), + (&mut self.price_4y_sma, 4 * 365), ] { sma.compute_all(price, starting_indexes, exit, |v| { v.compute_sma(starting_indexes.dateindex, close, period, exit)?; @@ -38,22 +38,22 @@ impl Vecs { } for (ema, period) in [ - (&mut self.indexes_to_price_1w_ema, 7), - (&mut self.indexes_to_price_8d_ema, 8), - (&mut self.indexes_to_price_12d_ema, 12), - (&mut self.indexes_to_price_13d_ema, 13), - (&mut self.indexes_to_price_21d_ema, 21), - (&mut self.indexes_to_price_26d_ema, 26), - (&mut self.indexes_to_price_1m_ema, 30), - (&mut self.indexes_to_price_34d_ema, 34), - (&mut self.indexes_to_price_55d_ema, 55), - (&mut self.indexes_to_price_89d_ema, 89), - (&mut self.indexes_to_price_144d_ema, 144), - (&mut self.indexes_to_price_200d_ema, 200), - (&mut self.indexes_to_price_1y_ema, 365), - (&mut 
self.indexes_to_price_2y_ema, 2 * 365), - (&mut self.indexes_to_price_200w_ema, 200 * 7), - (&mut self.indexes_to_price_4y_ema, 4 * 365), + (&mut self.price_1w_ema, 7), + (&mut self.price_8d_ema, 8), + (&mut self.price_12d_ema, 12), + (&mut self.price_13d_ema, 13), + (&mut self.price_21d_ema, 21), + (&mut self.price_26d_ema, 26), + (&mut self.price_1m_ema, 30), + (&mut self.price_34d_ema, 34), + (&mut self.price_55d_ema, 55), + (&mut self.price_89d_ema, 89), + (&mut self.price_144d_ema, 144), + (&mut self.price_200d_ema, 200), + (&mut self.price_1y_ema, 365), + (&mut self.price_2y_ema, 2 * 365), + (&mut self.price_200w_ema, 200 * 7), + (&mut self.price_4y_ema, 4 * 365), ] { ema.compute_all(price, starting_indexes, exit, |v| { v.compute_ema(starting_indexes.dateindex, close, period, exit)?; diff --git a/crates/brk_computer/src/market/moving_average/import.rs b/crates/brk_computer/src/market/moving_average/import.rs index 16c47dae2..b65388317 100644 --- a/crates/brk_computer/src/market/moving_average/import.rs +++ b/crates/brk_computer/src/market/moving_average/import.rs @@ -16,7 +16,7 @@ impl Vecs { indexes: &indexes::Vecs, price: Option<&price::Vecs>, ) -> Result { - let indexes_to_price_1w_sma = ComputedRatioVecsDate::forced_import( + let price_1w_sma = ComputedRatioVecsDate::forced_import( db, "price_1w_sma", None, @@ -25,7 +25,7 @@ impl Vecs { true, price, )?; - let indexes_to_price_8d_sma = ComputedRatioVecsDate::forced_import( + let price_8d_sma = ComputedRatioVecsDate::forced_import( db, "price_8d_sma", None, @@ -34,7 +34,7 @@ impl Vecs { true, price, )?; - let indexes_to_price_13d_sma = ComputedRatioVecsDate::forced_import( + let price_13d_sma = ComputedRatioVecsDate::forced_import( db, "price_13d_sma", None, @@ -43,7 +43,7 @@ impl Vecs { true, price, )?; - let indexes_to_price_21d_sma = ComputedRatioVecsDate::forced_import( + let price_21d_sma = ComputedRatioVecsDate::forced_import( db, "price_21d_sma", None, @@ -52,7 +52,7 @@ impl Vecs { true, price, )?; - 
let indexes_to_price_1m_sma = ComputedRatioVecsDate::forced_import( + let price_1m_sma = ComputedRatioVecsDate::forced_import( db, "price_1m_sma", None, @@ -61,7 +61,7 @@ impl Vecs { true, price, )?; - let indexes_to_price_34d_sma = ComputedRatioVecsDate::forced_import( + let price_34d_sma = ComputedRatioVecsDate::forced_import( db, "price_34d_sma", None, @@ -70,7 +70,7 @@ impl Vecs { true, price, )?; - let indexes_to_price_55d_sma = ComputedRatioVecsDate::forced_import( + let price_55d_sma = ComputedRatioVecsDate::forced_import( db, "price_55d_sma", None, @@ -79,7 +79,7 @@ impl Vecs { true, price, )?; - let indexes_to_price_89d_sma = ComputedRatioVecsDate::forced_import( + let price_89d_sma = ComputedRatioVecsDate::forced_import( db, "price_89d_sma", None, @@ -88,7 +88,7 @@ impl Vecs { true, price, )?; - let indexes_to_price_111d_sma = ComputedRatioVecsDate::forced_import( + let price_111d_sma = ComputedRatioVecsDate::forced_import( db, "price_111d_sma", None, @@ -97,7 +97,7 @@ impl Vecs { true, price, )?; - let indexes_to_price_144d_sma = ComputedRatioVecsDate::forced_import( + let price_144d_sma = ComputedRatioVecsDate::forced_import( db, "price_144d_sma", None, @@ -106,7 +106,7 @@ impl Vecs { true, price, )?; - let indexes_to_price_200d_sma = ComputedRatioVecsDate::forced_import( + let price_200d_sma = ComputedRatioVecsDate::forced_import( db, "price_200d_sma", None, @@ -115,7 +115,7 @@ impl Vecs { true, price, )?; - let indexes_to_price_350d_sma = ComputedRatioVecsDate::forced_import( + let price_350d_sma = ComputedRatioVecsDate::forced_import( db, "price_350d_sma", None, @@ -124,7 +124,7 @@ impl Vecs { true, price, )?; - let indexes_to_price_1y_sma = ComputedRatioVecsDate::forced_import( + let price_1y_sma = ComputedRatioVecsDate::forced_import( db, "price_1y_sma", None, @@ -133,7 +133,7 @@ impl Vecs { true, price, )?; - let indexes_to_price_2y_sma = ComputedRatioVecsDate::forced_import( + let price_2y_sma = ComputedRatioVecsDate::forced_import( db, 
"price_2y_sma", None, @@ -142,7 +142,7 @@ impl Vecs { true, price, )?; - let indexes_to_price_200w_sma = ComputedRatioVecsDate::forced_import( + let price_200w_sma = ComputedRatioVecsDate::forced_import( db, "price_200w_sma", None, @@ -151,7 +151,7 @@ impl Vecs { true, price, )?; - let indexes_to_price_4y_sma = ComputedRatioVecsDate::forced_import( + let price_4y_sma = ComputedRatioVecsDate::forced_import( db, "price_4y_sma", None, @@ -161,7 +161,7 @@ impl Vecs { price, )?; - let indexes_to_price_1w_ema = ComputedRatioVecsDate::forced_import( + let price_1w_ema = ComputedRatioVecsDate::forced_import( db, "price_1w_ema", None, @@ -170,7 +170,7 @@ impl Vecs { true, price, )?; - let indexes_to_price_8d_ema = ComputedRatioVecsDate::forced_import( + let price_8d_ema = ComputedRatioVecsDate::forced_import( db, "price_8d_ema", None, @@ -179,7 +179,7 @@ impl Vecs { true, price, )?; - let indexes_to_price_12d_ema = ComputedRatioVecsDate::forced_import( + let price_12d_ema = ComputedRatioVecsDate::forced_import( db, "price_12d_ema", None, @@ -188,7 +188,7 @@ impl Vecs { true, price, )?; - let indexes_to_price_13d_ema = ComputedRatioVecsDate::forced_import( + let price_13d_ema = ComputedRatioVecsDate::forced_import( db, "price_13d_ema", None, @@ -197,7 +197,7 @@ impl Vecs { true, price, )?; - let indexes_to_price_21d_ema = ComputedRatioVecsDate::forced_import( + let price_21d_ema = ComputedRatioVecsDate::forced_import( db, "price_21d_ema", None, @@ -206,7 +206,7 @@ impl Vecs { true, price, )?; - let indexes_to_price_26d_ema = ComputedRatioVecsDate::forced_import( + let price_26d_ema = ComputedRatioVecsDate::forced_import( db, "price_26d_ema", None, @@ -215,7 +215,7 @@ impl Vecs { true, price, )?; - let indexes_to_price_1m_ema = ComputedRatioVecsDate::forced_import( + let price_1m_ema = ComputedRatioVecsDate::forced_import( db, "price_1m_ema", None, @@ -224,7 +224,7 @@ impl Vecs { true, price, )?; - let indexes_to_price_34d_ema = ComputedRatioVecsDate::forced_import( + let 
price_34d_ema = ComputedRatioVecsDate::forced_import( db, "price_34d_ema", None, @@ -233,7 +233,7 @@ impl Vecs { true, price, )?; - let indexes_to_price_55d_ema = ComputedRatioVecsDate::forced_import( + let price_55d_ema = ComputedRatioVecsDate::forced_import( db, "price_55d_ema", None, @@ -242,7 +242,7 @@ impl Vecs { true, price, )?; - let indexes_to_price_89d_ema = ComputedRatioVecsDate::forced_import( + let price_89d_ema = ComputedRatioVecsDate::forced_import( db, "price_89d_ema", None, @@ -251,7 +251,7 @@ impl Vecs { true, price, )?; - let indexes_to_price_144d_ema = ComputedRatioVecsDate::forced_import( + let price_144d_ema = ComputedRatioVecsDate::forced_import( db, "price_144d_ema", None, @@ -260,7 +260,7 @@ impl Vecs { true, price, )?; - let indexes_to_price_200d_ema = ComputedRatioVecsDate::forced_import( + let price_200d_ema = ComputedRatioVecsDate::forced_import( db, "price_200d_ema", None, @@ -269,7 +269,7 @@ impl Vecs { true, price, )?; - let indexes_to_price_1y_ema = ComputedRatioVecsDate::forced_import( + let price_1y_ema = ComputedRatioVecsDate::forced_import( db, "price_1y_ema", None, @@ -278,7 +278,7 @@ impl Vecs { true, price, )?; - let indexes_to_price_2y_ema = ComputedRatioVecsDate::forced_import( + let price_2y_ema = ComputedRatioVecsDate::forced_import( db, "price_2y_ema", None, @@ -287,7 +287,7 @@ impl Vecs { true, price, )?; - let indexes_to_price_200w_ema = ComputedRatioVecsDate::forced_import( + let price_200w_ema = ComputedRatioVecsDate::forced_import( db, "price_200w_ema", None, @@ -296,7 +296,7 @@ impl Vecs { true, price, )?; - let indexes_to_price_4y_ema = ComputedRatioVecsDate::forced_import( + let price_4y_ema = ComputedRatioVecsDate::forced_import( db, "price_4y_ema", None, @@ -306,66 +306,63 @@ impl Vecs { price, )?; - let price_200d_sma_source = indexes_to_price_200d_sma.price.as_ref().unwrap(); - let indexes_to_price_200d_sma_x2_4 = - LazyDateLast::from_source::>( - "price_200d_sma_x2_4", - version, - price_200d_sma_source, - ); 
- let indexes_to_price_200d_sma_x0_8 = - LazyDateLast::from_source::>( - "price_200d_sma_x0_8", - version, - price_200d_sma_source, - ); + let price_200d_sma_source = price_200d_sma.price.as_ref().unwrap(); + let price_200d_sma_x2_4 = LazyDateLast::from_source::>( + "price_200d_sma_x2_4", + version, + price_200d_sma_source, + ); + let price_200d_sma_x0_8 = LazyDateLast::from_source::>( + "price_200d_sma_x0_8", + version, + price_200d_sma_source, + ); - let price_350d_sma_source = indexes_to_price_350d_sma.price.as_ref().unwrap(); - let indexes_to_price_350d_sma_x2 = - LazyDateLast::from_source::>( - "price_350d_sma_x2", - version, - price_350d_sma_source, - ); + let price_350d_sma_source = price_350d_sma.price.as_ref().unwrap(); + let price_350d_sma_x2 = LazyDateLast::from_source::>( + "price_350d_sma_x2", + version, + price_350d_sma_source, + ); Ok(Self { - indexes_to_price_1w_sma, - indexes_to_price_8d_sma, - indexes_to_price_13d_sma, - indexes_to_price_21d_sma, - indexes_to_price_1m_sma, - indexes_to_price_34d_sma, - indexes_to_price_55d_sma, - indexes_to_price_89d_sma, - indexes_to_price_111d_sma, - indexes_to_price_144d_sma, - indexes_to_price_200d_sma, - indexes_to_price_350d_sma, - indexes_to_price_1y_sma, - indexes_to_price_2y_sma, - indexes_to_price_200w_sma, - indexes_to_price_4y_sma, + price_1w_sma, + price_8d_sma, + price_13d_sma, + price_21d_sma, + price_1m_sma, + price_34d_sma, + price_55d_sma, + price_89d_sma, + price_111d_sma, + price_144d_sma, + price_200d_sma, + price_350d_sma, + price_1y_sma, + price_2y_sma, + price_200w_sma, + price_4y_sma, - indexes_to_price_1w_ema, - indexes_to_price_8d_ema, - indexes_to_price_12d_ema, - indexes_to_price_13d_ema, - indexes_to_price_21d_ema, - indexes_to_price_26d_ema, - indexes_to_price_1m_ema, - indexes_to_price_34d_ema, - indexes_to_price_55d_ema, - indexes_to_price_89d_ema, - indexes_to_price_144d_ema, - indexes_to_price_200d_ema, - indexes_to_price_1y_ema, - indexes_to_price_2y_ema, - 
indexes_to_price_200w_ema, - indexes_to_price_4y_ema, + price_1w_ema, + price_8d_ema, + price_12d_ema, + price_13d_ema, + price_21d_ema, + price_26d_ema, + price_1m_ema, + price_34d_ema, + price_55d_ema, + price_89d_ema, + price_144d_ema, + price_200d_ema, + price_1y_ema, + price_2y_ema, + price_200w_ema, + price_4y_ema, - indexes_to_price_200d_sma_x2_4, - indexes_to_price_200d_sma_x0_8, - indexes_to_price_350d_sma_x2, + price_200d_sma_x2_4, + price_200d_sma_x0_8, + price_350d_sma_x2, }) } } diff --git a/crates/brk_computer/src/market/moving_average/vecs.rs b/crates/brk_computer/src/market/moving_average/vecs.rs index 67f94c2c6..f6ab01dca 100644 --- a/crates/brk_computer/src/market/moving_average/vecs.rs +++ b/crates/brk_computer/src/market/moving_average/vecs.rs @@ -6,41 +6,41 @@ use crate::internal::{ComputedRatioVecsDate, LazyDateLast}; /// Simple and exponential moving average metrics #[derive(Clone, Traversable)] pub struct Vecs { - pub indexes_to_price_1w_sma: ComputedRatioVecsDate, - pub indexes_to_price_8d_sma: ComputedRatioVecsDate, - pub indexes_to_price_13d_sma: ComputedRatioVecsDate, - pub indexes_to_price_21d_sma: ComputedRatioVecsDate, - pub indexes_to_price_1m_sma: ComputedRatioVecsDate, - pub indexes_to_price_34d_sma: ComputedRatioVecsDate, - pub indexes_to_price_55d_sma: ComputedRatioVecsDate, - pub indexes_to_price_89d_sma: ComputedRatioVecsDate, - pub indexes_to_price_111d_sma: ComputedRatioVecsDate, - pub indexes_to_price_144d_sma: ComputedRatioVecsDate, - pub indexes_to_price_200d_sma: ComputedRatioVecsDate, - pub indexes_to_price_350d_sma: ComputedRatioVecsDate, - pub indexes_to_price_1y_sma: ComputedRatioVecsDate, - pub indexes_to_price_2y_sma: ComputedRatioVecsDate, - pub indexes_to_price_200w_sma: ComputedRatioVecsDate, - pub indexes_to_price_4y_sma: ComputedRatioVecsDate, + pub price_1w_sma: ComputedRatioVecsDate, + pub price_8d_sma: ComputedRatioVecsDate, + pub price_13d_sma: ComputedRatioVecsDate, + pub price_21d_sma: 
ComputedRatioVecsDate, + pub price_1m_sma: ComputedRatioVecsDate, + pub price_34d_sma: ComputedRatioVecsDate, + pub price_55d_sma: ComputedRatioVecsDate, + pub price_89d_sma: ComputedRatioVecsDate, + pub price_111d_sma: ComputedRatioVecsDate, + pub price_144d_sma: ComputedRatioVecsDate, + pub price_200d_sma: ComputedRatioVecsDate, + pub price_350d_sma: ComputedRatioVecsDate, + pub price_1y_sma: ComputedRatioVecsDate, + pub price_2y_sma: ComputedRatioVecsDate, + pub price_200w_sma: ComputedRatioVecsDate, + pub price_4y_sma: ComputedRatioVecsDate, - pub indexes_to_price_1w_ema: ComputedRatioVecsDate, - pub indexes_to_price_8d_ema: ComputedRatioVecsDate, - pub indexes_to_price_12d_ema: ComputedRatioVecsDate, - pub indexes_to_price_13d_ema: ComputedRatioVecsDate, - pub indexes_to_price_21d_ema: ComputedRatioVecsDate, - pub indexes_to_price_26d_ema: ComputedRatioVecsDate, - pub indexes_to_price_1m_ema: ComputedRatioVecsDate, - pub indexes_to_price_34d_ema: ComputedRatioVecsDate, - pub indexes_to_price_55d_ema: ComputedRatioVecsDate, - pub indexes_to_price_89d_ema: ComputedRatioVecsDate, - pub indexes_to_price_144d_ema: ComputedRatioVecsDate, - pub indexes_to_price_200d_ema: ComputedRatioVecsDate, - pub indexes_to_price_1y_ema: ComputedRatioVecsDate, - pub indexes_to_price_2y_ema: ComputedRatioVecsDate, - pub indexes_to_price_200w_ema: ComputedRatioVecsDate, - pub indexes_to_price_4y_ema: ComputedRatioVecsDate, + pub price_1w_ema: ComputedRatioVecsDate, + pub price_8d_ema: ComputedRatioVecsDate, + pub price_12d_ema: ComputedRatioVecsDate, + pub price_13d_ema: ComputedRatioVecsDate, + pub price_21d_ema: ComputedRatioVecsDate, + pub price_26d_ema: ComputedRatioVecsDate, + pub price_1m_ema: ComputedRatioVecsDate, + pub price_34d_ema: ComputedRatioVecsDate, + pub price_55d_ema: ComputedRatioVecsDate, + pub price_89d_ema: ComputedRatioVecsDate, + pub price_144d_ema: ComputedRatioVecsDate, + pub price_200d_ema: ComputedRatioVecsDate, + pub price_1y_ema: ComputedRatioVecsDate, 
+ pub price_2y_ema: ComputedRatioVecsDate, + pub price_200w_ema: ComputedRatioVecsDate, + pub price_4y_ema: ComputedRatioVecsDate, - pub indexes_to_price_200d_sma_x2_4: LazyDateLast, - pub indexes_to_price_200d_sma_x0_8: LazyDateLast, - pub indexes_to_price_350d_sma_x2: LazyDateLast, + pub price_200d_sma_x2_4: LazyDateLast, + pub price_200d_sma_x0_8: LazyDateLast, + pub price_350d_sma_x2: LazyDateLast, } diff --git a/crates/brk_computer/src/market/range/compute.rs b/crates/brk_computer/src/market/range/compute.rs index e1b124fc3..2f4df1fb4 100644 --- a/crates/brk_computer/src/market/range/compute.rs +++ b/crates/brk_computer/src/market/range/compute.rs @@ -3,7 +3,7 @@ use brk_types::StoredF32; use vecdb::Exit; use super::Vecs; -use crate::{price, ComputeIndexes}; +use crate::{ComputeIndexes, price}; impl Vecs { pub fn compute( @@ -12,46 +12,46 @@ impl Vecs { starting_indexes: &ComputeIndexes, exit: &Exit, ) -> Result<()> { - let open = &price.usd.timeindexes_to_price_open.dateindex; - let low = &price.usd.timeindexes_to_price_low.dateindex; - let high = &price.usd.timeindexes_to_price_high.dateindex; + let open = &price.usd.split.open.dateindex; + let low = &price.usd.split.low.dateindex; + let high = &price.usd.split.high.dateindex; - self.indexes_to_price_1w_min.compute_all(starting_indexes, exit, |v| { + self.price_1w_min.compute_all(starting_indexes, exit, |v| { v.compute_min(starting_indexes.dateindex, low, 7, exit)?; Ok(()) })?; - self.indexes_to_price_1w_max.compute_all(starting_indexes, exit, |v| { + self.price_1w_max.compute_all(starting_indexes, exit, |v| { v.compute_max(starting_indexes.dateindex, high, 7, exit)?; Ok(()) })?; - self.indexes_to_price_2w_min.compute_all(starting_indexes, exit, |v| { + self.price_2w_min.compute_all(starting_indexes, exit, |v| { v.compute_min(starting_indexes.dateindex, low, 14, exit)?; Ok(()) })?; - self.indexes_to_price_2w_max.compute_all(starting_indexes, exit, |v| { + self.price_2w_max.compute_all(starting_indexes, exit, 
|v| { v.compute_max(starting_indexes.dateindex, high, 14, exit)?; Ok(()) })?; - self.indexes_to_price_1m_min.compute_all(starting_indexes, exit, |v| { + self.price_1m_min.compute_all(starting_indexes, exit, |v| { v.compute_min(starting_indexes.dateindex, low, 30, exit)?; Ok(()) })?; - self.indexes_to_price_1m_max.compute_all(starting_indexes, exit, |v| { + self.price_1m_max.compute_all(starting_indexes, exit, |v| { v.compute_max(starting_indexes.dateindex, high, 30, exit)?; Ok(()) })?; - self.indexes_to_price_1y_min.compute_all(starting_indexes, exit, |v| { + self.price_1y_min.compute_all(starting_indexes, exit, |v| { v.compute_min(starting_indexes.dateindex, low, 365, exit)?; Ok(()) })?; - self.indexes_to_price_1y_max.compute_all(starting_indexes, exit, |v| { + self.price_1y_max.compute_all(starting_indexes, exit, |v| { v.compute_max(starting_indexes.dateindex, high, 365, exit)?; Ok(()) })?; @@ -77,14 +77,15 @@ impl Vecs { exit, )?; - self.indexes_to_price_2w_choppiness_index.compute_all(starting_indexes, exit, |v| { + self.price_2w_choppiness_index + .compute_all(starting_indexes, exit, |v| { let n = 14; let log10n = (n as f32).log10(); v.compute_transform3( starting_indexes.dateindex, &self.dateindex_to_price_true_range_2w_sum, - &self.indexes_to_price_2w_max.dateindex, - &self.indexes_to_price_2w_min.dateindex, + &self.price_2w_max.dateindex, + &self.price_2w_min.dateindex, |(i, tr_sum, max, min, ..)| { ( i, diff --git a/crates/brk_computer/src/market/range/import.rs b/crates/brk_computer/src/market/range/import.rs index d9b591634..e9256d23a 100644 --- a/crates/brk_computer/src/market/range/import.rs +++ b/crates/brk_computer/src/market/range/import.rs @@ -10,49 +10,49 @@ impl Vecs { let v1 = Version::ONE; Ok(Self { - indexes_to_price_1w_min: ComputedDateLast::forced_import( + price_1w_min: ComputedDateLast::forced_import( db, "price_1w_min", version + v1, indexes, )?, - indexes_to_price_1w_max: ComputedDateLast::forced_import( + price_1w_max: 
ComputedDateLast::forced_import( db, "price_1w_max", version + v1, indexes, )?, - indexes_to_price_2w_min: ComputedDateLast::forced_import( + price_2w_min: ComputedDateLast::forced_import( db, "price_2w_min", version + v1, indexes, )?, - indexes_to_price_2w_max: ComputedDateLast::forced_import( + price_2w_max: ComputedDateLast::forced_import( db, "price_2w_max", version + v1, indexes, )?, - indexes_to_price_1m_min: ComputedDateLast::forced_import( + price_1m_min: ComputedDateLast::forced_import( db, "price_1m_min", version + v1, indexes, )?, - indexes_to_price_1m_max: ComputedDateLast::forced_import( + price_1m_max: ComputedDateLast::forced_import( db, "price_1m_max", version + v1, indexes, )?, - indexes_to_price_1y_min: ComputedDateLast::forced_import( + price_1y_min: ComputedDateLast::forced_import( db, "price_1y_min", version + v1, indexes, )?, - indexes_to_price_1y_max: ComputedDateLast::forced_import( + price_1y_max: ComputedDateLast::forced_import( db, "price_1y_max", version + v1, @@ -68,7 +68,7 @@ impl Vecs { "price_true_range_2w_sum", version, )?, - indexes_to_price_2w_choppiness_index: ComputedDateLast::forced_import( + price_2w_choppiness_index: ComputedDateLast::forced_import( db, "price_2w_choppiness_index", version + v1, diff --git a/crates/brk_computer/src/market/range/vecs.rs b/crates/brk_computer/src/market/range/vecs.rs index 1980b06b5..4975e744c 100644 --- a/crates/brk_computer/src/market/range/vecs.rs +++ b/crates/brk_computer/src/market/range/vecs.rs @@ -7,15 +7,15 @@ use crate::internal::ComputedDateLast; /// Price range and choppiness metrics #[derive(Clone, Traversable)] pub struct Vecs { - pub indexes_to_price_1w_min: ComputedDateLast, - pub indexes_to_price_1w_max: ComputedDateLast, - pub indexes_to_price_2w_min: ComputedDateLast, - pub indexes_to_price_2w_max: ComputedDateLast, - pub indexes_to_price_1m_min: ComputedDateLast, - pub indexes_to_price_1m_max: ComputedDateLast, - pub indexes_to_price_1y_min: ComputedDateLast, - pub 
indexes_to_price_1y_max: ComputedDateLast, + pub price_1w_min: ComputedDateLast, + pub price_1w_max: ComputedDateLast, + pub price_2w_min: ComputedDateLast, + pub price_2w_max: ComputedDateLast, + pub price_1m_min: ComputedDateLast, + pub price_1m_max: ComputedDateLast, + pub price_1y_min: ComputedDateLast, + pub price_1y_max: ComputedDateLast, pub dateindex_to_price_true_range: EagerVec>, pub dateindex_to_price_true_range_2w_sum: EagerVec>, - pub indexes_to_price_2w_choppiness_index: ComputedDateLast, + pub price_2w_choppiness_index: ComputedDateLast, } diff --git a/crates/brk_computer/src/market/returns/compute.rs b/crates/brk_computer/src/market/returns/compute.rs index 9886af0e3..5ca50bf5b 100644 --- a/crates/brk_computer/src/market/returns/compute.rs +++ b/crates/brk_computer/src/market/returns/compute.rs @@ -11,7 +11,6 @@ impl Vecs { let price_returns_dca = self.price_returns.as_dca_period(); for (cagr, returns, days) in self.cagr.zip_mut_with_period(&price_returns_dca) { cagr.compute_all(starting_indexes, exit, |v| { - // KISS: dateindex is no longer Option v.compute_cagr( starting_indexes.dateindex, &returns.dateindex, @@ -22,27 +21,17 @@ impl Vecs { })?; } - // KISS: dateindex is no longer Option let _1d_price_returns_dateindex = &self.price_returns._1d.dateindex; - self.indexes_to_1d_returns_1w_sd.compute_all( - starting_indexes, - exit, - _1d_price_returns_dateindex, - )?; - self.indexes_to_1d_returns_1m_sd.compute_all( - starting_indexes, - exit, - _1d_price_returns_dateindex, - )?; - self.indexes_to_1d_returns_1y_sd.compute_all( - starting_indexes, - exit, - _1d_price_returns_dateindex, - )?; + self._1d_returns_1w_sd + .compute_all(starting_indexes, exit, _1d_price_returns_dateindex)?; + self._1d_returns_1m_sd + .compute_all(starting_indexes, exit, _1d_price_returns_dateindex)?; + self._1d_returns_1y_sd + .compute_all(starting_indexes, exit, _1d_price_returns_dateindex)?; // Downside returns: min(return, 0) - 
self.dateindex_to_downside_returns.compute_transform( + self.downside_returns.compute_transform( starting_indexes.dateindex, _1d_price_returns_dateindex, |(i, ret, ..)| (i, StoredF32::from((*ret).min(0.0))), @@ -50,21 +39,12 @@ impl Vecs { )?; // Downside deviation (SD of downside returns) - self.indexes_to_downside_1w_sd.compute_all( - starting_indexes, - exit, - &self.dateindex_to_downside_returns, - )?; - self.indexes_to_downside_1m_sd.compute_all( - starting_indexes, - exit, - &self.dateindex_to_downside_returns, - )?; - self.indexes_to_downside_1y_sd.compute_all( - starting_indexes, - exit, - &self.dateindex_to_downside_returns, - )?; + self.downside_1w_sd + .compute_all(starting_indexes, exit, &self.downside_returns)?; + self.downside_1m_sd + .compute_all(starting_indexes, exit, &self.downside_returns)?; + self.downside_1y_sd + .compute_all(starting_indexes, exit, &self.downside_returns)?; Ok(()) } diff --git a/crates/brk_computer/src/market/returns/import.rs b/crates/brk_computer/src/market/returns/import.rs index 07556cd87..9a64cbb3c 100644 --- a/crates/brk_computer/src/market/returns/import.rs +++ b/crates/brk_computer/src/market/returns/import.rs @@ -7,7 +7,7 @@ use super::Vecs; use crate::{ indexes, internal::{ - BinaryDateLast, ComputedDateLast, ComputedStandardDeviationVecsDate, + ComputedDateLast, ComputedStandardDeviationVecsDate, LazyBinaryDateLast, PercentageDiffCloseDollars, StandardDeviationVecsOptions, }, market::dca::ByDcaCagr, @@ -24,15 +24,14 @@ impl Vecs { ) -> Result { let v1 = Version::ONE; - // KISS: Price returns (lazy, from price.close and lookback.price_ago) let price_returns = LOOKBACK_PERIOD_NAMES .zip_ref(&lookback.price_ago) .map(|(name, price_ago)| { - BinaryDateLast::from_computed_both_last::( + LazyBinaryDateLast::from_computed_both_last::( &format!("{name}_price_returns"), version, - &price.usd.timeindexes_to_price_close, + &price.usd.split.close, price_ago, ) }); @@ -42,7 +41,7 @@ impl Vecs { 
ComputedDateLast::forced_import(db, &format!("{name}_cagr"), version, indexes) })?; - let indexes_to_1d_returns_1w_sd = ComputedStandardDeviationVecsDate::forced_import( + let _1d_returns_1w_sd = ComputedStandardDeviationVecsDate::forced_import( db, "1d_returns_1w_sd", 7, @@ -51,7 +50,7 @@ impl Vecs { StandardDeviationVecsOptions::default(), None, )?; - let indexes_to_1d_returns_1m_sd = ComputedStandardDeviationVecsDate::forced_import( + let _1d_returns_1m_sd = ComputedStandardDeviationVecsDate::forced_import( db, "1d_returns_1m_sd", 30, @@ -60,7 +59,7 @@ impl Vecs { StandardDeviationVecsOptions::default(), None, )?; - let indexes_to_1d_returns_1y_sd = ComputedStandardDeviationVecsDate::forced_import( + let _1d_returns_1y_sd = ComputedStandardDeviationVecsDate::forced_import( db, "1d_returns_1y_sd", 365, @@ -70,9 +69,8 @@ impl Vecs { None, )?; - let dateindex_to_downside_returns = - EagerVec::forced_import(db, "downside_returns", version)?; - let indexes_to_downside_1w_sd = ComputedStandardDeviationVecsDate::forced_import( + let downside_returns = EagerVec::forced_import(db, "downside_returns", version)?; + let downside_1w_sd = ComputedStandardDeviationVecsDate::forced_import( db, "downside_1w_sd", 7, @@ -81,7 +79,7 @@ impl Vecs { StandardDeviationVecsOptions::default(), None, )?; - let indexes_to_downside_1m_sd = ComputedStandardDeviationVecsDate::forced_import( + let downside_1m_sd = ComputedStandardDeviationVecsDate::forced_import( db, "downside_1m_sd", 30, @@ -90,7 +88,7 @@ impl Vecs { StandardDeviationVecsOptions::default(), None, )?; - let indexes_to_downside_1y_sd = ComputedStandardDeviationVecsDate::forced_import( + let downside_1y_sd = ComputedStandardDeviationVecsDate::forced_import( db, "downside_1y_sd", 365, @@ -103,15 +101,13 @@ impl Vecs { Ok(Self { price_returns, cagr, - - indexes_to_1d_returns_1w_sd, - indexes_to_1d_returns_1m_sd, - indexes_to_1d_returns_1y_sd, - - dateindex_to_downside_returns, - indexes_to_downside_1w_sd, - 
indexes_to_downside_1m_sd, - indexes_to_downside_1y_sd, + _1d_returns_1w_sd, + _1d_returns_1m_sd, + _1d_returns_1y_sd, + downside_returns, + downside_1w_sd, + downside_1m_sd, + downside_1y_sd, }) } } diff --git a/crates/brk_computer/src/market/returns/vecs.rs b/crates/brk_computer/src/market/returns/vecs.rs index ed38a27e9..55080358c 100644 --- a/crates/brk_computer/src/market/returns/vecs.rs +++ b/crates/brk_computer/src/market/returns/vecs.rs @@ -3,27 +3,26 @@ use brk_types::{Close, DateIndex, Dollars, StoredF32}; use vecdb::{EagerVec, PcoVec}; use crate::{ - internal::{BinaryDateLast, ComputedDateLast, ComputedStandardDeviationVecsDate}, + internal::{ComputedDateLast, ComputedStandardDeviationVecsDate, LazyBinaryDateLast}, market::{dca::ByDcaCagr, lookback::ByLookbackPeriod}, }; /// Price returns, CAGR, and returns standard deviation metrics #[derive(Clone, Traversable)] pub struct Vecs { - // KISS: Price returns (lazy, from price.close and lookback.price_ago) - pub price_returns: ByLookbackPeriod, Dollars>>, + pub price_returns: ByLookbackPeriod, Dollars>>, // CAGR (computed from returns, 2y+ only) pub cagr: ByDcaCagr>, // Returns standard deviation (computed from 1d returns) - pub indexes_to_1d_returns_1w_sd: ComputedStandardDeviationVecsDate, - pub indexes_to_1d_returns_1m_sd: ComputedStandardDeviationVecsDate, - pub indexes_to_1d_returns_1y_sd: ComputedStandardDeviationVecsDate, + pub _1d_returns_1w_sd: ComputedStandardDeviationVecsDate, + pub _1d_returns_1m_sd: ComputedStandardDeviationVecsDate, + pub _1d_returns_1y_sd: ComputedStandardDeviationVecsDate, // Downside returns and deviation (for Sortino ratio) - pub dateindex_to_downside_returns: EagerVec>, - pub indexes_to_downside_1w_sd: ComputedStandardDeviationVecsDate, - pub indexes_to_downside_1m_sd: ComputedStandardDeviationVecsDate, - pub indexes_to_downside_1y_sd: ComputedStandardDeviationVecsDate, + pub downside_returns: EagerVec>, + pub downside_1w_sd: ComputedStandardDeviationVecsDate, + pub 
downside_1m_sd: ComputedStandardDeviationVecsDate, + pub downside_1y_sd: ComputedStandardDeviationVecsDate, } diff --git a/crates/brk_computer/src/market/volatility/import.rs b/crates/brk_computer/src/market/volatility/import.rs index ae52d9a72..75de14d6f 100644 --- a/crates/brk_computer/src/market/volatility/import.rs +++ b/crates/brk_computer/src/market/volatility/import.rs @@ -4,55 +4,50 @@ use vecdb::{IterableCloneableVec, LazyVecFrom2}; use super::super::returns; use super::Vecs; use crate::internal::{ - LazyDateLast, RatioF32, StoredF32TimesSqrt30, StoredF32TimesSqrt365, - StoredF32TimesSqrt7, + LazyDateLast, RatioF32, StoredF32TimesSqrt7, StoredF32TimesSqrt30, StoredF32TimesSqrt365, }; impl Vecs { pub fn forced_import(version: Version, returns: &returns::Vecs) -> Self { let v2 = Version::TWO; - let indexes_to_price_1w_volatility = - LazyDateLast::from_source::( - "price_1w_volatility", - version + v2, - &returns.indexes_to_1d_returns_1w_sd.sd, - ); + let price_1w_volatility = LazyDateLast::from_source::( + "price_1w_volatility", + version + v2, + &returns._1d_returns_1w_sd.sd, + ); - let indexes_to_price_1m_volatility = - LazyDateLast::from_source::( - "price_1m_volatility", - version + v2, - &returns.indexes_to_1d_returns_1m_sd.sd, - ); + let price_1m_volatility = LazyDateLast::from_source::( + "price_1m_volatility", + version + v2, + &returns._1d_returns_1m_sd.sd, + ); - let indexes_to_price_1y_volatility = - LazyDateLast::from_source::( - "price_1y_volatility", - version + v2, - &returns.indexes_to_1d_returns_1y_sd.sd, - ); + let price_1y_volatility = LazyDateLast::from_source::( + "price_1y_volatility", + version + v2, + &returns._1d_returns_1y_sd.sd, + ); - // KISS: dateindex is no longer Option let dateindex_to_sharpe_1w = LazyVecFrom2::transformed::( "sharpe_1w", version + v2, returns.price_returns._1w.dateindex.boxed_clone(), - indexes_to_price_1w_volatility.dateindex.boxed_clone(), + price_1w_volatility.dateindex.boxed_clone(), ); let 
dateindex_to_sharpe_1m = LazyVecFrom2::transformed::( "sharpe_1m", version + v2, returns.price_returns._1m.dateindex.boxed_clone(), - indexes_to_price_1m_volatility.dateindex.boxed_clone(), + price_1m_volatility.dateindex.boxed_clone(), ); let dateindex_to_sharpe_1y = LazyVecFrom2::transformed::( "sharpe_1y", version + v2, returns.price_returns._1y.dateindex.boxed_clone(), - indexes_to_price_1y_volatility.dateindex.boxed_clone(), + price_1y_volatility.dateindex.boxed_clone(), ); // Sortino ratio = returns / downside volatility @@ -60,33 +55,33 @@ impl Vecs { "sortino_1w", version + v2, returns.price_returns._1w.dateindex.boxed_clone(), - returns.indexes_to_downside_1w_sd.sd.dateindex.boxed_clone(), + returns.downside_1w_sd.sd.dateindex.boxed_clone(), ); let dateindex_to_sortino_1m = LazyVecFrom2::transformed::( "sortino_1m", version + v2, returns.price_returns._1m.dateindex.boxed_clone(), - returns.indexes_to_downside_1m_sd.sd.dateindex.boxed_clone(), + returns.downside_1m_sd.sd.dateindex.boxed_clone(), ); let dateindex_to_sortino_1y = LazyVecFrom2::transformed::( "sortino_1y", version + v2, returns.price_returns._1y.dateindex.boxed_clone(), - returns.indexes_to_downside_1y_sd.sd.dateindex.boxed_clone(), + returns.downside_1y_sd.sd.dateindex.boxed_clone(), ); Self { - indexes_to_price_1w_volatility, - indexes_to_price_1m_volatility, - indexes_to_price_1y_volatility, - dateindex_to_sharpe_1w, - dateindex_to_sharpe_1m, - dateindex_to_sharpe_1y, - dateindex_to_sortino_1w, - dateindex_to_sortino_1m, - dateindex_to_sortino_1y, + price_1w_volatility, + price_1m_volatility, + price_1y_volatility, + sharpe_1w: dateindex_to_sharpe_1w, + sharpe_1m: dateindex_to_sharpe_1m, + sharpe_1y: dateindex_to_sharpe_1y, + sortino_1w: dateindex_to_sortino_1w, + sortino_1m: dateindex_to_sortino_1m, + sortino_1y: dateindex_to_sortino_1y, } } } diff --git a/crates/brk_computer/src/market/volatility/vecs.rs b/crates/brk_computer/src/market/volatility/vecs.rs index 304d32a05..60577e014 100644 
--- a/crates/brk_computer/src/market/volatility/vecs.rs +++ b/crates/brk_computer/src/market/volatility/vecs.rs @@ -7,16 +7,15 @@ use crate::internal::LazyDateLast; /// Price volatility metrics (derived from returns standard deviation) #[derive(Clone, Traversable)] pub struct Vecs { - pub indexes_to_price_1w_volatility: LazyDateLast, - pub indexes_to_price_1m_volatility: LazyDateLast, - pub indexes_to_price_1y_volatility: LazyDateLast, + pub price_1w_volatility: LazyDateLast, + pub price_1m_volatility: LazyDateLast, + pub price_1y_volatility: LazyDateLast, - // KISS: now concrete since source is KISS - pub dateindex_to_sharpe_1w: LazyVecFrom2, - pub dateindex_to_sharpe_1m: LazyVecFrom2, - pub dateindex_to_sharpe_1y: LazyVecFrom2, + pub sharpe_1w: LazyVecFrom2, + pub sharpe_1m: LazyVecFrom2, + pub sharpe_1y: LazyVecFrom2, - pub dateindex_to_sortino_1w: LazyVecFrom2, - pub dateindex_to_sortino_1m: LazyVecFrom2, - pub dateindex_to_sortino_1y: LazyVecFrom2, + pub sortino_1w: LazyVecFrom2, + pub sortino_1m: LazyVecFrom2, + pub sortino_1y: LazyVecFrom2, } diff --git a/crates/brk_computer/src/outputs/count/compute.rs b/crates/brk_computer/src/outputs/count/compute.rs index 63ef59393..3b9e80946 100644 --- a/crates/brk_computer/src/outputs/count/compute.rs +++ b/crates/brk_computer/src/outputs/count/compute.rs @@ -4,7 +4,7 @@ use brk_types::{Height, StoredU64}; use vecdb::{Exit, TypedVecIterator}; use super::Vecs; -use crate::{indexes, inputs, scripts, ComputeIndexes}; +use crate::{ComputeIndexes, indexes, inputs, scripts}; impl Vecs { pub fn compute( @@ -16,32 +16,22 @@ impl Vecs { starting_indexes: &ComputeIndexes, exit: &Exit, ) -> Result<()> { - self.indexes_to_count.derive_from( + self.total_count.derive_from( indexer, indexes, starting_indexes, - &indexes.transaction.txindex_to_output_count, + &indexes.txindex.output_count, exit, )?; - self.indexes_to_utxo_count + self.utxo_count .compute_all(indexes, starting_indexes, exit, |v| { - let mut input_count_iter = 
inputs_count - .indexes_to_count - .height - .sum_cum - .cumulative - .0 - .into_iter(); - let mut opreturn_cumulative_iter = scripts_count - .indexes_to_opreturn_count - .rest - .height_cumulative - .0 - .into_iter(); + let mut input_count_iter = inputs_count.height.sum_cum.cumulative.0.into_iter(); + let mut opreturn_cumulative_iter = + scripts_count.opreturn.height_cumulative.0.into_iter(); v.compute_transform( starting_indexes.height, - &self.indexes_to_count.height.sum_cum.cumulative.0, + &self.total_count.height.sum_cum.cumulative.0, |(h, output_count, ..)| { let input_count = input_count_iter.get_unwrap(h); let opreturn_count = *opreturn_cumulative_iter.get_unwrap(h); diff --git a/crates/brk_computer/src/outputs/count/import.rs b/crates/brk_computer/src/outputs/count/import.rs index 035be476c..9360f2da9 100644 --- a/crates/brk_computer/src/outputs/count/import.rs +++ b/crates/brk_computer/src/outputs/count/import.rs @@ -10,23 +10,9 @@ use crate::{ impl Vecs { pub fn forced_import(db: &Database, version: Version, indexes: &indexes::Vecs) -> Result { - let indexes_to_count = DerivedTxFull::forced_import( - db, - "output_count", - version, - indexes, - )?; - - let indexes_to_utxo_count = ComputedBlockFull::forced_import( - db, - "exact_utxo_count", - version, - indexes, - )?; - Ok(Self { - indexes_to_count, - indexes_to_utxo_count, + total_count: DerivedTxFull::forced_import(db, "output_count", version, indexes)?, + utxo_count: ComputedBlockFull::forced_import(db, "exact_utxo_count", version, indexes)?, }) } } diff --git a/crates/brk_computer/src/outputs/count/vecs.rs b/crates/brk_computer/src/outputs/count/vecs.rs index 902d3d7dd..0aaf6784b 100644 --- a/crates/brk_computer/src/outputs/count/vecs.rs +++ b/crates/brk_computer/src/outputs/count/vecs.rs @@ -5,6 +5,6 @@ use crate::internal::{ComputedBlockFull, DerivedTxFull}; #[derive(Clone, Traversable)] pub struct Vecs { - pub indexes_to_count: DerivedTxFull, - pub indexes_to_utxo_count: ComputedBlockFull, + pub 
total_count: DerivedTxFull, + pub utxo_count: ComputedBlockFull, } diff --git a/crates/brk_computer/src/outputs/spent/compute.rs b/crates/brk_computer/src/outputs/spent/compute.rs index 7853ba9be..95a0184b1 100644 --- a/crates/brk_computer/src/outputs/spent/compute.rs +++ b/crates/brk_computer/src/outputs/spent/compute.rs @@ -1,11 +1,13 @@ use brk_error::Result; use brk_indexer::Indexer; use brk_types::{Height, TxInIndex, TxOutIndex}; -use log::info; -use vecdb::{AnyStoredVec, AnyVec, Database, Exit, GenericStoredVec, Stamp, TypedVecIterator, VecIndex}; +use tracing::info; +use vecdb::{ + AnyStoredVec, AnyVec, Database, Exit, GenericStoredVec, Stamp, TypedVecIterator, VecIndex, +}; use super::Vecs; -use crate::{inputs, ComputeIndexes}; +use crate::{ComputeIndexes, inputs}; const HEIGHT_BATCH: u32 = 10_000; @@ -18,26 +20,25 @@ impl Vecs { starting_indexes: &ComputeIndexes, exit: &Exit, ) -> Result<()> { - let target_height = indexer.vecs.block.height_to_blockhash.len(); + let target_height = indexer.vecs.blocks.blockhash.len(); if target_height == 0 { return Ok(()); } let target_height = Height::from(target_height - 1); // Find min_height from current vec length - let current_txoutindex = self.txoutindex_to_txinindex.len(); + let current_txoutindex = self.txinindex.len(); let min_txoutindex = current_txoutindex.min(starting_indexes.txoutindex.to_usize()); let starting_stamp = Stamp::from(starting_indexes.height); - let _ = self.txoutindex_to_txinindex.rollback_before(starting_stamp); + let _ = self.txinindex.rollback_before(starting_stamp); - self.txoutindex_to_txinindex + self.txinindex .truncate_if_needed(TxOutIndex::from(min_txoutindex))?; - let mut height_to_first_txoutindex = - indexer.vecs.txout.height_to_first_txoutindex.iter()?; - let mut height_to_first_txinindex = indexer.vecs.txin.height_to_first_txinindex.iter()?; - let mut txinindex_to_txoutindex = inputs.spent.txinindex_to_txoutindex.iter()?; + let mut height_to_first_txoutindex = 
indexer.vecs.outputs.first_txoutindex.iter()?; + let mut height_to_first_txinindex = indexer.vecs.inputs.first_txinindex.iter()?; + let mut txinindex_to_txoutindex = inputs.spent.txoutindex.iter()?; // Find starting height from min_txoutindex let mut min_height = Height::ZERO; @@ -65,13 +66,13 @@ impl Vecs { // Fill txoutindex up to batch_end_height + 1 let batch_txoutindex = if batch_end_height >= target_height { - indexer.vecs.txout.txoutindex_to_value.len() + indexer.vecs.outputs.value.len() } else { height_to_first_txoutindex .get_unwrap(batch_end_height + 1_u32) .to_usize() }; - self.txoutindex_to_txinindex + self.txinindex .fill_to(batch_txoutindex, TxInIndex::UNSPENT)?; // Get txin range for this height batch @@ -79,7 +80,7 @@ impl Vecs { .get_unwrap(batch_start_height) .to_usize(); let txin_end = if batch_end_height >= target_height { - inputs.spent.txinindex_to_txoutindex.len() + inputs.spent.txoutindex.len() } else { height_to_first_txinindex .get_unwrap(batch_end_height + 1_u32) @@ -102,12 +103,12 @@ impl Vecs { pairs.sort_unstable_by_key(|(txoutindex, _)| *txoutindex); for &(txoutindex, txinindex) in &pairs { - self.txoutindex_to_txinindex.update(txoutindex, txinindex)?; + self.txinindex.update(txoutindex, txinindex)?; } if batch_end_height < target_height { let _lock = exit.lock(); - self.txoutindex_to_txinindex.write()?; + self.txinindex.write()?; info!( "TxOuts: {:.2}%", batch_end_height.to_usize() as f64 / target_height.to_usize() as f64 * 100.0 @@ -119,7 +120,7 @@ impl Vecs { } let _lock = exit.lock(); - self.txoutindex_to_txinindex + self.txinindex .stamped_write_with_changes(Stamp::from(target_height))?; db.flush()?; diff --git a/crates/brk_computer/src/outputs/spent/import.rs b/crates/brk_computer/src/outputs/spent/import.rs index 205d100da..c99012ed1 100644 --- a/crates/brk_computer/src/outputs/spent/import.rs +++ b/crates/brk_computer/src/outputs/spent/import.rs @@ -7,7 +7,7 @@ use super::Vecs; impl Vecs { pub fn forced_import(db: &Database, 
version: Version) -> Result { Ok(Self { - txoutindex_to_txinindex: BytesVec::forced_import(db, "txinindex", version)?, + txinindex: BytesVec::forced_import(db, "txinindex", version)?, }) } } diff --git a/crates/brk_computer/src/outputs/spent/vecs.rs b/crates/brk_computer/src/outputs/spent/vecs.rs index 41543ec83..ffc2c5865 100644 --- a/crates/brk_computer/src/outputs/spent/vecs.rs +++ b/crates/brk_computer/src/outputs/spent/vecs.rs @@ -4,5 +4,5 @@ use vecdb::BytesVec; #[derive(Clone, Traversable)] pub struct Vecs { - pub txoutindex_to_txinindex: BytesVec, + pub txinindex: BytesVec, } diff --git a/crates/brk_computer/src/pools/mod.rs b/crates/brk_computer/src/pools/mod.rs index b8f900408..24773458b 100644 --- a/crates/brk_computer/src/pools/mod.rs +++ b/crates/brk_computer/src/pools/mod.rs @@ -80,10 +80,11 @@ impl Vecs { &mut self, indexer: &Indexer, indexes: &indexes::Vecs, + blocks: &blocks::Vecs, starting_indexes: &ComputeIndexes, exit: &Exit, ) -> Result<()> { - self.compute_(indexer, indexes, starting_indexes, exit)?; + self.compute_(indexer, indexes, blocks, starting_indexes, exit)?; let _lock = exit.lock(); self.db.compact()?; Ok(()) @@ -93,13 +94,14 @@ impl Vecs { &mut self, indexer: &Indexer, indexes: &indexes::Vecs, + blocks: &blocks::Vecs, starting_indexes: &ComputeIndexes, exit: &Exit, ) -> Result<()> { self.compute_height_to_pool(indexer, indexes, starting_indexes, exit)?; self.vecs.par_iter_mut().try_for_each(|(_, vecs)| { - vecs.compute(indexes, starting_indexes, &self.height_to_pool, exit) + vecs.compute(indexes, starting_indexes, &self.height_to_pool, blocks, exit) })?; Ok(()) @@ -115,44 +117,44 @@ impl Vecs { self.height_to_pool .validate_computed_version_or_reset(indexer.stores.height_to_coinbase_tag.version())?; - let mut height_to_first_txindex_iter = indexer.vecs.tx.height_to_first_txindex.iter()?; + let mut height_to_first_txindex_iter = indexer.vecs.transactions.first_txindex.iter()?; let mut txindex_to_first_txoutindex_iter = - 
indexer.vecs.tx.txindex_to_first_txoutindex.iter()?; - let mut txindex_to_output_count_iter = indexes.transaction.txindex_to_output_count.iter(); + indexer.vecs.transactions.first_txoutindex.iter()?; + let mut txindex_to_output_count_iter = indexes.txindex.output_count.iter(); let mut txoutindex_to_outputtype_iter = - indexer.vecs.txout.txoutindex_to_outputtype.iter()?; - let mut txoutindex_to_typeindex_iter = indexer.vecs.txout.txoutindex_to_typeindex.iter()?; + indexer.vecs.outputs.outputtype.iter()?; + let mut txoutindex_to_typeindex_iter = indexer.vecs.outputs.typeindex.iter()?; let mut p2pk65addressindex_to_p2pk65bytes_iter = indexer .vecs - .address - .p2pk65addressindex_to_p2pk65bytes + .addresses + .p2pk65bytes .iter()?; let mut p2pk33addressindex_to_p2pk33bytes_iter = indexer .vecs - .address - .p2pk33addressindex_to_p2pk33bytes + .addresses + .p2pk33bytes .iter()?; let mut p2pkhaddressindex_to_p2pkhbytes_iter = indexer .vecs - .address - .p2pkhaddressindex_to_p2pkhbytes + .addresses + .p2pkhbytes .iter()?; let mut p2shaddressindex_to_p2shbytes_iter = - indexer.vecs.address.p2shaddressindex_to_p2shbytes.iter()?; + indexer.vecs.addresses.p2shbytes.iter()?; let mut p2wpkhaddressindex_to_p2wpkhbytes_iter = indexer .vecs - .address - .p2wpkhaddressindex_to_p2wpkhbytes + .addresses + .p2wpkhbytes .iter()?; let mut p2wshaddressindex_to_p2wshbytes_iter = indexer .vecs - .address - .p2wshaddressindex_to_p2wshbytes + .addresses + .p2wshbytes .iter()?; let mut p2traddressindex_to_p2trbytes_iter = - indexer.vecs.address.p2traddressindex_to_p2trbytes.iter()?; + indexer.vecs.addresses.p2trbytes.iter()?; let mut p2aaddressindex_to_p2abytes_iter = - indexer.vecs.address.p2aaddressindex_to_p2abytes.iter()?; + indexer.vecs.addresses.p2abytes.iter()?; let unknown = self.pools.get_unknown(); diff --git a/crates/brk_computer/src/pools/vecs.rs b/crates/brk_computer/src/pools/vecs.rs index b43054428..58f0f5664 100644 --- a/crates/brk_computer/src/pools/vecs.rs +++ 
b/crates/brk_computer/src/pools/vecs.rs @@ -2,17 +2,16 @@ use brk_error::Result; use brk_traversable::Traversable; use brk_types::{Height, PoolSlug, Sats, StoredF32, StoredU16, StoredU32}; use vecdb::{ - Database, Exit, GenericStoredVec, IterableCloneableVec, IterableVec, LazyVecFrom2, VecIndex, - Version, + Database, Exit, GenericStoredVec, IterableCloneableVec, IterableVec, VecIndex, Version, }; use crate::{ blocks, indexes::{self, ComputeIndexes}, internal::{ - BinaryBlockSumCum, BinaryDateLast, ComputedBlockSumCum, ComputedDateLast, - DerivedValueBlockSumCum, DollarsPlus, MaskSats, PercentageU32F32, SatsPlus, - SatsPlusToBitcoin, ValueBinaryBlock, + ComputedBlockLast, ComputedBlockSumCum, ComputedDateLast, DollarsPlus, LazyBinaryBlockLast, + LazyValueBlockSumCum, MaskSats, PercentageU32F32, SatsPlus, SatsPlusToBitcoin, + ValueBinaryBlock, }, price, transactions, }; @@ -21,22 +20,21 @@ use crate::{ pub struct Vecs { slug: PoolSlug, - pub indexes_to_blocks_mined: ComputedBlockSumCum, - pub indexes_to_1w_blocks_mined: ComputedDateLast, - pub indexes_to_1m_blocks_mined: ComputedDateLast, - pub indexes_to_1y_blocks_mined: ComputedDateLast, - pub height_to_subsidy: LazyVecFrom2, - pub height_to_fee: LazyVecFrom2, - pub indexes_to_subsidy: DerivedValueBlockSumCum, - pub indexes_to_fee: DerivedValueBlockSumCum, - pub indexes_to_coinbase: ValueBinaryBlock, - pub indexes_to_dominance: BinaryBlockSumCum, - pub indexes_to_1d_dominance: BinaryBlockSumCum, - // KISS: both sources are ComputedVecsDateLast - pub indexes_to_1w_dominance: BinaryDateLast, - pub indexes_to_1m_dominance: BinaryDateLast, - pub indexes_to_1y_dominance: BinaryDateLast, - pub indexes_to_days_since_block: ComputedDateLast, + pub blocks_mined: ComputedBlockSumCum, + pub _24h_blocks_mined: ComputedBlockLast, + pub _1w_blocks_mined: ComputedBlockLast, + pub _1m_blocks_mined: ComputedBlockLast, + pub _1y_blocks_mined: ComputedBlockLast, + pub subsidy: LazyValueBlockSumCum, + pub fee: LazyValueBlockSumCum, + 
pub coinbase: ValueBinaryBlock, + pub dominance: LazyBinaryBlockLast, + + pub _24h_dominance: LazyBinaryBlockLast, + pub _1w_dominance: LazyBinaryBlockLast, + pub _1m_dominance: LazyBinaryBlockLast, + pub _1y_dominance: LazyBinaryBlockLast, + pub days_since_block: ComputedDateLast, } impl Vecs { @@ -52,115 +50,85 @@ impl Vecs { let suffix = |s: &str| format!("{}_{s}", slug); let version = parent_version; - let indexes_to_blocks_mined = + let blocks_mined = ComputedBlockSumCum::forced_import(db, &suffix("blocks_mined"), version, indexes)?; - let indexes_to_1w_blocks_mined = - ComputedDateLast::forced_import(db, &suffix("1w_blocks_mined"), version, indexes)?; - let indexes_to_1m_blocks_mined = - ComputedDateLast::forced_import(db, &suffix("1m_blocks_mined"), version, indexes)?; - let indexes_to_1y_blocks_mined = - ComputedDateLast::forced_import(db, &suffix("1y_blocks_mined"), version, indexes)?; + let _24h_blocks_mined = + ComputedBlockLast::forced_import(db, &suffix("24h_blocks_mined"), version, indexes)?; + let _1w_blocks_mined = + ComputedBlockLast::forced_import(db, &suffix("1w_blocks_mined"), version, indexes)?; + let _1m_blocks_mined = + ComputedBlockLast::forced_import(db, &suffix("1m_blocks_mined"), version, indexes)?; + let _1y_blocks_mined = + ComputedBlockLast::forced_import(db, &suffix("1y_blocks_mined"), version, indexes)?; - // KISS: height is now a concrete field (no Option) - let height_to_subsidy = LazyVecFrom2::transformed::( - &suffix("height_subsidy"), - version, - indexes_to_blocks_mined.height.boxed_clone(), - blocks.rewards.indexes_to_subsidy.sats.height.boxed_clone(), - ); - - let indexes_to_subsidy = DerivedValueBlockSumCum::forced_import( + let subsidy = LazyValueBlockSumCum::forced_import::( db, &suffix("subsidy"), version, indexes, - height_to_subsidy.boxed_clone(), + blocks_mined.height.boxed_clone(), + blocks.rewards.subsidy.sats.height.boxed_clone(), price, )?; - // KISS: height.sum_cum.sum.0 is now a concrete field - let height_to_fee 
= LazyVecFrom2::transformed::( - &suffix("height_fee"), - version, - indexes_to_blocks_mined.height.boxed_clone(), - transactions - .fees - .indexes_to_fee - .sats - .height - .sum_cum - .sum - .0 - .boxed_clone(), - ); - - let indexes_to_fee = DerivedValueBlockSumCum::forced_import( + let fee = LazyValueBlockSumCum::forced_import::( db, &suffix("fee"), version, indexes, - height_to_fee.boxed_clone(), + blocks_mined.height.boxed_clone(), + transactions.fees.fee.sats.height.sum_cum.sum.0.boxed_clone(), price, )?; Ok(Self { - indexes_to_dominance: BinaryBlockSumCum::from_computed::( + dominance: LazyBinaryBlockLast::from_computed_sum_cum::( &suffix("dominance"), version, - indexes_to_blocks_mined.height.boxed_clone(), - blocks.count.indexes_to_block_count.height.boxed_clone(), - &indexes_to_blocks_mined, - &blocks.count.indexes_to_block_count, + &blocks_mined, + &blocks.count.block_count, ), - indexes_to_1d_dominance: BinaryBlockSumCum::from_computed::( - &suffix("1d_dominance"), + _24h_dominance: LazyBinaryBlockLast::from_computed_last::( + &suffix("24h_dominance"), version, - indexes_to_blocks_mined.height.boxed_clone(), - blocks.count.indexes_to_block_count.height.boxed_clone(), - &indexes_to_blocks_mined, - &blocks.count.indexes_to_block_count, + &_24h_blocks_mined, + &blocks.count._24h_block_count, ), - indexes_to_1w_dominance: BinaryDateLast::from_computed_both_last::( + _1w_dominance: LazyBinaryBlockLast::from_computed_last::( &suffix("1w_dominance"), version, - &indexes_to_1w_blocks_mined, - &blocks.count.indexes_to_1w_block_count, + &_1w_blocks_mined, + &blocks.count._1w_block_count, ), - indexes_to_1m_dominance: BinaryDateLast::from_computed_both_last::( + _1m_dominance: LazyBinaryBlockLast::from_computed_last::( &suffix("1m_dominance"), version, - &indexes_to_1m_blocks_mined, - &blocks.count.indexes_to_1m_block_count, + &_1m_blocks_mined, + &blocks.count._1m_block_count, ), - indexes_to_1y_dominance: BinaryDateLast::from_computed_both_last::( + 
_1y_dominance: LazyBinaryBlockLast::from_computed_last::( &suffix("1y_dominance"), version, - &indexes_to_1y_blocks_mined, - &blocks.count.indexes_to_1y_block_count, + &_1y_blocks_mined, + &blocks.count._1y_block_count, ), slug, - indexes_to_blocks_mined, - indexes_to_1w_blocks_mined, - indexes_to_1m_blocks_mined, - indexes_to_1y_blocks_mined, - indexes_to_coinbase: ValueBinaryBlock::from_derived::< + blocks_mined, + _24h_blocks_mined, + _1w_blocks_mined, + _1m_blocks_mined, + _1y_blocks_mined, + coinbase: ValueBinaryBlock::from_lazy::< SatsPlus, SatsPlusToBitcoin, DollarsPlus, - >( - &suffix("coinbase"), - version, - height_to_subsidy.boxed_clone(), - height_to_fee.boxed_clone(), - &indexes_to_subsidy, - &indexes_to_fee, - ), - height_to_subsidy, - height_to_fee, - indexes_to_subsidy, - indexes_to_fee, - indexes_to_days_since_block: ComputedDateLast::forced_import( + StoredU32, + Sats, + >(&suffix("coinbase"), version, &subsidy, &fee), + subsidy, + fee, + days_since_block: ComputedDateLast::forced_import( db, &suffix("days_since_block"), version, @@ -174,9 +142,10 @@ impl Vecs { indexes: &indexes::Vecs, starting_indexes: &ComputeIndexes, height_to_pool: &impl IterableVec, + blocks: &blocks::Vecs, exit: &Exit, ) -> Result<()> { - self.indexes_to_blocks_mined + self.blocks_mined .compute_all(indexes, starting_indexes, exit, |vec| { vec.compute_transform( starting_indexes.height, @@ -196,56 +165,59 @@ impl Vecs { Ok(()) })?; - self.indexes_to_1w_blocks_mined - .compute_all(starting_indexes, exit, |v| { - v.compute_sum( - starting_indexes.dateindex, - self.indexes_to_blocks_mined.dateindex.sum.inner(), - 7, + // Compute rolling window blocks mined using the start heights from blocks.count + let blocks_mined_height = &self.blocks_mined.height.clone(); + self._24h_blocks_mined + .compute_all(indexes, starting_indexes, exit, |v| { + Ok(v.compute_rolling_sum( + starting_indexes.height, + &blocks.count._24h_start, + blocks_mined_height, exit, - )?; - Ok(()) + )?) 
})?; - self.indexes_to_1m_blocks_mined - .compute_all(starting_indexes, exit, |v| { - v.compute_sum( - starting_indexes.dateindex, - self.indexes_to_blocks_mined.dateindex.sum.inner(), - 30, + self._1w_blocks_mined + .compute_all(indexes, starting_indexes, exit, |v| { + Ok(v.compute_rolling_sum( + starting_indexes.height, + &blocks.count._1w_start, + blocks_mined_height, exit, - )?; - Ok(()) + )?) })?; - self.indexes_to_1y_blocks_mined - .compute_all(starting_indexes, exit, |v| { - v.compute_sum( - starting_indexes.dateindex, - self.indexes_to_blocks_mined.dateindex.sum.inner(), - 365, + self._1m_blocks_mined + .compute_all(indexes, starting_indexes, exit, |v| { + Ok(v.compute_rolling_sum( + starting_indexes.height, + &blocks.count._1m_start, + blocks_mined_height, exit, - )?; - Ok(()) + )?) })?; - self.indexes_to_subsidy.derive_from( - indexes, - starting_indexes, - &self.height_to_subsidy, - exit, - )?; + self._1y_blocks_mined + .compute_all(indexes, starting_indexes, exit, |v| { + Ok(v.compute_rolling_sum( + starting_indexes.height, + &blocks.count._1y_start, + blocks_mined_height, + exit, + )?) 
+ })?; - self.indexes_to_fee - .derive_from(indexes, starting_indexes, &self.height_to_fee, exit)?; + self.subsidy.derive_from(indexes, starting_indexes, exit)?; - self.indexes_to_days_since_block + self.fee.derive_from(indexes, starting_indexes, exit)?; + + self.days_since_block .compute_all(starting_indexes, exit, |v| { let mut prev = None; v.compute_transform2( starting_indexes.dateindex, - self.indexes_to_blocks_mined.dateindex.sum.inner(), - self.indexes_to_blocks_mined.dateindex.cumulative.inner(), + self.blocks_mined.dateindex.sum.inner(), + self.blocks_mined.dateindex.cumulative.inner(), |(i, sum, cumulative, slf)| { if prev.is_none() { let i = i.to_usize(); diff --git a/crates/brk_computer/src/positions.rs b/crates/brk_computer/src/positions.rs index d9c6d4dcb..07edeb5cc 100644 --- a/crates/brk_computer/src/positions.rs +++ b/crates/brk_computer/src/positions.rs @@ -18,8 +18,8 @@ pub const DB_NAME: &str = "positions"; pub struct Vecs { db: Database, - pub height_to_position: PcoVec, - pub txindex_to_position: PcoVec, + pub block_position: PcoVec, + pub tx_position: PcoVec, } impl Vecs { @@ -30,8 +30,8 @@ impl Vecs { let version = parent_version; let this = Self { - height_to_position: PcoVec::forced_import(&db, "position", version + Version::TWO)?, - txindex_to_position: PcoVec::forced_import(&db, "position", version + Version::TWO)?, + block_position: PcoVec::forced_import(&db, "position", version + Version::TWO)?, + tx_position: PcoVec::forced_import(&db, "position", version + Version::TWO)?, db, }; @@ -67,20 +67,19 @@ impl Vecs { exit: &Exit, ) -> Result<()> { // Validate computed versions against dependencies - let dep_version = indexer.vecs.tx.height_to_first_txindex.version() - + indexer.vecs.tx.txindex_to_height.version(); - self.height_to_position + let dep_version = indexer.vecs.transactions.first_txindex.version() + + indexer.vecs.transactions.height.version(); + self.block_position .validate_computed_version_or_reset(dep_version)?; - 
self.txindex_to_position + self.tx_position .validate_computed_version_or_reset(dep_version)?; - let min_txindex = - TxIndex::from(self.txindex_to_position.len()).min(starting_indexes.txindex); + let min_txindex = TxIndex::from(self.tx_position.len()).min(starting_indexes.txindex); let Some(min_height) = indexer .vecs - .tx - .txindex_to_height + .transactions + .height .iter()? .get(min_txindex) .map(|h| h.min(starting_indexes.height)) @@ -88,18 +87,18 @@ impl Vecs { return Ok(()); }; - let mut height_to_first_txindex_iter = indexer.vecs.tx.height_to_first_txindex.iter()?; + let mut height_to_first_txindex_iter = indexer.vecs.transactions.first_txindex.iter()?; parser .read( Some(min_height), - Some((indexer.vecs.tx.height_to_first_txindex.len() - 1).into()), + Some((indexer.vecs.transactions.first_txindex.len() - 1).into()), ) .iter() .try_for_each(|block| -> Result<()> { let height = block.height(); - self.height_to_position + self.block_position .truncate_push(height, block.metadata().position())?; let txindex = height_to_first_txindex_iter.get_unwrap(height); @@ -107,7 +106,7 @@ impl Vecs { block.tx_metadata().iter().enumerate().try_for_each( |(index, metadata)| -> Result<()> { let txindex = txindex + index; - self.txindex_to_position + self.tx_position .truncate_push(txindex, metadata.position())?; Ok(()) }, @@ -115,16 +114,16 @@ impl Vecs { if *height % 1_000 == 0 { let _lock = exit.lock(); - self.height_to_position.flush()?; - self.txindex_to_position.flush()?; + self.block_position.flush()?; + self.tx_position.flush()?; } Ok(()) })?; let _lock = exit.lock(); - self.height_to_position.flush()?; - self.txindex_to_position.flush()?; + self.block_position.flush()?; + self.tx_position.flush()?; Ok(()) } diff --git a/crates/brk_computer/src/price/cents/import.rs b/crates/brk_computer/src/price/cents/import.rs new file mode 100644 index 000000000..16f3d885f --- /dev/null +++ b/crates/brk_computer/src/price/cents/import.rs @@ -0,0 +1,73 @@ +use brk_error::Result; 
+use brk_types::{DateIndex, Height, OHLCCents, Version}; +use vecdb::{Database, IterableCloneableVec, LazyVecFrom1}; + +use super::Vecs; +use crate::internal::{HeightDateBytes, HeightDateLazyOHLC, LazyOHLC}; + +impl Vecs { + pub fn forced_import(db: &Database, version: Version) -> Result { + let ohlc: HeightDateBytes = + HeightDateBytes::forced_import(db, "ohlc_cents", version)?; + + let components = HeightDateLazyOHLC { + height: LazyOHLC { + open: LazyVecFrom1::init( + "price_open_cents", + version, + ohlc.height.boxed_clone(), + |h: Height, iter| iter.get(h).map(|o: OHLCCents| o.open), + ), + high: LazyVecFrom1::init( + "price_high_cents", + version, + ohlc.height.boxed_clone(), + |h: Height, iter| iter.get(h).map(|o: OHLCCents| o.high), + ), + low: LazyVecFrom1::init( + "price_low_cents", + version, + ohlc.height.boxed_clone(), + |h: Height, iter| iter.get(h).map(|o: OHLCCents| o.low), + ), + close: LazyVecFrom1::init( + "price_close_cents", + version, + ohlc.height.boxed_clone(), + |h: Height, iter| iter.get(h).map(|o: OHLCCents| o.close), + ), + }, + dateindex: LazyOHLC { + open: LazyVecFrom1::init( + "price_open_cents", + version, + ohlc.dateindex.boxed_clone(), + |di: DateIndex, iter| iter.get(di).map(|o: OHLCCents| o.open), + ), + high: LazyVecFrom1::init( + "price_high_cents", + version, + ohlc.dateindex.boxed_clone(), + |di: DateIndex, iter| iter.get(di).map(|o: OHLCCents| o.high), + ), + low: LazyVecFrom1::init( + "price_low_cents", + version, + ohlc.dateindex.boxed_clone(), + |di: DateIndex, iter| iter.get(di).map(|o: OHLCCents| o.low), + ), + close: LazyVecFrom1::init( + "price_close_cents", + version, + ohlc.dateindex.boxed_clone(), + |di: DateIndex, iter| iter.get(di).map(|o: OHLCCents| o.close), + ), + }, + }; + + Ok(Self { + split: components, + ohlc, + }) + } +} diff --git a/crates/brk_computer/src/indexes/address/mod.rs b/crates/brk_computer/src/price/cents/mod.rs similarity index 100% rename from crates/brk_computer/src/indexes/address/mod.rs 
rename to crates/brk_computer/src/price/cents/mod.rs diff --git a/crates/brk_computer/src/price/cents/vecs.rs b/crates/brk_computer/src/price/cents/vecs.rs new file mode 100644 index 000000000..ee5a22dbf --- /dev/null +++ b/crates/brk_computer/src/price/cents/vecs.rs @@ -0,0 +1,10 @@ +use brk_traversable::Traversable; +use brk_types::{Cents, OHLCCents}; + +use crate::internal::{HeightDateBytes, HeightDateLazyOHLC}; + +#[derive(Clone, Traversable)] +pub struct Vecs { + pub split: HeightDateLazyOHLC, + pub ohlc: HeightDateBytes, +} diff --git a/crates/brk_computer/src/price/compute.rs b/crates/brk_computer/src/price/compute.rs index 0401ce347..6a7c15cd5 100644 --- a/crates/brk_computer/src/price/compute.rs +++ b/crates/brk_computer/src/price/compute.rs @@ -6,7 +6,7 @@ use crate::ComputeIndexes; impl Vecs { pub fn compute(&mut self, starting_indexes: &ComputeIndexes, exit: &Exit) -> Result<()> { - self.usd.compute(starting_indexes, exit)?; + self.usd.compute(starting_indexes, &self.cents, exit)?; self.sats.compute(starting_indexes, &self.usd, exit)?; diff --git a/crates/brk_computer/src/price/fetch.rs b/crates/brk_computer/src/price/fetch.rs index 795ebaaaf..290aa3b46 100644 --- a/crates/brk_computer/src/price/fetch.rs +++ b/crates/brk_computer/src/price/fetch.rs @@ -1,9 +1,11 @@ use brk_error::Result; use brk_indexer::Indexer; use brk_types::{DateIndex, Height, OHLCCents}; -use vecdb::{AnyStoredVec, AnyVec, Exit, GenericStoredVec, IterableVec, TypedVecIterator, VecIndex}; +use vecdb::{ + AnyStoredVec, AnyVec, Exit, GenericStoredVec, IterableVec, TypedVecIterator, VecIndex, +}; -use crate::{indexes, utils::OptionExt, ComputeIndexes}; +use crate::{ComputeIndexes, indexes, utils::OptionExt}; use super::Vecs; @@ -20,20 +22,22 @@ impl Vecs { }; // Validate computed versions against dependencies - let height_dep_version = indexer.vecs.block.height_to_timestamp.version(); - self.ohlc - .height_to_ohlc_in_cents + let height_dep_version = 
indexer.vecs.blocks.timestamp.version(); + self.cents + .ohlc + .height .validate_computed_version_or_reset(height_dep_version)?; - let dateindex_dep_version = indexes.time.dateindex_to_date.version(); - self.ohlc - .dateindex_to_ohlc_in_cents + let dateindex_dep_version = indexes.dateindex.date.version(); + self.cents + .ohlc + .dateindex .validate_computed_version_or_reset(dateindex_dep_version)?; - let height_to_timestamp = &indexer.vecs.block.height_to_timestamp; + let height_to_timestamp = &indexer.vecs.blocks.timestamp; let index = starting_indexes .height - .min(Height::from(self.ohlc.height_to_ohlc_in_cents.len())); + .min(Height::from(self.cents.ohlc.height.len())); let mut prev_timestamp = index .decremented() .map(|prev_i| height_to_timestamp.iter().unwrap().get_unwrap(prev_i)); @@ -42,38 +46,32 @@ impl Vecs { .enumerate() .skip(index.to_usize()) .try_for_each(|(i, v)| -> Result<()> { - self.ohlc.height_to_ohlc_in_cents.truncate_push_at( + self.cents.ohlc.height.truncate_push_at( i, - fetcher - .get_height(i.into(), v, prev_timestamp) - .unwrap(), + fetcher.get_height(i.into(), v, prev_timestamp).unwrap(), )?; prev_timestamp = Some(v); Ok(()) })?; { let _lock = exit.lock(); - self.ohlc.height_to_ohlc_in_cents.write()?; + self.cents.ohlc.height.write()?; } let index = starting_indexes .dateindex - .min(DateIndex::from(self.ohlc.dateindex_to_ohlc_in_cents.len())); + .min(DateIndex::from(self.cents.ohlc.dateindex.len())); let mut prev = Some(index.decremented().map_or(OHLCCents::default(), |prev_i| { - self.ohlc - .dateindex_to_ohlc_in_cents - .iter() - .unwrap() - .get_unwrap(prev_i) + self.cents.ohlc.dateindex.iter().unwrap().get_unwrap(prev_i) })); indexes - .time - .dateindex_to_date + .dateindex + .date .iter() .enumerate() .skip(index.to_usize()) .try_for_each(|(i, d)| -> Result<()> { - let ohlc = if i.to_usize() + 100 >= self.ohlc.dateindex_to_ohlc_in_cents.len() + let ohlc = if i.to_usize() + 100 >= self.cents.ohlc.dateindex.len() && let Ok(mut 
ohlc) = fetcher.get_date(d) { let prev_open = *prev.u().close; @@ -87,15 +85,13 @@ impl Vecs { prev.replace(ohlc.clone()); - self.ohlc - .dateindex_to_ohlc_in_cents - .truncate_push_at(i, ohlc)?; + self.cents.ohlc.dateindex.truncate_push_at(i, ohlc)?; Ok(()) })?; { let _lock = exit.lock(); - self.ohlc.dateindex_to_ohlc_in_cents.write()?; + self.cents.ohlc.dateindex.write()?; } Ok(()) diff --git a/crates/brk_computer/src/price/mod.rs b/crates/brk_computer/src/price/mod.rs index aec9a0a2e..26310613f 100644 --- a/crates/brk_computer/src/price/mod.rs +++ b/crates/brk_computer/src/price/mod.rs @@ -1,11 +1,11 @@ mod compute; mod fetch; -pub mod ohlc; +pub mod cents; pub mod sats; pub mod usd; -pub use ohlc::Vecs as OhlcVecs; +pub use cents::Vecs as CentsVecs; pub use sats::Vecs as SatsVecs; pub use usd::Vecs as UsdVecs; @@ -28,7 +28,7 @@ pub struct Vecs { #[traversable(skip)] pub(crate) fetcher: Option, - pub ohlc: OhlcVecs, + pub cents: CentsVecs, pub usd: UsdVecs, pub sats: SatsVecs, } @@ -61,14 +61,14 @@ impl Vecs { indexes: &indexes::Vecs, fetcher: Option, ) -> brk_error::Result { - let ohlc = OhlcVecs::forced_import(db, version)?; - let usd = UsdVecs::forced_import(db, version, indexes, &ohlc)?; + let cents = CentsVecs::forced_import(db, version)?; + let usd = UsdVecs::forced_import(db, version, indexes)?; let sats = SatsVecs::forced_import(db, version, indexes)?; Ok(Self { db: db.clone(), fetcher, - ohlc, + cents, usd, sats, }) diff --git a/crates/brk_computer/src/price/ohlc/import.rs b/crates/brk_computer/src/price/ohlc/import.rs deleted file mode 100644 index d7cffd70d..000000000 --- a/crates/brk_computer/src/price/ohlc/import.rs +++ /dev/null @@ -1,22 +0,0 @@ -use brk_error::Result; -use brk_types::Version; -use vecdb::{BytesVec, Database, ImportableVec}; - -use super::Vecs; - -impl Vecs { - pub fn forced_import(db: &Database, version: Version) -> Result { - Ok(Self { - dateindex_to_ohlc_in_cents: BytesVec::forced_import( - db, - "ohlc_in_cents", - version, - 
)?, - height_to_ohlc_in_cents: BytesVec::forced_import( - db, - "ohlc_in_cents", - version, - )?, - }) - } -} diff --git a/crates/brk_computer/src/price/ohlc/mod.rs b/crates/brk_computer/src/price/ohlc/mod.rs deleted file mode 100644 index f8623047a..000000000 --- a/crates/brk_computer/src/price/ohlc/mod.rs +++ /dev/null @@ -1,4 +0,0 @@ -mod import; -mod vecs; - -pub use vecs::Vecs; diff --git a/crates/brk_computer/src/price/ohlc/vecs.rs b/crates/brk_computer/src/price/ohlc/vecs.rs deleted file mode 100644 index 0dbdf0367..000000000 --- a/crates/brk_computer/src/price/ohlc/vecs.rs +++ /dev/null @@ -1,9 +0,0 @@ -use brk_traversable::Traversable; -use brk_types::{DateIndex, Height, OHLCCents}; -use vecdb::BytesVec; - -#[derive(Clone, Traversable)] -pub struct Vecs { - pub dateindex_to_ohlc_in_cents: BytesVec, - pub height_to_ohlc_in_cents: BytesVec, -} diff --git a/crates/brk_computer/src/price/sats/compute.rs b/crates/brk_computer/src/price/sats/compute.rs index 1d8e0be17..7137e9e07 100644 --- a/crates/brk_computer/src/price/sats/compute.rs +++ b/crates/brk_computer/src/price/sats/compute.rs @@ -2,8 +2,8 @@ use brk_error::Result; use brk_types::{Close, High, Low, OHLCSats, Open, Sats}; use vecdb::Exit; -use super::Vecs; use super::super::usd; +use super::Vecs; use crate::ComputeIndexes; impl Vecs { @@ -13,90 +13,74 @@ impl Vecs { usd: &usd::Vecs, exit: &Exit, ) -> Result<()> { - // Chain indexes in sats (1 BTC / price) - self.chainindexes_to_price_open_in_sats - .compute(starting_indexes, exit, |v| { + // Open: first-value aggregation (1 BTC / price) + self.split.open.height.compute_transform( + starting_indexes.height, + &usd.split.open.height, + |(i, open, ..)| (i, Open::new(Sats::ONE_BTC / *open)), + exit, + )?; + self.split + .open + .compute_rest(starting_indexes, exit, |v| { v.compute_transform( - starting_indexes.height, - &usd.chainindexes_to_price_open.height, + starting_indexes.dateindex, + &usd.split.open.dateindex, |(i, open, ..)| (i, 
Open::new(Sats::ONE_BTC / *open)), exit, )?; Ok(()) })?; - self.chainindexes_to_price_high_in_sats - .compute(starting_indexes, exit, |v| { + // High: max-value aggregation (sats high = 1 BTC / usd low) + self.split.high.height.compute_transform( + starting_indexes.height, + &usd.split.low.height, + |(i, low, ..)| (i, High::new(Sats::ONE_BTC / *low)), + exit, + )?; + self.split + .high + .compute_rest(starting_indexes, exit, |v| { v.compute_transform( - starting_indexes.height, - &usd.chainindexes_to_price_low.height, + starting_indexes.dateindex, + &usd.split.low.dateindex, |(i, low, ..)| (i, High::new(Sats::ONE_BTC / *low)), exit, )?; Ok(()) })?; - self.chainindexes_to_price_low_in_sats - .compute(starting_indexes, exit, |v| { - v.compute_transform( - starting_indexes.height, - &usd.chainindexes_to_price_high.height, - |(i, high, ..)| (i, Low::new(Sats::ONE_BTC / *high)), - exit, - )?; - Ok(()) - })?; + // Low: min-value aggregation (sats low = 1 BTC / usd high) + self.split.low.height.compute_transform( + starting_indexes.height, + &usd.split.high.height, + |(i, high, ..)| (i, Low::new(Sats::ONE_BTC / *high)), + exit, + )?; + self.split.low.compute_rest(starting_indexes, exit, |v| { + v.compute_transform( + starting_indexes.dateindex, + &usd.split.high.dateindex, + |(i, high, ..)| (i, Low::new(Sats::ONE_BTC / *high)), + exit, + )?; + Ok(()) + })?; - self.chainindexes_to_price_close_in_sats - .compute(starting_indexes, exit, |v| { - v.compute_transform( - starting_indexes.height, - &usd.chainindexes_to_price_close.height, - |(i, close, ..)| (i, Close::new(Sats::ONE_BTC / *close)), - exit, - )?; - Ok(()) - })?; - - // Time indexes in sats - self.timeindexes_to_price_open_in_sats - .compute_all(starting_indexes, exit, |v| { + // Close: last-value aggregation + self.split.close.height.compute_transform( + starting_indexes.height, + &usd.split.close.height, + |(i, close, ..)| (i, Close::new(Sats::ONE_BTC / *close)), + exit, + )?; + self.split + .close + 
.compute_rest(starting_indexes, exit, |v| { v.compute_transform( starting_indexes.dateindex, - &usd.timeindexes_to_price_open.dateindex, - |(i, open, ..)| (i, Open::new(Sats::ONE_BTC / *open)), - exit, - )?; - Ok(()) - })?; - - self.timeindexes_to_price_high_in_sats - .compute_all(starting_indexes, exit, |v| { - v.compute_transform( - starting_indexes.dateindex, - &usd.timeindexes_to_price_low.dateindex, - |(i, low, ..)| (i, High::new(Sats::ONE_BTC / *low)), - exit, - )?; - Ok(()) - })?; - - self.timeindexes_to_price_low_in_sats - .compute_all(starting_indexes, exit, |v| { - v.compute_transform( - starting_indexes.dateindex, - &usd.timeindexes_to_price_high.dateindex, - |(i, high, ..)| (i, Low::new(Sats::ONE_BTC / *high)), - exit, - )?; - Ok(()) - })?; - - self.timeindexes_to_price_close_in_sats - .compute_all(starting_indexes, exit, |v| { - v.compute_transform( - starting_indexes.dateindex, - &usd.timeindexes_to_price_close.dateindex, + &usd.split.close.dateindex, |(i, close, ..)| (i, Close::new(Sats::ONE_BTC / *close)), exit, )?; @@ -104,12 +88,12 @@ impl Vecs { })?; // Height OHLC in sats - self.height_to_price_ohlc_in_sats.compute_transform4( + self.ohlc.height.compute_transform4( starting_indexes.height, - &self.chainindexes_to_price_open_in_sats.height, - &self.chainindexes_to_price_high_in_sats.height, - &self.chainindexes_to_price_low_in_sats.height, - &self.chainindexes_to_price_close_in_sats.height, + &self.split.open.height, + &self.split.high.height, + &self.split.low.height, + &self.split.close.height, |(i, open, high, low, close, _)| { ( i, @@ -125,12 +109,12 @@ impl Vecs { )?; // DateIndex OHLC in sats - self.dateindex_to_price_ohlc_in_sats.compute_transform4( + self.ohlc.dateindex.compute_transform4( starting_indexes.dateindex, - &self.timeindexes_to_price_open_in_sats.dateindex, - &self.timeindexes_to_price_high_in_sats.dateindex, - &self.timeindexes_to_price_low_in_sats.dateindex, - &self.timeindexes_to_price_close_in_sats.dateindex, + 
&self.split.open.dateindex, + &self.split.high.dateindex, + &self.split.low.dateindex, + &self.split.close.dateindex, |(i, open, high, low, close, _)| { ( i, @@ -146,12 +130,12 @@ impl Vecs { )?; // Period OHLC in sats - self.weekindex_to_price_ohlc_in_sats.compute_transform4( + self.ohlc.week.compute_transform4( starting_indexes.weekindex, - &*self.timeindexes_to_price_open_in_sats.weekindex, - &*self.timeindexes_to_price_high_in_sats.weekindex, - &*self.timeindexes_to_price_low_in_sats.weekindex, - &*self.timeindexes_to_price_close_in_sats.weekindex, + &*self.split.open.weekindex, + &*self.split.high.weekindex, + &*self.split.low.weekindex, + &*self.split.close.weekindex, |(i, open, high, low, close, _)| { ( i, @@ -166,33 +150,32 @@ impl Vecs { exit, )?; - self.difficultyepoch_to_price_ohlc_in_sats - .compute_transform4( - starting_indexes.difficultyepoch, - &*self.chainindexes_to_price_open_in_sats.difficultyepoch, - &*self.chainindexes_to_price_high_in_sats.difficultyepoch, - &*self.chainindexes_to_price_low_in_sats.difficultyepoch, - &*self.chainindexes_to_price_close_in_sats.difficultyepoch, - |(i, open, high, low, close, _)| { - ( - i, - OHLCSats { - open, - high, - low, - close, - }, - ) - }, - exit, - )?; + self.ohlc.difficultyepoch.compute_transform4( + starting_indexes.difficultyepoch, + &*self.split.open.difficultyepoch, + &*self.split.high.difficultyepoch, + &*self.split.low.difficultyepoch, + &*self.split.close.difficultyepoch, + |(i, open, high, low, close, _)| { + ( + i, + OHLCSats { + open, + high, + low, + close, + }, + ) + }, + exit, + )?; - self.monthindex_to_price_ohlc_in_sats.compute_transform4( + self.ohlc.month.compute_transform4( starting_indexes.monthindex, - &*self.timeindexes_to_price_open_in_sats.monthindex, - &*self.timeindexes_to_price_high_in_sats.monthindex, - &*self.timeindexes_to_price_low_in_sats.monthindex, - &*self.timeindexes_to_price_close_in_sats.monthindex, + &*self.split.open.monthindex, + &*self.split.high.monthindex, + 
&*self.split.low.monthindex, + &*self.split.close.monthindex, |(i, open, high, low, close, _)| { ( i, @@ -207,12 +190,12 @@ impl Vecs { exit, )?; - self.quarterindex_to_price_ohlc_in_sats.compute_transform4( + self.ohlc.quarter.compute_transform4( starting_indexes.quarterindex, - &*self.timeindexes_to_price_open_in_sats.quarterindex, - &*self.timeindexes_to_price_high_in_sats.quarterindex, - &*self.timeindexes_to_price_low_in_sats.quarterindex, - &*self.timeindexes_to_price_close_in_sats.quarterindex, + &*self.split.open.quarterindex, + &*self.split.high.quarterindex, + &*self.split.low.quarterindex, + &*self.split.close.quarterindex, |(i, open, high, low, close, _)| { ( i, @@ -227,33 +210,32 @@ impl Vecs { exit, )?; - self.semesterindex_to_price_ohlc_in_sats - .compute_transform4( - starting_indexes.semesterindex, - &*self.timeindexes_to_price_open_in_sats.semesterindex, - &*self.timeindexes_to_price_high_in_sats.semesterindex, - &*self.timeindexes_to_price_low_in_sats.semesterindex, - &*self.timeindexes_to_price_close_in_sats.semesterindex, - |(i, open, high, low, close, _)| { - ( - i, - OHLCSats { - open, - high, - low, - close, - }, - ) - }, - exit, - )?; + self.ohlc.semester.compute_transform4( + starting_indexes.semesterindex, + &*self.split.open.semesterindex, + &*self.split.high.semesterindex, + &*self.split.low.semesterindex, + &*self.split.close.semesterindex, + |(i, open, high, low, close, _)| { + ( + i, + OHLCSats { + open, + high, + low, + close, + }, + ) + }, + exit, + )?; - self.yearindex_to_price_ohlc_in_sats.compute_transform4( + self.ohlc.year.compute_transform4( starting_indexes.yearindex, - &*self.timeindexes_to_price_open_in_sats.yearindex, - &*self.timeindexes_to_price_high_in_sats.yearindex, - &*self.timeindexes_to_price_low_in_sats.yearindex, - &*self.timeindexes_to_price_close_in_sats.yearindex, + &*self.split.open.yearindex, + &*self.split.high.yearindex, + &*self.split.low.yearindex, + &*self.split.close.yearindex, |(i, open, high, low, 
close, _)| { ( i, @@ -268,12 +250,12 @@ impl Vecs { exit, )?; - self.decadeindex_to_price_ohlc_in_sats.compute_transform4( + self.ohlc.decade.compute_transform4( starting_indexes.decadeindex, - &*self.timeindexes_to_price_open_in_sats.decadeindex, - &*self.timeindexes_to_price_high_in_sats.decadeindex, - &*self.timeindexes_to_price_low_in_sats.decadeindex, - &*self.timeindexes_to_price_close_in_sats.decadeindex, + &*self.split.open.decadeindex, + &*self.split.high.decadeindex, + &*self.split.low.decadeindex, + &*self.split.close.decadeindex, |(i, open, high, low, close, _)| { ( i, diff --git a/crates/brk_computer/src/price/sats/import.rs b/crates/brk_computer/src/price/sats/import.rs index 2443c3676..7c45915c2 100644 --- a/crates/brk_computer/src/price/sats/import.rs +++ b/crates/brk_computer/src/price/sats/import.rs @@ -5,108 +5,24 @@ use vecdb::{Database, EagerVec, ImportableVec}; use super::Vecs; use crate::{ indexes, - internal::{ - ComputedChainFirst, ComputedChainLast, ComputedChainMax, ComputedChainMin, - ComputedDateLast, ComputedVecsDateFirst, ComputedVecsDateMax, ComputedVecsDateMin, - }, + internal::{OHLCComputedVecs, OHLCPeriodVecs}, }; impl Vecs { pub fn forced_import(db: &Database, version: Version, indexes: &indexes::Vecs) -> Result { Ok(Self { - dateindex_to_price_ohlc_in_sats: EagerVec::forced_import( - db, - "price_ohlc_in_sats", - version, - )?, - height_to_price_ohlc_in_sats: EagerVec::forced_import( - db, - "price_ohlc_in_sats", - version, - )?, - timeindexes_to_price_open_in_sats: ComputedVecsDateFirst::forced_import( - db, - "price_open_in_sats", - version, - indexes, - )?, - timeindexes_to_price_high_in_sats: ComputedVecsDateMax::forced_import( - db, - "price_high_in_sats", - version, - indexes, - )?, - timeindexes_to_price_low_in_sats: ComputedVecsDateMin::forced_import( - db, - "price_low_in_sats", - version, - indexes, - )?, - timeindexes_to_price_close_in_sats: ComputedDateLast::forced_import( - db, - "price_close_in_sats", - version, - 
indexes, - )?, - chainindexes_to_price_open_in_sats: ComputedChainFirst::forced_import( - db, - "price_open_in_sats", - version, - indexes, - )?, - chainindexes_to_price_high_in_sats: ComputedChainMax::forced_import( - db, - "price_high_in_sats", - version, - indexes, - )?, - chainindexes_to_price_low_in_sats: ComputedChainMin::forced_import( - db, - "price_low_in_sats", - version, - indexes, - )?, - chainindexes_to_price_close_in_sats: ComputedChainLast::forced_import( - db, - "price_close_in_sats", - version, - indexes, - )?, - weekindex_to_price_ohlc_in_sats: EagerVec::forced_import( - db, - "price_ohlc_in_sats", - version, - )?, - difficultyepoch_to_price_ohlc_in_sats: EagerVec::forced_import( - db, - "price_ohlc_in_sats", - version, - )?, - monthindex_to_price_ohlc_in_sats: EagerVec::forced_import( - db, - "price_ohlc_in_sats", - version, - )?, - quarterindex_to_price_ohlc_in_sats: EagerVec::forced_import( - db, - "price_ohlc_in_sats", - version, - )?, - semesterindex_to_price_ohlc_in_sats: EagerVec::forced_import( - db, - "price_ohlc_in_sats", - version, - )?, - yearindex_to_price_ohlc_in_sats: EagerVec::forced_import( - db, - "price_ohlc_in_sats", - version, - )?, - decadeindex_to_price_ohlc_in_sats: EagerVec::forced_import( - db, - "price_ohlc_in_sats", - version, - )?, + split: OHLCComputedVecs::forced_import(db, "price_sats", version, indexes)?, + ohlc: OHLCPeriodVecs { + dateindex: EagerVec::forced_import(db, "price_ohlc_sats", version)?, + week: EagerVec::forced_import(db, "price_ohlc_sats", version)?, + month: EagerVec::forced_import(db, "price_ohlc_sats", version)?, + quarter: EagerVec::forced_import(db, "price_ohlc_sats", version)?, + semester: EagerVec::forced_import(db, "price_ohlc_sats", version)?, + year: EagerVec::forced_import(db, "price_ohlc_sats", version)?, + decade: EagerVec::forced_import(db, "price_ohlc_sats", version)?, + height: EagerVec::forced_import(db, "price_ohlc_sats", version)?, + difficultyepoch: EagerVec::forced_import(db, 
"price_ohlc_sats", version)?, + }, }) } } diff --git a/crates/brk_computer/src/price/sats/vecs.rs b/crates/brk_computer/src/price/sats/vecs.rs index 28d1d4578..9c585a2ec 100644 --- a/crates/brk_computer/src/price/sats/vecs.rs +++ b/crates/brk_computer/src/price/sats/vecs.rs @@ -1,39 +1,10 @@ use brk_traversable::Traversable; -use brk_types::{ - Close, DateIndex, DecadeIndex, DifficultyEpoch, Height, High, Low, MonthIndex, OHLCSats, Open, - QuarterIndex, Sats, SemesterIndex, WeekIndex, YearIndex, -}; -use vecdb::{BytesVec, EagerVec}; +use brk_types::{OHLCSats, Sats}; -use crate::internal::{ - ComputedChainFirst, ComputedChainLast, ComputedChainMax, ComputedChainMin, ComputedDateLast, - ComputedVecsDateFirst, ComputedVecsDateMax, ComputedVecsDateMin, -}; +use crate::internal::{OHLCComputedVecs, OHLCPeriodVecs}; #[derive(Clone, Traversable)] pub struct Vecs { - // OHLC in sats - pub dateindex_to_price_ohlc_in_sats: EagerVec>, - pub height_to_price_ohlc_in_sats: EagerVec>, - - // Computed time indexes in sats - pub timeindexes_to_price_open_in_sats: ComputedVecsDateFirst>, - pub timeindexes_to_price_high_in_sats: ComputedVecsDateMax>, - pub timeindexes_to_price_low_in_sats: ComputedVecsDateMin>, - pub timeindexes_to_price_close_in_sats: ComputedDateLast>, - - // Computed chain indexes in sats (KISS types) - pub chainindexes_to_price_open_in_sats: ComputedChainFirst>, - pub chainindexes_to_price_high_in_sats: ComputedChainMax>, - pub chainindexes_to_price_low_in_sats: ComputedChainMin>, - pub chainindexes_to_price_close_in_sats: ComputedChainLast>, - - // Period OHLC in sats - pub weekindex_to_price_ohlc_in_sats: EagerVec>, - pub difficultyepoch_to_price_ohlc_in_sats: EagerVec>, - pub monthindex_to_price_ohlc_in_sats: EagerVec>, - pub quarterindex_to_price_ohlc_in_sats: EagerVec>, - pub semesterindex_to_price_ohlc_in_sats: EagerVec>, - pub yearindex_to_price_ohlc_in_sats: EagerVec>, - pub decadeindex_to_price_ohlc_in_sats: EagerVec>, + pub split: OHLCComputedVecs, + pub 
ohlc: OHLCPeriodVecs, } diff --git a/crates/brk_computer/src/price/usd/compute.rs b/crates/brk_computer/src/price/usd/compute.rs index b5c7a8c32..39b3ad5c1 100644 --- a/crates/brk_computer/src/price/usd/compute.rs +++ b/crates/brk_computer/src/price/usd/compute.rs @@ -1,7 +1,8 @@ use brk_error::Result; -use brk_types::OHLCDollars; +use brk_types::{Close, Dollars, High, Low, OHLCDollars, Open}; use vecdb::Exit; +use super::super::cents; use super::Vecs; use crate::ComputeIndexes; @@ -9,105 +10,104 @@ impl Vecs { pub fn compute( &mut self, starting_indexes: &ComputeIndexes, + cents: ¢s::Vecs, exit: &Exit, ) -> Result<()> { - // Timeindexes computed vecs - self.timeindexes_to_price_close - .compute_all(starting_indexes, exit, |v| { - v.compute_transform( - starting_indexes.dateindex, - &self.dateindex_to_price_ohlc, - |(di, ohlc, ..)| (di, ohlc.close), - exit, - )?; - Ok(()) - })?; + // Open: first-value aggregation + self.split.open.height.compute_transform( + starting_indexes.height, + ¢s.split.height.open, + |(h, open, ..)| (h, Open::new(Dollars::from(*open))), + exit, + )?; + self.split.open.compute_rest(starting_indexes, exit, |v| { + v.compute_transform( + starting_indexes.dateindex, + ¢s.split.dateindex.open, + |(di, open, ..)| (di, Open::new(Dollars::from(*open))), + exit, + )?; + Ok(()) + })?; - self.timeindexes_to_price_high - .compute_all(starting_indexes, exit, |v| { - v.compute_transform( - starting_indexes.dateindex, - &self.dateindex_to_price_ohlc, - |(di, ohlc, ..)| (di, ohlc.high), - exit, - )?; - Ok(()) - })?; + // High: max-value aggregation + self.split.high.height.compute_transform( + starting_indexes.height, + ¢s.split.height.high, + |(h, high, ..)| (h, High::new(Dollars::from(*high))), + exit, + )?; + self.split.high.compute_rest(starting_indexes, exit, |v| { + v.compute_transform( + starting_indexes.dateindex, + ¢s.split.dateindex.high, + |(di, high, ..)| (di, High::new(Dollars::from(*high))), + exit, + )?; + Ok(()) + })?; - 
self.timeindexes_to_price_low - .compute_all(starting_indexes, exit, |v| { - v.compute_transform( - starting_indexes.dateindex, - &self.dateindex_to_price_ohlc, - |(di, ohlc, ..)| (di, ohlc.low), - exit, - )?; - Ok(()) - })?; + // Low: min-value aggregation + self.split.low.height.compute_transform( + starting_indexes.height, + ¢s.split.height.low, + |(h, low, ..)| (h, Low::new(Dollars::from(*low))), + exit, + )?; + self.split.low.compute_rest(starting_indexes, exit, |v| { + v.compute_transform( + starting_indexes.dateindex, + ¢s.split.dateindex.low, + |(di, low, ..)| (di, Low::new(Dollars::from(*low))), + exit, + )?; + Ok(()) + })?; - self.timeindexes_to_price_open - .compute_all(starting_indexes, exit, |v| { - v.compute_transform( - starting_indexes.dateindex, - &self.dateindex_to_price_ohlc, - |(di, ohlc, ..)| (di, ohlc.open), - exit, - )?; - Ok(()) - })?; + // Close: last-value aggregation + self.split.close.height.compute_transform( + starting_indexes.height, + ¢s.split.height.close, + |(h, close, ..)| (h, Close::new(Dollars::from(*close))), + exit, + )?; + self.split.close.compute_rest(starting_indexes, exit, |v| { + v.compute_transform( + starting_indexes.dateindex, + ¢s.split.dateindex.close, + |(di, close, ..)| (di, Close::new(Dollars::from(*close))), + exit, + )?; + Ok(()) + })?; - // Chainindexes computed vecs - self.chainindexes_to_price_close - .compute(starting_indexes, exit, |v| { - v.compute_transform( - starting_indexes.height, - &self.height_to_price_ohlc, - |(h, ohlc, ..)| (h, ohlc.close), - exit, - )?; - Ok(()) - })?; + // Period OHLC aggregates - time based + self.ohlc.dateindex.compute_transform4( + starting_indexes.dateindex, + &self.split.open.dateindex, + &self.split.high.dateindex, + &self.split.low.dateindex, + &self.split.close.dateindex, + |(i, open, high, low, close, _)| { + ( + i, + OHLCDollars { + open, + high, + low, + close, + }, + ) + }, + exit, + )?; - self.chainindexes_to_price_high - .compute(starting_indexes, exit, |v| { - 
v.compute_transform( - starting_indexes.height, - &self.height_to_price_ohlc, - |(h, ohlc, ..)| (h, ohlc.high), - exit, - )?; - Ok(()) - })?; - - self.chainindexes_to_price_low - .compute(starting_indexes, exit, |v| { - v.compute_transform( - starting_indexes.height, - &self.height_to_price_ohlc, - |(h, ohlc, ..)| (h, ohlc.low), - exit, - )?; - Ok(()) - })?; - - self.chainindexes_to_price_open - .compute(starting_indexes, exit, |v| { - v.compute_transform( - starting_indexes.height, - &self.height_to_price_ohlc, - |(h, ohlc, ..)| (h, ohlc.open), - exit, - )?; - Ok(()) - })?; - - // Period OHLC aggregates - self.weekindex_to_price_ohlc.compute_transform4( + self.ohlc.week.compute_transform4( starting_indexes.weekindex, - &*self.timeindexes_to_price_open.weekindex, - &*self.timeindexes_to_price_high.weekindex, - &*self.timeindexes_to_price_low.weekindex, - &*self.timeindexes_to_price_close.weekindex, + &*self.split.open.weekindex, + &*self.split.high.weekindex, + &*self.split.low.weekindex, + &*self.split.close.weekindex, |(i, open, high, low, close, _)| { ( i, @@ -122,32 +122,12 @@ impl Vecs { exit, )?; - self.difficultyepoch_to_price_ohlc.compute_transform4( - starting_indexes.difficultyepoch, - &*self.chainindexes_to_price_open.difficultyepoch, - &*self.chainindexes_to_price_high.difficultyepoch, - &*self.chainindexes_to_price_low.difficultyepoch, - &*self.chainindexes_to_price_close.difficultyepoch, - |(i, open, high, low, close, _)| { - ( - i, - OHLCDollars { - open, - high, - low, - close, - }, - ) - }, - exit, - )?; - - self.monthindex_to_price_ohlc.compute_transform4( + self.ohlc.month.compute_transform4( starting_indexes.monthindex, - &*self.timeindexes_to_price_open.monthindex, - &*self.timeindexes_to_price_high.monthindex, - &*self.timeindexes_to_price_low.monthindex, - &*self.timeindexes_to_price_close.monthindex, + &*self.split.open.monthindex, + &*self.split.high.monthindex, + &*self.split.low.monthindex, + &*self.split.close.monthindex, |(i, open, 
high, low, close, _)| { ( i, @@ -162,12 +142,12 @@ impl Vecs { exit, )?; - self.quarterindex_to_price_ohlc.compute_transform4( + self.ohlc.quarter.compute_transform4( starting_indexes.quarterindex, - &*self.timeindexes_to_price_open.quarterindex, - &*self.timeindexes_to_price_high.quarterindex, - &*self.timeindexes_to_price_low.quarterindex, - &*self.timeindexes_to_price_close.quarterindex, + &*self.split.open.quarterindex, + &*self.split.high.quarterindex, + &*self.split.low.quarterindex, + &*self.split.close.quarterindex, |(i, open, high, low, close, _)| { ( i, @@ -182,12 +162,12 @@ impl Vecs { exit, )?; - self.semesterindex_to_price_ohlc.compute_transform4( + self.ohlc.semester.compute_transform4( starting_indexes.semesterindex, - &*self.timeindexes_to_price_open.semesterindex, - &*self.timeindexes_to_price_high.semesterindex, - &*self.timeindexes_to_price_low.semesterindex, - &*self.timeindexes_to_price_close.semesterindex, + &*self.split.open.semesterindex, + &*self.split.high.semesterindex, + &*self.split.low.semesterindex, + &*self.split.close.semesterindex, |(i, open, high, low, close, _)| { ( i, @@ -202,12 +182,12 @@ impl Vecs { exit, )?; - self.yearindex_to_price_ohlc.compute_transform4( + self.ohlc.year.compute_transform4( starting_indexes.yearindex, - &*self.timeindexes_to_price_open.yearindex, - &*self.timeindexes_to_price_high.yearindex, - &*self.timeindexes_to_price_low.yearindex, - &*self.timeindexes_to_price_close.yearindex, + &*self.split.open.yearindex, + &*self.split.high.yearindex, + &*self.split.low.yearindex, + &*self.split.close.yearindex, |(i, open, high, low, close, _)| { ( i, @@ -222,12 +202,53 @@ impl Vecs { exit, )?; - self.decadeindex_to_price_ohlc.compute_transform4( + self.ohlc.decade.compute_transform4( starting_indexes.decadeindex, - &*self.timeindexes_to_price_open.decadeindex, - &*self.timeindexes_to_price_high.decadeindex, - &*self.timeindexes_to_price_low.decadeindex, - &*self.timeindexes_to_price_close.decadeindex, + 
&*self.split.open.decadeindex, + &*self.split.high.decadeindex, + &*self.split.low.decadeindex, + &*self.split.close.decadeindex, + |(i, open, high, low, close, _)| { + ( + i, + OHLCDollars { + open, + high, + low, + close, + }, + ) + }, + exit, + )?; + + // Period OHLC aggregates - chain based + self.ohlc.height.compute_transform4( + starting_indexes.height, + &self.split.open.height, + &self.split.high.height, + &self.split.low.height, + &self.split.close.height, + |(i, open, high, low, close, _)| { + ( + i, + OHLCDollars { + open, + high, + low, + close, + }, + ) + }, + exit, + )?; + + self.ohlc.difficultyepoch.compute_transform4( + starting_indexes.difficultyepoch, + &*self.split.open.difficultyepoch, + &*self.split.high.difficultyepoch, + &*self.split.low.difficultyepoch, + &*self.split.close.difficultyepoch, |(i, open, high, low, close, _)| { ( i, diff --git a/crates/brk_computer/src/price/usd/import.rs b/crates/brk_computer/src/price/usd/import.rs index e5f16d45c..c10378453 100644 --- a/crates/brk_computer/src/price/usd/import.rs +++ b/crates/brk_computer/src/price/usd/import.rs @@ -1,160 +1,28 @@ use brk_error::Result; -use brk_types::{DateIndex, Height, OHLCDollars, Version}; -use vecdb::{Database, EagerVec, ImportableVec, IterableCloneableVec, LazyVecFrom1}; +use brk_types::Version; +use vecdb::{Database, EagerVec, ImportableVec}; -use super::super::ohlc; use super::Vecs; use crate::{ indexes, - internal::{ - ComputedChainFirst, ComputedChainLast, ComputedChainMax, ComputedChainMin, - ComputedDateLast, ComputedVecsDateFirst, ComputedVecsDateMax, ComputedVecsDateMin, - }, + internal::{OHLCComputedVecs, OHLCPeriodVecs}, }; impl Vecs { - pub fn forced_import( - db: &Database, - version: Version, - indexes: &indexes::Vecs, - ohlc: &ohlc::Vecs, - ) -> Result { - let height_to_price_ohlc = LazyVecFrom1::init( - "price_ohlc", - version, - ohlc.height_to_ohlc_in_cents.boxed_clone(), - |height: Height, ohlc_iter| ohlc_iter.get(height).map(OHLCDollars::from), - ); 
- - let height_to_price_open_in_cents = LazyVecFrom1::init( - "price_open_in_cents", - version, - ohlc.height_to_ohlc_in_cents.boxed_clone(), - |height: Height, ohlc_iter| ohlc_iter.get(height).map(|o| o.open), - ); - - let height_to_price_high_in_cents = LazyVecFrom1::init( - "price_high_in_cents", - version, - ohlc.height_to_ohlc_in_cents.boxed_clone(), - |height: Height, ohlc_iter| ohlc_iter.get(height).map(|o| o.high), - ); - - let height_to_price_low_in_cents = LazyVecFrom1::init( - "price_low_in_cents", - version, - ohlc.height_to_ohlc_in_cents.boxed_clone(), - |height: Height, ohlc_iter| ohlc_iter.get(height).map(|o| o.low), - ); - - let height_to_price_close_in_cents = LazyVecFrom1::init( - "price_close_in_cents", - version, - ohlc.height_to_ohlc_in_cents.boxed_clone(), - |height: Height, ohlc_iter| ohlc_iter.get(height).map(|o| o.close), - ); - - let dateindex_to_price_open_in_cents = LazyVecFrom1::init( - "price_open_in_cents", - version, - ohlc.dateindex_to_ohlc_in_cents.boxed_clone(), - |di: DateIndex, ohlc_iter| ohlc_iter.get(di).map(|o| o.open), - ); - - let dateindex_to_price_high_in_cents = LazyVecFrom1::init( - "price_high_in_cents", - version, - ohlc.dateindex_to_ohlc_in_cents.boxed_clone(), - |di: DateIndex, ohlc_iter| ohlc_iter.get(di).map(|o| o.high), - ); - - let dateindex_to_price_low_in_cents = LazyVecFrom1::init( - "price_low_in_cents", - version, - ohlc.dateindex_to_ohlc_in_cents.boxed_clone(), - |di: DateIndex, ohlc_iter| ohlc_iter.get(di).map(|o| o.low), - ); - - let dateindex_to_price_close_in_cents = LazyVecFrom1::init( - "price_close_in_cents", - version, - ohlc.dateindex_to_ohlc_in_cents.boxed_clone(), - |di: DateIndex, ohlc_iter| ohlc_iter.get(di).map(|o| o.close), - ); - - let dateindex_to_price_ohlc = LazyVecFrom1::init( - "price_ohlc", - version, - ohlc.dateindex_to_ohlc_in_cents.boxed_clone(), - |di: DateIndex, ohlc_iter| ohlc_iter.get(di).map(OHLCDollars::from), - ); - + pub fn forced_import(db: &Database, version: Version, 
indexes: &indexes::Vecs) -> Result { Ok(Self { - dateindex_to_price_ohlc, - dateindex_to_price_close_in_cents, - dateindex_to_price_high_in_cents, - dateindex_to_price_low_in_cents, - dateindex_to_price_open_in_cents, - height_to_price_close_in_cents, - height_to_price_high_in_cents, - height_to_price_low_in_cents, - height_to_price_open_in_cents, - timeindexes_to_price_open: ComputedVecsDateFirst::forced_import( - db, - "price_open", - version, - indexes, - )?, - timeindexes_to_price_high: ComputedVecsDateMax::forced_import( - db, - "price_high", - version, - indexes, - )?, - timeindexes_to_price_low: ComputedVecsDateMin::forced_import( - db, - "price_low", - version, - indexes, - )?, - timeindexes_to_price_close: ComputedDateLast::forced_import( - db, - "price_close", - version, - indexes, - )?, - chainindexes_to_price_open: ComputedChainFirst::forced_import( - db, - "price_open", - version, - indexes, - )?, - chainindexes_to_price_high: ComputedChainMax::forced_import( - db, - "price_high", - version, - indexes, - )?, - chainindexes_to_price_low: ComputedChainMin::forced_import( - db, - "price_low", - version, - indexes, - )?, - chainindexes_to_price_close: ComputedChainLast::forced_import( - db, - "price_close", - version, - indexes, - )?, - weekindex_to_price_ohlc: EagerVec::forced_import(db, "price_ohlc", version)?, - difficultyepoch_to_price_ohlc: EagerVec::forced_import(db, "price_ohlc", version)?, - monthindex_to_price_ohlc: EagerVec::forced_import(db, "price_ohlc", version)?, - quarterindex_to_price_ohlc: EagerVec::forced_import(db, "price_ohlc", version)?, - semesterindex_to_price_ohlc: EagerVec::forced_import(db, "price_ohlc", version)?, - yearindex_to_price_ohlc: EagerVec::forced_import(db, "price_ohlc", version)?, - decadeindex_to_price_ohlc: EagerVec::forced_import(db, "price_ohlc", version)?, - height_to_price_ohlc, + split: OHLCComputedVecs::forced_import(db, "price", version, indexes)?, + ohlc: OHLCPeriodVecs { + dateindex: 
EagerVec::forced_import(db, "price_ohlc", version)?, + week: EagerVec::forced_import(db, "price_ohlc", version)?, + month: EagerVec::forced_import(db, "price_ohlc", version)?, + quarter: EagerVec::forced_import(db, "price_ohlc", version)?, + semester: EagerVec::forced_import(db, "price_ohlc", version)?, + year: EagerVec::forced_import(db, "price_ohlc", version)?, + decade: EagerVec::forced_import(db, "price_ohlc", version)?, + height: EagerVec::forced_import(db, "price_ohlc", version)?, + difficultyepoch: EagerVec::forced_import(db, "price_ohlc", version)?, + }, }) } } diff --git a/crates/brk_computer/src/price/usd/vecs.rs b/crates/brk_computer/src/price/usd/vecs.rs index 997dc9f26..136ff6a4f 100644 --- a/crates/brk_computer/src/price/usd/vecs.rs +++ b/crates/brk_computer/src/price/usd/vecs.rs @@ -1,52 +1,10 @@ use brk_traversable::Traversable; -use brk_types::{ - Cents, Close, DateIndex, DecadeIndex, DifficultyEpoch, Dollars, Height, High, Low, MonthIndex, - OHLCCents, OHLCDollars, Open, QuarterIndex, SemesterIndex, WeekIndex, YearIndex, -}; -use vecdb::{BytesVec, EagerVec, LazyVecFrom1}; +use brk_types::{Dollars, OHLCDollars}; -use crate::internal::{ - ComputedChainFirst, ComputedChainLast, ComputedChainMax, ComputedChainMin, ComputedDateLast, - ComputedVecsDateFirst, ComputedVecsDateMax, ComputedVecsDateMin, -}; +use crate::internal::{OHLCComputedVecs, OHLCPeriodVecs}; #[derive(Clone, Traversable)] pub struct Vecs { - // Derived price data in cents - pub dateindex_to_price_close_in_cents: - LazyVecFrom1, DateIndex, OHLCCents>, - pub dateindex_to_price_high_in_cents: - LazyVecFrom1, DateIndex, OHLCCents>, - pub dateindex_to_price_low_in_cents: LazyVecFrom1, DateIndex, OHLCCents>, - pub dateindex_to_price_open_in_cents: - LazyVecFrom1, DateIndex, OHLCCents>, - pub height_to_price_close_in_cents: LazyVecFrom1, Height, OHLCCents>, - pub height_to_price_high_in_cents: LazyVecFrom1, Height, OHLCCents>, - pub height_to_price_low_in_cents: LazyVecFrom1, Height, 
OHLCCents>, - pub height_to_price_open_in_cents: LazyVecFrom1, Height, OHLCCents>, - - // OHLC in dollars - pub dateindex_to_price_ohlc: LazyVecFrom1, - pub height_to_price_ohlc: LazyVecFrom1, - - // Computed time indexes - pub timeindexes_to_price_close: ComputedDateLast>, - pub timeindexes_to_price_high: ComputedVecsDateMax>, - pub timeindexes_to_price_low: ComputedVecsDateMin>, - pub timeindexes_to_price_open: ComputedVecsDateFirst>, - - // Computed chain indexes (KISS types) - pub chainindexes_to_price_close: ComputedChainLast>, - pub chainindexes_to_price_high: ComputedChainMax>, - pub chainindexes_to_price_low: ComputedChainMin>, - pub chainindexes_to_price_open: ComputedChainFirst>, - - // Period OHLC - pub weekindex_to_price_ohlc: EagerVec>, - pub difficultyepoch_to_price_ohlc: EagerVec>, - pub monthindex_to_price_ohlc: EagerVec>, - pub quarterindex_to_price_ohlc: EagerVec>, - pub semesterindex_to_price_ohlc: EagerVec>, - pub yearindex_to_price_ohlc: EagerVec>, - pub decadeindex_to_price_ohlc: EagerVec>, + pub split: OHLCComputedVecs, + pub ohlc: OHLCPeriodVecs, } diff --git a/crates/brk_computer/src/scripts/count/compute.rs b/crates/brk_computer/src/scripts/count/compute.rs index 8297c8047..e7b4c1da4 100644 --- a/crates/brk_computer/src/scripts/count/compute.rs +++ b/crates/brk_computer/src/scripts/count/compute.rs @@ -4,7 +4,7 @@ use brk_types::StoredU64; use vecdb::{Exit, TypedVecIterator}; use super::Vecs; -use crate::{indexes, ComputeIndexes}; +use crate::{ComputeIndexes, indexes}; impl Vecs { pub fn compute( @@ -14,147 +14,146 @@ impl Vecs { starting_indexes: &ComputeIndexes, exit: &Exit, ) -> Result<()> { - self.indexes_to_p2a_count + self.p2a.compute_all(indexes, starting_indexes, exit, |v| { + v.compute_count_from_indexes( + starting_indexes.height, + &indexer.vecs.addresses.first_p2aaddressindex, + &indexer.vecs.addresses.p2abytes, + exit, + )?; + Ok(()) + })?; + + self.p2ms .compute_all(indexes, starting_indexes, exit, |v| { 
v.compute_count_from_indexes( starting_indexes.height, - &indexer.vecs.address.height_to_first_p2aaddressindex, - &indexer.vecs.address.p2aaddressindex_to_p2abytes, + &indexer.vecs.scripts.first_p2msoutputindex, + &indexer.vecs.scripts.p2ms_to_txindex, exit, )?; Ok(()) })?; - self.indexes_to_p2ms_count + self.p2pk33 .compute_all(indexes, starting_indexes, exit, |v| { v.compute_count_from_indexes( starting_indexes.height, - &indexer.vecs.output.height_to_first_p2msoutputindex, - &indexer.vecs.output.p2msoutputindex_to_txindex, + &indexer.vecs.addresses.first_p2pk33addressindex, + &indexer.vecs.addresses.p2pk33bytes, exit, )?; Ok(()) })?; - self.indexes_to_p2pk33_count + self.p2pk65 .compute_all(indexes, starting_indexes, exit, |v| { v.compute_count_from_indexes( starting_indexes.height, - &indexer.vecs.address.height_to_first_p2pk33addressindex, - &indexer.vecs.address.p2pk33addressindex_to_p2pk33bytes, + &indexer.vecs.addresses.first_p2pk65addressindex, + &indexer.vecs.addresses.p2pk65bytes, exit, )?; Ok(()) })?; - self.indexes_to_p2pk65_count + self.p2pkh .compute_all(indexes, starting_indexes, exit, |v| { v.compute_count_from_indexes( starting_indexes.height, - &indexer.vecs.address.height_to_first_p2pk65addressindex, - &indexer.vecs.address.p2pk65addressindex_to_p2pk65bytes, + &indexer.vecs.addresses.first_p2pkhaddressindex, + &indexer.vecs.addresses.p2pkhbytes, exit, )?; Ok(()) })?; - self.indexes_to_p2pkh_count + self.p2sh .compute_all(indexes, starting_indexes, exit, |v| { v.compute_count_from_indexes( starting_indexes.height, - &indexer.vecs.address.height_to_first_p2pkhaddressindex, - &indexer.vecs.address.p2pkhaddressindex_to_p2pkhbytes, + &indexer.vecs.addresses.first_p2shaddressindex, + &indexer.vecs.addresses.p2shbytes, exit, )?; Ok(()) })?; - self.indexes_to_p2sh_count + self.p2tr .compute_all(indexes, starting_indexes, exit, |v| { v.compute_count_from_indexes( starting_indexes.height, - &indexer.vecs.address.height_to_first_p2shaddressindex, - 
&indexer.vecs.address.p2shaddressindex_to_p2shbytes, + &indexer.vecs.addresses.first_p2traddressindex, + &indexer.vecs.addresses.p2trbytes, exit, )?; Ok(()) })?; - self.indexes_to_p2tr_count + self.p2wpkh .compute_all(indexes, starting_indexes, exit, |v| { v.compute_count_from_indexes( starting_indexes.height, - &indexer.vecs.address.height_to_first_p2traddressindex, - &indexer.vecs.address.p2traddressindex_to_p2trbytes, + &indexer.vecs.addresses.first_p2wpkhaddressindex, + &indexer.vecs.addresses.p2wpkhbytes, exit, )?; Ok(()) })?; - self.indexes_to_p2wpkh_count + self.p2wsh .compute_all(indexes, starting_indexes, exit, |v| { v.compute_count_from_indexes( starting_indexes.height, - &indexer.vecs.address.height_to_first_p2wpkhaddressindex, - &indexer.vecs.address.p2wpkhaddressindex_to_p2wpkhbytes, + &indexer.vecs.addresses.first_p2wshaddressindex, + &indexer.vecs.addresses.p2wshbytes, exit, )?; Ok(()) })?; - self.indexes_to_p2wsh_count + self.opreturn .compute_all(indexes, starting_indexes, exit, |v| { v.compute_count_from_indexes( starting_indexes.height, - &indexer.vecs.address.height_to_first_p2wshaddressindex, - &indexer.vecs.address.p2wshaddressindex_to_p2wshbytes, + &indexer.vecs.scripts.first_opreturnindex, + &indexer.vecs.scripts.opreturn_to_txindex, exit, )?; Ok(()) })?; - self.indexes_to_opreturn_count + self.unknownoutput .compute_all(indexes, starting_indexes, exit, |v| { v.compute_count_from_indexes( starting_indexes.height, - &indexer.vecs.output.height_to_first_opreturnindex, - &indexer.vecs.output.opreturnindex_to_txindex, + &indexer.vecs.scripts.first_unknownoutputindex, + &indexer.vecs.scripts.unknown_to_txindex, exit, )?; Ok(()) })?; - self.indexes_to_unknownoutput_count + self.emptyoutput .compute_all(indexes, starting_indexes, exit, |v| { v.compute_count_from_indexes( starting_indexes.height, - &indexer.vecs.output.height_to_first_unknownoutputindex, - &indexer.vecs.output.unknownoutputindex_to_txindex, + 
&indexer.vecs.scripts.first_emptyoutputindex, + &indexer.vecs.scripts.empty_to_txindex, exit, )?; Ok(()) })?; - self.indexes_to_emptyoutput_count - .compute_all(indexes, starting_indexes, exit, |v| { - v.compute_count_from_indexes( - starting_indexes.height, - &indexer.vecs.output.height_to_first_emptyoutputindex, - &indexer.vecs.output.emptyoutputindex_to_txindex, - exit, - )?; - Ok(()) - })?; + // Compute segwit = p2wpkh + p2wsh + p2tr + let mut p2wsh_iter = self.p2wsh.height.into_iter(); + let mut p2tr_iter = self.p2tr.height.into_iter(); - // Compute segwit_count = p2wpkh + p2wsh + p2tr - let mut p2wsh_iter = self.indexes_to_p2wsh_count.height.into_iter(); - let mut p2tr_iter = self.indexes_to_p2tr_count.height.into_iter(); - - self.indexes_to_segwit_count + self.segwit .compute_all(indexes, starting_indexes, exit, |v| { v.compute_transform( starting_indexes.height, - &self.indexes_to_p2wpkh_count.height, + &self.p2wpkh.height, |(h, p2wpkh, ..)| { let sum = *p2wpkh + *p2wsh_iter.get_unwrap(h) + *p2tr_iter.get_unwrap(h); (h, StoredU64::from(sum)) diff --git a/crates/brk_computer/src/scripts/count/import.rs b/crates/brk_computer/src/scripts/count/import.rs index f422227c8..61485e567 100644 --- a/crates/brk_computer/src/scripts/count/import.rs +++ b/crates/brk_computer/src/scripts/count/import.rs @@ -5,9 +5,7 @@ use vecdb::{Database, IterableCloneableVec}; use super::Vecs; use crate::{ indexes, - internal::{ - ComputedBlockFull, BinaryBlockFull, PercentageU64F32, - }, + internal::{BinaryBlockFull, ComputedBlockFull, PercentageU64F32}, outputs, }; @@ -18,122 +16,65 @@ impl Vecs { indexes: &indexes::Vecs, outputs: &outputs::Vecs, ) -> Result { - let indexes_to_p2a_count = ComputedBlockFull::forced_import( - db, - "p2a_count", - version, - indexes, - )?; - let indexes_to_p2ms_count = ComputedBlockFull::forced_import( - db, - "p2ms_count", - version, - indexes, - )?; - let indexes_to_p2pk33_count = ComputedBlockFull::forced_import( - db, - "p2pk33_count", - version, - 
indexes, - )?; - let indexes_to_p2pk65_count = ComputedBlockFull::forced_import( - db, - "p2pk65_count", - version, - indexes, - )?; - let indexes_to_p2pkh_count = ComputedBlockFull::forced_import( - db, - "p2pkh_count", - version, - indexes, - )?; - let indexes_to_p2sh_count = ComputedBlockFull::forced_import( - db, - "p2sh_count", - version, - indexes, - )?; - let indexes_to_p2tr_count = ComputedBlockFull::forced_import( - db, - "p2tr_count", - version, - indexes, - )?; - let indexes_to_p2wpkh_count = ComputedBlockFull::forced_import( - db, - "p2wpkh_count", - version, - indexes, - )?; - let indexes_to_p2wsh_count = ComputedBlockFull::forced_import( - db, - "p2wsh_count", - version, - indexes, - )?; + let p2a = ComputedBlockFull::forced_import(db, "p2a_count", version, indexes)?; + let p2ms = ComputedBlockFull::forced_import(db, "p2ms_count", version, indexes)?; + let p2pk33 = ComputedBlockFull::forced_import(db, "p2pk33_count", version, indexes)?; + let p2pk65 = ComputedBlockFull::forced_import(db, "p2pk65_count", version, indexes)?; + let p2pkh = ComputedBlockFull::forced_import(db, "p2pkh_count", version, indexes)?; + let p2sh = ComputedBlockFull::forced_import(db, "p2sh_count", version, indexes)?; + let p2tr = ComputedBlockFull::forced_import(db, "p2tr_count", version, indexes)?; + let p2wpkh = ComputedBlockFull::forced_import(db, "p2wpkh_count", version, indexes)?; + let p2wsh = ComputedBlockFull::forced_import(db, "p2wsh_count", version, indexes)?; // Aggregate counts (computed from per-type counts) - let indexes_to_segwit_count = ComputedBlockFull::forced_import( - db, - "segwit_count", - version, - indexes, - )?; + let segwit = ComputedBlockFull::forced_import(db, "segwit_count", version, indexes)?; // Adoption ratios (lazy) - // Uses outputs.count.indexes_to_count as denominator (total output count) + // Uses outputs.count.count as denominator (total output count) // At height level: per-block ratio; at dateindex level: sum-based ratio (% of new outputs) 
- let indexes_to_taproot_adoption = - BinaryBlockFull::from_height_and_txindex::( - "taproot_adoption", - version, - indexes_to_p2tr_count.height.boxed_clone(), - outputs.count.indexes_to_count.height.sum_cum.sum.0.boxed_clone(), - &indexes_to_p2tr_count, - &outputs.count.indexes_to_count, - ); - let indexes_to_segwit_adoption = - BinaryBlockFull::from_height_and_txindex::( - "segwit_adoption", - version, - indexes_to_segwit_count.height.boxed_clone(), - outputs.count.indexes_to_count.height.sum_cum.sum.0.boxed_clone(), - &indexes_to_segwit_count, - &outputs.count.indexes_to_count, - ); + let taproot_adoption = BinaryBlockFull::from_height_and_txindex::( + "taproot_adoption", + version, + p2tr.height.boxed_clone(), + outputs.count.total_count.height.sum_cum.sum.0.boxed_clone(), + &p2tr, + &outputs.count.total_count, + ); + let segwit_adoption = BinaryBlockFull::from_height_and_txindex::( + "segwit_adoption", + version, + segwit.height.boxed_clone(), + outputs.count.total_count.height.sum_cum.sum.0.boxed_clone(), + &segwit, + &outputs.count.total_count, + ); Ok(Self { - indexes_to_p2a_count, - indexes_to_p2ms_count, - indexes_to_p2pk33_count, - indexes_to_p2pk65_count, - indexes_to_p2pkh_count, - indexes_to_p2sh_count, - indexes_to_p2tr_count, - indexes_to_p2wpkh_count, - indexes_to_p2wsh_count, - indexes_to_opreturn_count: ComputedBlockFull::forced_import( - db, - "opreturn_count", - version, - indexes, - )?, - indexes_to_emptyoutput_count: ComputedBlockFull::forced_import( + p2a, + p2ms, + p2pk33, + p2pk65, + p2pkh, + p2sh, + p2tr, + p2wpkh, + p2wsh, + opreturn: ComputedBlockFull::forced_import(db, "opreturn_count", version, indexes)?, + emptyoutput: ComputedBlockFull::forced_import( db, "emptyoutput_count", version, indexes, )?, - indexes_to_unknownoutput_count: ComputedBlockFull::forced_import( + unknownoutput: ComputedBlockFull::forced_import( db, "unknownoutput_count", version, indexes, )?, - indexes_to_segwit_count, - indexes_to_taproot_adoption, - 
indexes_to_segwit_adoption, + segwit, + taproot_adoption, + segwit_adoption, }) } } diff --git a/crates/brk_computer/src/scripts/count/vecs.rs b/crates/brk_computer/src/scripts/count/vecs.rs index 60f1870a2..5d60da069 100644 --- a/crates/brk_computer/src/scripts/count/vecs.rs +++ b/crates/brk_computer/src/scripts/count/vecs.rs @@ -1,32 +1,29 @@ use brk_traversable::Traversable; use brk_types::{StoredF32, StoredU64}; -use crate::internal::{ComputedBlockFull, BinaryBlockFull}; +use crate::internal::{BinaryBlockFull, ComputedBlockFull}; #[derive(Clone, Traversable)] pub struct Vecs { // Per-type output counts - pub indexes_to_p2a_count: ComputedBlockFull, - pub indexes_to_p2ms_count: ComputedBlockFull, - pub indexes_to_p2pk33_count: ComputedBlockFull, - pub indexes_to_p2pk65_count: ComputedBlockFull, - pub indexes_to_p2pkh_count: ComputedBlockFull, - pub indexes_to_p2sh_count: ComputedBlockFull, - pub indexes_to_p2tr_count: ComputedBlockFull, - pub indexes_to_p2wpkh_count: ComputedBlockFull, - pub indexes_to_p2wsh_count: ComputedBlockFull, - pub indexes_to_opreturn_count: ComputedBlockFull, - pub indexes_to_emptyoutput_count: ComputedBlockFull, - pub indexes_to_unknownoutput_count: ComputedBlockFull, + pub p2a: ComputedBlockFull, + pub p2ms: ComputedBlockFull, + pub p2pk33: ComputedBlockFull, + pub p2pk65: ComputedBlockFull, + pub p2pkh: ComputedBlockFull, + pub p2sh: ComputedBlockFull, + pub p2tr: ComputedBlockFull, + pub p2wpkh: ComputedBlockFull, + pub p2wsh: ComputedBlockFull, + pub opreturn: ComputedBlockFull, + pub emptyoutput: ComputedBlockFull, + pub unknownoutput: ComputedBlockFull, // Aggregate counts /// SegWit output count (p2wpkh + p2wsh + p2tr) - pub indexes_to_segwit_count: ComputedBlockFull, + pub segwit: ComputedBlockFull, - // Adoption ratios (lazy) - // Denominator is outputs.count.indexes_to_count (total output count) - /// Taproot adoption: p2tr / total_outputs * 100 - pub indexes_to_taproot_adoption: BinaryBlockFull, - /// SegWit adoption: segwit 
/ total_outputs * 100 - pub indexes_to_segwit_adoption: BinaryBlockFull, + // Adoption ratios + pub taproot_adoption: BinaryBlockFull, + pub segwit_adoption: BinaryBlockFull, } diff --git a/crates/brk_computer/src/scripts/value/compute.rs b/crates/brk_computer/src/scripts/value/compute.rs index 3451c404a..5e8f375e3 100644 --- a/crates/brk_computer/src/scripts/value/compute.rs +++ b/crates/brk_computer/src/scripts/value/compute.rs @@ -4,7 +4,7 @@ use brk_types::{Height, OutputType, Sats, TxOutIndex}; use vecdb::{AnyStoredVec, AnyVec, Exit, GenericStoredVec, TypedVecIterator, VecIndex}; use super::Vecs; -use crate::{indexes, price, ComputeIndexes}; +use crate::{ComputeIndexes, indexes, price}; impl Vecs { pub fn compute( @@ -15,20 +15,16 @@ impl Vecs { starting_indexes: &ComputeIndexes, exit: &Exit, ) -> Result<()> { - self.indexes_to_opreturn_value.compute_all( - indexes, - price, - starting_indexes, - exit, - |height_vec| { + self.opreturn + .compute_all(indexes, price, starting_indexes, exit, |height_vec| { // Validate computed versions against dependencies - let dep_version = indexer.vecs.txout.height_to_first_txoutindex.version() - + indexer.vecs.txout.txoutindex_to_outputtype.version() - + indexer.vecs.txout.txoutindex_to_value.version(); + let dep_version = indexer.vecs.outputs.first_txoutindex.version() + + indexer.vecs.outputs.outputtype.version() + + indexer.vecs.outputs.value.version(); height_vec.validate_computed_version_or_reset(dep_version)?; // Get target height - let target_len = indexer.vecs.txout.height_to_first_txoutindex.len(); + let target_len = indexer.vecs.outputs.first_txoutindex.len(); if target_len == 0 { return Ok(()); } @@ -45,10 +41,9 @@ impl Vecs { // Prepare iterators let mut height_to_first_txoutindex = - indexer.vecs.txout.height_to_first_txoutindex.iter()?; - let mut txoutindex_to_outputtype = - indexer.vecs.txout.txoutindex_to_outputtype.iter()?; - let mut txoutindex_to_value = indexer.vecs.txout.txoutindex_to_value.iter()?; + 
indexer.vecs.outputs.first_txoutindex.iter()?; + let mut txoutindex_to_outputtype = indexer.vecs.outputs.outputtype.iter()?; + let mut txoutindex_to_value = indexer.vecs.outputs.value.iter()?; // Iterate blocks for h in starting_height.to_usize()..=target_height.to_usize() { @@ -59,7 +54,7 @@ impl Vecs { let next_first_txoutindex = if height < target_height { height_to_first_txoutindex.get_unwrap(height.incremented()) } else { - TxOutIndex::from(indexer.vecs.txout.txoutindex_to_value.len()) + TxOutIndex::from(indexer.vecs.outputs.value.len()) }; // Sum opreturn values @@ -80,8 +75,7 @@ impl Vecs { height_vec.write()?; Ok(()) - }, - )?; + })?; Ok(()) } diff --git a/crates/brk_computer/src/scripts/value/import.rs b/crates/brk_computer/src/scripts/value/import.rs index fc3dd7600..726d19684 100644 --- a/crates/brk_computer/src/scripts/value/import.rs +++ b/crates/brk_computer/src/scripts/value/import.rs @@ -12,16 +12,14 @@ impl Vecs { indexes: &indexes::Vecs, compute_dollars: bool, ) -> Result { - let indexes_to_opreturn_value = ValueBlockFull::forced_import( - db, - "opreturn_value", - version, - indexes, - compute_dollars, - )?; - Ok(Self { - indexes_to_opreturn_value, + opreturn: ValueBlockFull::forced_import( + db, + "opreturn_value", + version, + indexes, + compute_dollars, + )?, }) } } diff --git a/crates/brk_computer/src/scripts/value/vecs.rs b/crates/brk_computer/src/scripts/value/vecs.rs index 3ddab2dcf..740316a8c 100644 --- a/crates/brk_computer/src/scripts/value/vecs.rs +++ b/crates/brk_computer/src/scripts/value/vecs.rs @@ -4,5 +4,5 @@ use crate::internal::ValueBlockFull; #[derive(Clone, Traversable)] pub struct Vecs { - pub indexes_to_opreturn_value: ValueBlockFull, + pub opreturn: ValueBlockFull, } diff --git a/crates/brk_computer/src/supply/burned/compute.rs b/crates/brk_computer/src/supply/burned/compute.rs index 840a13ede..b9a6b51dd 100644 --- a/crates/brk_computer/src/supply/burned/compute.rs +++ b/crates/brk_computer/src/supply/burned/compute.rs @@ 
-3,7 +3,7 @@ use brk_types::{Height, Sats}; use vecdb::{AnyStoredVec, AnyVec, Exit, GenericStoredVec, TypedVecIterator, VecIndex}; use super::Vecs; -use crate::{blocks, indexes, price, scripts, ComputeIndexes}; +use crate::{ComputeIndexes, blocks, indexes, price, scripts}; impl Vecs { pub fn compute( @@ -16,20 +16,15 @@ impl Vecs { exit: &Exit, ) -> Result<()> { // 1. Compute opreturn supply - copy per-block opreturn values from scripts - self.indexes_to_opreturn.compute_all( - indexes, - price, - starting_indexes, - exit, - |height_vec| { + self.opreturn + .compute_all(indexes, price, starting_indexes, exit, |height_vec| { // Validate computed versions against dependencies - // KISS: height is now inside indexes_to_opreturn_value.sats.height - let opreturn_dep_version = - scripts.value.indexes_to_opreturn_value.sats.height.version(); + + let opreturn_dep_version = scripts.value.opreturn.sats.height.version(); height_vec.validate_computed_version_or_reset(opreturn_dep_version)?; // Copy per-block opreturn values from scripts - let scripts_target = scripts.value.indexes_to_opreturn_value.sats.height.len(); + let scripts_target = scripts.value.opreturn.sats.height.len(); if scripts_target > 0 { let target_height = Height::from(scripts_target - 1); let current_len = height_vec.len(); @@ -38,7 +33,7 @@ impl Vecs { if starting_height <= target_height { let mut opreturn_value_iter = - scripts.value.indexes_to_opreturn_value.sats.height.into_iter(); + scripts.value.opreturn.sats.height.into_iter(); for h in starting_height.to_usize()..=target_height.to_usize() { let height = Height::from(h); @@ -50,21 +45,15 @@ impl Vecs { height_vec.write()?; Ok(()) - }, - )?; + })?; // 2. 
Compute unspendable supply = opreturn + unclaimed_rewards + genesis (at height 0) // Get reference to opreturn height vec for computing unspendable - let opreturn_height = &self.indexes_to_opreturn.sats.height; - let unclaimed_height = &blocks.rewards.indexes_to_unclaimed_rewards.sats.height; + let opreturn_height = &self.opreturn.sats.height; + let unclaimed_height = &blocks.rewards.unclaimed_rewards.sats.height; - self.indexes_to_unspendable.compute_all( - indexes, - price, - starting_indexes, - exit, - |height_vec| { - // KISS: height is now a concrete field (no Option) + self.unspendable + .compute_all(indexes, price, starting_indexes, exit, |height_vec| { let unspendable_dep_version = opreturn_height.version() + unclaimed_height.version(); height_vec.validate_computed_version_or_reset(unspendable_dep_version)?; @@ -78,7 +67,7 @@ impl Vecs { if starting_height <= target_height { let mut opreturn_iter = opreturn_height.into_iter(); - // KISS: height is now a concrete field (no Option) + let mut unclaimed_rewards_iter = unclaimed_height.into_iter(); for h in starting_height.to_usize()..=target_height.to_usize() { @@ -105,8 +94,7 @@ impl Vecs { height_vec.write()?; Ok(()) - }, - )?; + })?; Ok(()) } diff --git a/crates/brk_computer/src/supply/burned/import.rs b/crates/brk_computer/src/supply/burned/import.rs index 6ab3f2036..ebfd0e16b 100644 --- a/crates/brk_computer/src/supply/burned/import.rs +++ b/crates/brk_computer/src/supply/burned/import.rs @@ -12,25 +12,21 @@ impl Vecs { indexes: &indexes::Vecs, compute_dollars: bool, ) -> Result { - let indexes_to_opreturn = ValueBlockSumCum::forced_import( - db, - "opreturn_supply", - version, - indexes, - compute_dollars, - )?; - - let indexes_to_unspendable = ValueBlockSumCum::forced_import( - db, - "unspendable_supply", - version, - indexes, - compute_dollars, - )?; - Ok(Self { - indexes_to_opreturn, - indexes_to_unspendable, + opreturn: ValueBlockSumCum::forced_import( + db, + "opreturn_supply", + version, + indexes, 
+ compute_dollars, + )?, + unspendable: ValueBlockSumCum::forced_import( + db, + "unspendable_supply", + version, + indexes, + compute_dollars, + )?, }) } } diff --git a/crates/brk_computer/src/supply/burned/vecs.rs b/crates/brk_computer/src/supply/burned/vecs.rs index e1ebdc109..009bd30c9 100644 --- a/crates/brk_computer/src/supply/burned/vecs.rs +++ b/crates/brk_computer/src/supply/burned/vecs.rs @@ -5,6 +5,6 @@ use crate::internal::ValueBlockSumCum; /// Burned/unspendable supply metrics #[derive(Clone, Traversable)] pub struct Vecs { - pub indexes_to_opreturn: ValueBlockSumCum, - pub indexes_to_unspendable: ValueBlockSumCum, + pub opreturn: ValueBlockSumCum, + pub unspendable: ValueBlockSumCum, } diff --git a/crates/brk_computer/src/supply/circulating/import.rs b/crates/brk_computer/src/supply/circulating/import.rs index 2d38ded16..21c0d05d7 100644 --- a/crates/brk_computer/src/supply/circulating/import.rs +++ b/crates/brk_computer/src/supply/circulating/import.rs @@ -1,55 +1,18 @@ use brk_types::Version; -use vecdb::{IterableCloneableVec, LazyVecFrom1}; use super::Vecs; use crate::{ distribution, - internal::{DollarsIdentity, LazyValueDateLast, SatsIdentity, SatsToBitcoin}, + internal::{DollarsIdentity, LazyLastBlockValue, SatsIdentity}, }; impl Vecs { pub fn import(version: Version, distribution: &distribution::Vecs) -> Self { - // Reference distribution's actual circulating supply lazily let supply_metrics = &distribution.utxo_cohorts.all.metrics.supply; - let height_to_sats = LazyVecFrom1::init( - "circulating_sats", - version, - supply_metrics.height_to_supply.boxed_clone(), - |height, iter| iter.get(height), - ); - - let height_to_btc = LazyVecFrom1::transformed::( - "circulating_btc", - version, - supply_metrics.height_to_supply.boxed_clone(), - ); - - let height_to_usd = supply_metrics - .height_to_supply_value - .dollars - .as_ref() - .map(|d| { - LazyVecFrom1::init( - "circulating_usd", - version, - d.boxed_clone(), - |height, iter| iter.get(height), - 
) - }); - - // Create lazy identity wrapper around the FULL supply (not half!) - KISS - let indexes = LazyValueDateLast::from_source::< + Self(LazyLastBlockValue::from_block_source::< SatsIdentity, - SatsToBitcoin, DollarsIdentity, - >("circulating", &supply_metrics.indexes_to_supply, version); - - Self { - height_to_sats, - height_to_btc, - height_to_usd, - indexes, - } + >("circulating_supply", &supply_metrics.supply, version)) } } diff --git a/crates/brk_computer/src/supply/circulating/vecs.rs b/crates/brk_computer/src/supply/circulating/vecs.rs index f927015e2..886985ded 100644 --- a/crates/brk_computer/src/supply/circulating/vecs.rs +++ b/crates/brk_computer/src/supply/circulating/vecs.rs @@ -1,14 +1,8 @@ use brk_traversable::Traversable; -use brk_types::{Bitcoin, Dollars, Height, Sats}; -use vecdb::LazyVecFrom1; +use derive_more::{Deref, DerefMut}; -use crate::internal::LazyValueDateLast; +use crate::internal::LazyLastBlockValue; /// Circulating supply - lazy references to distribution's actual supply (KISS) -#[derive(Clone, Traversable)] -pub struct Vecs { - pub height_to_sats: LazyVecFrom1, - pub height_to_btc: LazyVecFrom1, - pub height_to_usd: Option>, - pub indexes: LazyValueDateLast, -} +#[derive(Clone, Deref, DerefMut, Traversable)] +pub struct Vecs(pub LazyLastBlockValue); diff --git a/crates/brk_computer/src/supply/inflation/compute.rs b/crates/brk_computer/src/supply/inflation/compute.rs index 94ed73992..21a569ae4 100644 --- a/crates/brk_computer/src/supply/inflation/compute.rs +++ b/crates/brk_computer/src/supply/inflation/compute.rs @@ -2,7 +2,7 @@ use brk_error::Result; use vecdb::Exit; use super::Vecs; -use crate::{blocks, distribution, ComputeIndexes}; +use crate::{ComputeIndexes, blocks, distribution}; impl Vecs { pub fn compute( @@ -13,20 +13,13 @@ impl Vecs { exit: &Exit, ) -> Result<()> { // inflation = daily_subsidy / circulating_supply * 365 * 100 - let circulating_supply = &distribution - .utxo_cohorts - .all - .metrics - .supply - 
.indexes_to_supply; + let circulating_supply = &distribution.utxo_cohorts.all.metrics.supply.supply.sats; - self.indexes.compute_all(starting_indexes, exit, |v| { - // KISS: dateindex.sum is now a concrete field + self.compute_all(starting_indexes, exit, |v| { v.compute_transform2( starting_indexes.dateindex, - &blocks.rewards.indexes_to_subsidy.sats.dateindex.sum_cum.sum.0, - // KISS: dateindex is no longer Option - &circulating_supply.sats_dateindex, + &blocks.rewards.subsidy.sats.dateindex.sum_cum.sum.0, + &circulating_supply.dateindex.0, |(i, subsidy_1d_sum, supply, ..)| { let inflation = if *supply > 0 { 365.0 * *subsidy_1d_sum as f64 / *supply as f64 * 100.0 diff --git a/crates/brk_computer/src/supply/inflation/import.rs b/crates/brk_computer/src/supply/inflation/import.rs index 2ee8ae2b5..c2300a771 100644 --- a/crates/brk_computer/src/supply/inflation/import.rs +++ b/crates/brk_computer/src/supply/inflation/import.rs @@ -7,15 +7,11 @@ use crate::{indexes, internal::ComputedVecsDateAverage}; impl Vecs { pub fn forced_import(db: &Database, version: Version, indexes: &indexes::Vecs) -> Result { - let indexes_to_inflation_rate = ComputedVecsDateAverage::forced_import( + Ok(Self(ComputedVecsDateAverage::forced_import( db, "inflation_rate", version, indexes, - )?; - - Ok(Self { - indexes: indexes_to_inflation_rate, - }) + )?)) } } diff --git a/crates/brk_computer/src/supply/inflation/vecs.rs b/crates/brk_computer/src/supply/inflation/vecs.rs index ecfcaef7a..014cc95a5 100644 --- a/crates/brk_computer/src/supply/inflation/vecs.rs +++ b/crates/brk_computer/src/supply/inflation/vecs.rs @@ -1,10 +1,10 @@ use brk_traversable::Traversable; use brk_types::StoredF32; +use derive_more::{Deref, DerefMut}; use crate::internal::ComputedVecsDateAverage; /// Inflation rate metrics -#[derive(Clone, Traversable)] -pub struct Vecs { - pub indexes: ComputedVecsDateAverage, -} +#[derive(Clone, Deref, DerefMut, Traversable)] +#[traversable(transparent)] +pub struct Vecs(pub 
ComputedVecsDateAverage); diff --git a/crates/brk_computer/src/supply/market_cap/import.rs b/crates/brk_computer/src/supply/market_cap/import.rs index 98a526365..605252e46 100644 --- a/crates/brk_computer/src/supply/market_cap/import.rs +++ b/crates/brk_computer/src/supply/market_cap/import.rs @@ -1,37 +1,20 @@ use brk_types::Version; -use vecdb::{IterableCloneableVec, LazyVecFrom1}; +use vecdb::IterableCloneableVec; use super::Vecs; -use crate::{ - distribution, - internal::{DollarsIdentity, LazyDateLast}, -}; +use crate::{distribution, internal::{DollarsIdentity, LazyBlockLast}}; impl Vecs { - pub fn import(version: Version, distribution: &distribution::Vecs) -> Self { + pub fn import(version: Version, distribution: &distribution::Vecs) -> Option { let supply_metrics = &distribution.utxo_cohorts.all.metrics.supply; - // Market cap by height (lazy from distribution's supply in USD) - let height = supply_metrics - .height_to_supply_value - .dollars - .as_ref() - .map(|d| { - LazyVecFrom1::init("market_cap", version, d.boxed_clone(), |height, iter| { - iter.get(height) - }) - }); - - // Market cap by indexes (lazy from distribution's supply in USD) - KISS - let indexes = supply_metrics.indexes_to_supply.dollars.as_ref().map(|d| { - // KISS: dateindex is no longer Option, use from_source - LazyDateLast::from_source::( + supply_metrics.supply.dollars.as_ref().map(|d| { + Self(LazyBlockLast::from_computed::( "market_cap", version, + d.height.boxed_clone(), d, - ) - }); - - Self { height, indexes } + )) + }) } } diff --git a/crates/brk_computer/src/supply/market_cap/vecs.rs b/crates/brk_computer/src/supply/market_cap/vecs.rs index c4cf12a92..2f2f6afda 100644 --- a/crates/brk_computer/src/supply/market_cap/vecs.rs +++ b/crates/brk_computer/src/supply/market_cap/vecs.rs @@ -1,13 +1,8 @@ use brk_traversable::Traversable; -use brk_types::{Dollars, Height}; -use vecdb::LazyVecFrom1; +use brk_types::Dollars; +use derive_more::{Deref, DerefMut}; -use 
crate::internal::LazyDateLast; +use crate::internal::LazyBlockLast; -/// Market cap metrics - lazy references to supply in USD (KISS) -/// (market_cap = circulating_supply * price, already computed in distribution) -#[derive(Clone, Traversable)] -pub struct Vecs { - pub height: Option>, - pub indexes: Option>, -} +#[derive(Clone, Deref, DerefMut, Traversable)] +pub struct Vecs(pub LazyBlockLast); diff --git a/crates/brk_computer/src/supply/vecs.rs b/crates/brk_computer/src/supply/vecs.rs index b0048b540..227afc18a 100644 --- a/crates/brk_computer/src/supply/vecs.rs +++ b/crates/brk_computer/src/supply/vecs.rs @@ -20,5 +20,5 @@ pub struct Vecs { pub burned: burned::Vecs, pub inflation: inflation::Vecs, pub velocity: velocity::Vecs, - pub market_cap: market_cap::Vecs, + pub market_cap: Option, } diff --git a/crates/brk_computer/src/supply/velocity/compute.rs b/crates/brk_computer/src/supply/velocity/compute.rs index 358880997..e878641e5 100644 --- a/crates/brk_computer/src/supply/velocity/compute.rs +++ b/crates/brk_computer/src/supply/velocity/compute.rs @@ -2,7 +2,7 @@ use brk_error::Result; use vecdb::Exit; use super::Vecs; -use crate::{distribution, transactions, ComputeIndexes}; +use crate::{ComputeIndexes, distribution, transactions}; impl Vecs { pub fn compute( @@ -13,35 +13,29 @@ impl Vecs { exit: &Exit, ) -> Result<()> { // velocity = annualized_volume / circulating_supply - let circulating_supply = &distribution - .utxo_cohorts - .all - .metrics - .supply - .indexes_to_supply; + let circulating_supply = &distribution.utxo_cohorts.all.metrics.supply.supply; - // BTC velocity - KISS: dateindex is no longer Option - self.indexes_to_btc - .compute_all(starting_indexes, exit, |v| { - v.compute_divide( - starting_indexes.dateindex, - &transactions.volume.indexes_to_annualized_volume_btc.dateindex, - &*circulating_supply.bitcoin.dateindex, - exit, - )?; - Ok(()) - })?; + // BTC velocity + self.btc.compute_all(starting_indexes, exit, |v| { + v.compute_divide( + 
starting_indexes.dateindex, + &*transactions.volume.annualized_volume.bitcoin.dateindex, + &*circulating_supply.bitcoin.dateindex, + exit, + )?; + Ok(()) + })?; // USD velocity - if let Some(usd_velocity) = self.indexes_to_usd.as_mut() + if let Some(usd_velocity) = self.usd.as_mut() && let Some(supply_usd) = circulating_supply.dollars.as_ref() + && let Some(volume_usd) = transactions.volume.annualized_volume.dollars.as_ref() { - // KISS: dateindex is no longer Option usd_velocity.compute_all(starting_indexes, exit, |v| { v.compute_divide( starting_indexes.dateindex, - &transactions.volume.indexes_to_annualized_volume_usd.dateindex, - &supply_usd.dateindex, + &volume_usd.dateindex, + &supply_usd.dateindex.0, exit, )?; Ok(()) diff --git a/crates/brk_computer/src/supply/velocity/import.rs b/crates/brk_computer/src/supply/velocity/import.rs index e47d7a305..520c9023e 100644 --- a/crates/brk_computer/src/supply/velocity/import.rs +++ b/crates/brk_computer/src/supply/velocity/import.rs @@ -12,26 +12,12 @@ impl Vecs { indexes: &indexes::Vecs, compute_dollars: bool, ) -> Result { - let indexes_to_btc = ComputedVecsDateAverage::forced_import( - db, - "btc_velocity", - version, - indexes, - )?; - - let indexes_to_usd = compute_dollars.then(|| { - ComputedVecsDateAverage::forced_import( - db, - "usd_velocity", - version, - indexes, - ) - .unwrap() - }); - Ok(Self { - indexes_to_btc, - indexes_to_usd, + btc: ComputedVecsDateAverage::forced_import(db, "btc_velocity", version, indexes)?, + usd: compute_dollars.then(|| { + ComputedVecsDateAverage::forced_import(db, "usd_velocity", version, indexes) + .unwrap() + }), }) } } diff --git a/crates/brk_computer/src/supply/velocity/vecs.rs b/crates/brk_computer/src/supply/velocity/vecs.rs index 0951b2b35..9dd548acd 100644 --- a/crates/brk_computer/src/supply/velocity/vecs.rs +++ b/crates/brk_computer/src/supply/velocity/vecs.rs @@ -6,6 +6,6 @@ use crate::internal::ComputedVecsDateAverage; /// Velocity metrics (annualized volume / 
circulating supply) #[derive(Clone, Traversable)] pub struct Vecs { - pub indexes_to_btc: ComputedVecsDateAverage, - pub indexes_to_usd: Option>, + pub btc: ComputedVecsDateAverage, + pub usd: Option>, } diff --git a/crates/brk_computer/src/transactions/count/compute.rs b/crates/brk_computer/src/transactions/count/compute.rs index e8a2c1fd4..abf91b9db 100644 --- a/crates/brk_computer/src/transactions/count/compute.rs +++ b/crates/brk_computer/src/transactions/count/compute.rs @@ -3,7 +3,7 @@ use brk_indexer::Indexer; use vecdb::Exit; use super::Vecs; -use crate::{indexes, ComputeIndexes}; +use crate::{ComputeIndexes, indexes}; impl Vecs { pub fn compute( @@ -13,12 +13,12 @@ impl Vecs { starting_indexes: &ComputeIndexes, exit: &Exit, ) -> Result<()> { - self.indexes_to_tx_count + self.tx_count .compute_all(indexes, starting_indexes, exit, |v| { v.compute_count_from_indexes( starting_indexes.height, - &indexer.vecs.tx.height_to_first_txindex, - &indexer.vecs.tx.txindex_to_txid, + &indexer.vecs.transactions.first_txindex, + &indexer.vecs.transactions.txid, exit, )?; Ok(()) diff --git a/crates/brk_computer/src/transactions/count/import.rs b/crates/brk_computer/src/transactions/count/import.rs index 612d581ca..776fc3060 100644 --- a/crates/brk_computer/src/transactions/count/import.rs +++ b/crates/brk_computer/src/transactions/count/import.rs @@ -4,10 +4,7 @@ use brk_types::{StoredBool, TxIndex, Version}; use vecdb::{Database, IterableCloneableVec, LazyVecFrom2}; use super::Vecs; -use crate::{ - indexes, - internal::ComputedBlockFull, -}; +use crate::{indexes, internal::ComputedBlockFull}; impl Vecs { pub fn forced_import( @@ -19,8 +16,8 @@ impl Vecs { let txindex_to_is_coinbase = LazyVecFrom2::init( "is_coinbase", version, - indexer.vecs.tx.txindex_to_height.boxed_clone(), - indexer.vecs.tx.height_to_first_txindex.boxed_clone(), + indexer.vecs.transactions.height.boxed_clone(), + indexer.vecs.transactions.first_txindex.boxed_clone(), |index: TxIndex, 
txindex_to_height_iter, height_to_first_txindex_iter| { txindex_to_height_iter.get(index).map(|height| { let txindex = height_to_first_txindex_iter.get_unwrap(height); @@ -30,13 +27,8 @@ impl Vecs { ); Ok(Self { - indexes_to_tx_count: ComputedBlockFull::forced_import( - db, - "tx_count", - version, - indexes, - )?, - txindex_to_is_coinbase, + tx_count: ComputedBlockFull::forced_import(db, "tx_count", version, indexes)?, + is_coinbase: txindex_to_is_coinbase, }) } } diff --git a/crates/brk_computer/src/transactions/count/vecs.rs b/crates/brk_computer/src/transactions/count/vecs.rs index 3262e01cd..1cb8c4f5e 100644 --- a/crates/brk_computer/src/transactions/count/vecs.rs +++ b/crates/brk_computer/src/transactions/count/vecs.rs @@ -6,6 +6,6 @@ use crate::internal::ComputedBlockFull; #[derive(Clone, Traversable)] pub struct Vecs { - pub indexes_to_tx_count: ComputedBlockFull, - pub txindex_to_is_coinbase: LazyVecFrom2, + pub tx_count: ComputedBlockFull, + pub is_coinbase: LazyVecFrom2, } diff --git a/crates/brk_computer/src/transactions/fees/compute.rs b/crates/brk_computer/src/transactions/fees/compute.rs index 21b83987f..1c51b5de1 100644 --- a/crates/brk_computer/src/transactions/fees/compute.rs +++ b/crates/brk_computer/src/transactions/fees/compute.rs @@ -3,9 +3,9 @@ use brk_indexer::Indexer; use brk_types::{FeeRate, Sats}; use vecdb::{Exit, unlikely}; -use super::Vecs; use super::super::size; -use crate::{indexes, inputs, ComputeIndexes}; +use super::Vecs; +use crate::{ComputeIndexes, indexes, inputs}; impl Vecs { #[allow(clippy::too_many_arguments)] @@ -18,26 +18,26 @@ impl Vecs { starting_indexes: &ComputeIndexes, exit: &Exit, ) -> Result<()> { - self.txindex_to_input_value.compute_sum_from_indexes( + self.input_value.compute_sum_from_indexes( starting_indexes.txindex, - &indexer.vecs.tx.txindex_to_first_txinindex, - &indexes.transaction.txindex_to_input_count, - &txins.spent.txinindex_to_value, + &indexer.vecs.transactions.first_txinindex, + 
&indexes.txindex.input_count, + &txins.spent.value, exit, )?; - self.txindex_to_output_value.compute_sum_from_indexes( + self.output_value.compute_sum_from_indexes( starting_indexes.txindex, - &indexer.vecs.tx.txindex_to_first_txoutindex, - &indexes.transaction.txindex_to_output_count, - &indexer.vecs.txout.txoutindex_to_value, + &indexer.vecs.transactions.first_txoutindex, + &indexes.txindex.output_count, + &indexer.vecs.outputs.value, exit, )?; - self.txindex_to_fee.compute_transform2( + self.fee.base.compute_transform2( starting_indexes.txindex, - &self.txindex_to_input_value, - &self.txindex_to_output_value, + &self.input_value, + &self.output_value, |(i, input, output, ..)| { let fee = if unlikely(input.is_max()) { Sats::ZERO @@ -49,29 +49,19 @@ impl Vecs { exit, )?; - self.txindex_to_fee_rate.compute_transform2( + self.fee_rate.txindex.compute_transform2( starting_indexes.txindex, - &self.txindex_to_fee, - &size_vecs.txindex_to_vsize, + &self.fee.base, + &size_vecs.vsize.txindex, |(txindex, fee, vsize, ..)| (txindex, FeeRate::from((fee, vsize))), exit, )?; - self.indexes_to_fee.derive_from( - indexer, - indexes, - starting_indexes, - &self.txindex_to_fee, - exit, - )?; + self.fee + .derive_from(indexer, indexes, starting_indexes, exit)?; - self.indexes_to_fee_rate.derive_from( - indexer, - indexes, - starting_indexes, - &self.txindex_to_fee_rate, - exit, - )?; + self.fee_rate + .derive_from(indexer, indexes, starting_indexes, exit)?; Ok(()) } diff --git a/crates/brk_computer/src/transactions/fees/import.rs b/crates/brk_computer/src/transactions/fees/import.rs index 2fc725cff..c9db30aa9 100644 --- a/crates/brk_computer/src/transactions/fees/import.rs +++ b/crates/brk_computer/src/transactions/fees/import.rs @@ -4,11 +4,7 @@ use brk_types::Version; use vecdb::{Database, EagerVec, ImportableVec}; use super::Vecs; -use crate::{ - indexes, - internal::{ComputedTxDistribution, ValueDerivedTxFull}, - price, -}; +use crate::{indexes, 
internal::{ComputedTxDistribution, ValueTxFull}, price}; impl Vecs { pub fn forced_import( @@ -18,31 +14,11 @@ impl Vecs { indexes: &indexes::Vecs, price: Option<&price::Vecs>, ) -> Result { - let txindex_to_input_value = EagerVec::forced_import(db, "input_value", version)?; - let txindex_to_output_value = EagerVec::forced_import(db, "output_value", version)?; - let txindex_to_fee = EagerVec::forced_import(db, "fee", version)?; - let txindex_to_fee_rate = EagerVec::forced_import(db, "fee_rate", version)?; - Ok(Self { - txindex_to_input_value, - txindex_to_output_value, - txindex_to_fee: txindex_to_fee.clone(), - txindex_to_fee_rate: txindex_to_fee_rate.clone(), - indexes_to_fee: ValueDerivedTxFull::forced_import( - db, - "fee", - version, - indexes, - indexer, - price, - &txindex_to_fee, - )?, - indexes_to_fee_rate: ComputedTxDistribution::forced_import( - db, - "fee_rate", - version, - indexes, - )?, + input_value: EagerVec::forced_import(db, "input_value", version)?, + output_value: EagerVec::forced_import(db, "output_value", version)?, + fee: ValueTxFull::forced_import(db, "fee", version, indexes, indexer, price)?, + fee_rate: ComputedTxDistribution::forced_import(db, "fee_rate", version, indexes)?, }) } } diff --git a/crates/brk_computer/src/transactions/fees/vecs.rs b/crates/brk_computer/src/transactions/fees/vecs.rs index 04899e2db..458a1cd32 100644 --- a/crates/brk_computer/src/transactions/fees/vecs.rs +++ b/crates/brk_computer/src/transactions/fees/vecs.rs @@ -2,14 +2,12 @@ use brk_traversable::Traversable; use brk_types::{FeeRate, Sats, TxIndex}; use vecdb::{EagerVec, PcoVec}; -use crate::internal::{ComputedTxDistribution, ValueDerivedTxFull}; +use crate::internal::{ComputedTxDistribution, ValueTxFull}; #[derive(Clone, Traversable)] pub struct Vecs { - pub txindex_to_input_value: EagerVec>, - pub txindex_to_output_value: EagerVec>, - pub txindex_to_fee: EagerVec>, - pub txindex_to_fee_rate: EagerVec>, - pub indexes_to_fee: ValueDerivedTxFull, - pub 
indexes_to_fee_rate: ComputedTxDistribution, + pub input_value: EagerVec>, + pub output_value: EagerVec>, + pub fee: ValueTxFull, + pub fee_rate: ComputedTxDistribution, } diff --git a/crates/brk_computer/src/transactions/size/compute.rs b/crates/brk_computer/src/transactions/size/compute.rs index 74c5fb160..f12604f61 100644 --- a/crates/brk_computer/src/transactions/size/compute.rs +++ b/crates/brk_computer/src/transactions/size/compute.rs @@ -13,21 +13,11 @@ impl Vecs { starting_indexes: &ComputeIndexes, exit: &Exit, ) -> Result<()> { - self.indexes_to_tx_weight.derive_from( - indexer, - indexes, - starting_indexes, - &self.txindex_to_weight, - exit, - )?; + self.weight + .derive_from(indexer, indexes, starting_indexes, exit)?; - self.indexes_to_tx_vsize.derive_from( - indexer, - indexes, - starting_indexes, - &self.txindex_to_vsize, - exit, - )?; + self.vsize + .derive_from(indexer, indexes, starting_indexes, exit)?; Ok(()) } diff --git a/crates/brk_computer/src/transactions/size/import.rs b/crates/brk_computer/src/transactions/size/import.rs index fbc5c4970..fe71bb73d 100644 --- a/crates/brk_computer/src/transactions/size/import.rs +++ b/crates/brk_computer/src/transactions/size/import.rs @@ -4,7 +4,7 @@ use brk_types::{TxIndex, VSize, Version, Weight}; use vecdb::{Database, IterableCloneableVec, LazyVecFrom2, VecIndex}; use super::Vecs; -use crate::{indexes, internal::ComputedTxDistribution}; +use crate::{indexes, internal::LazyTxDistribution}; impl Vecs { pub fn forced_import( @@ -16,43 +16,46 @@ impl Vecs { let txindex_to_weight = LazyVecFrom2::init( "weight", version, - indexer.vecs.tx.txindex_to_base_size.boxed_clone(), - indexer.vecs.tx.txindex_to_total_size.boxed_clone(), - |index: TxIndex, txindex_to_base_size_iter, txindex_to_total_size_iter| { + indexer.vecs.transactions.base_size.boxed_clone(), + indexer.vecs.transactions.total_size.boxed_clone(), + |index: TxIndex, base_size_iter, total_size_iter| { let index = index.to_usize(); - 
txindex_to_base_size_iter.get_at(index).map(|base_size| { - let total_size = txindex_to_total_size_iter.get_at_unwrap(index); + base_size_iter.get_at(index).map(|base_size| { + let total_size = total_size_iter.get_at_unwrap(index); Weight::from_sizes(*base_size, *total_size) }) }, ); - // Derive directly from eager sources to avoid Lazy <- Lazy let txindex_to_vsize = LazyVecFrom2::init( "vsize", version, - indexer.vecs.tx.txindex_to_base_size.boxed_clone(), - indexer.vecs.tx.txindex_to_total_size.boxed_clone(), - |index: TxIndex, txindex_to_base_size_iter, txindex_to_total_size_iter| { + indexer.vecs.transactions.base_size.boxed_clone(), + indexer.vecs.transactions.total_size.boxed_clone(), + |index: TxIndex, base_size_iter, total_size_iter| { let index = index.to_usize(); - txindex_to_base_size_iter.get_at(index).map(|base_size| { - let total_size = txindex_to_total_size_iter.get_at_unwrap(index); + base_size_iter.get_at(index).map(|base_size| { + let total_size = total_size_iter.get_at_unwrap(index); VSize::from(Weight::from_sizes(*base_size, *total_size)) }) }, ); - let indexes_to_tx_vsize = - ComputedTxDistribution::forced_import(db, "tx_vsize", version, indexes)?; - - let indexes_to_tx_weight = - ComputedTxDistribution::forced_import(db, "tx_weight", version, indexes)?; - Ok(Self { - indexes_to_tx_vsize, - indexes_to_tx_weight, - txindex_to_vsize, - txindex_to_weight, + vsize: LazyTxDistribution::forced_import( + db, + "tx_vsize", + version, + txindex_to_vsize, + indexes, + )?, + weight: LazyTxDistribution::forced_import( + db, + "tx_weight", + version, + txindex_to_weight, + indexes, + )?, }) } } diff --git a/crates/brk_computer/src/transactions/size/vecs.rs b/crates/brk_computer/src/transactions/size/vecs.rs index 34353ae32..5811b2a81 100644 --- a/crates/brk_computer/src/transactions/size/vecs.rs +++ b/crates/brk_computer/src/transactions/size/vecs.rs @@ -1,14 +1,10 @@ use brk_traversable::Traversable; -use brk_types::{StoredU32, TxIndex, VSize, Weight}; 
-use vecdb::LazyVecFrom2; +use brk_types::{StoredU32, VSize, Weight}; -use crate::internal::ComputedTxDistribution; +use crate::internal::LazyTxDistribution; #[derive(Clone, Traversable)] pub struct Vecs { - pub indexes_to_tx_vsize: ComputedTxDistribution, - pub indexes_to_tx_weight: ComputedTxDistribution, - // Both derive directly from eager sources (base_size, total_size) to avoid Lazy <- Lazy - pub txindex_to_vsize: LazyVecFrom2, - pub txindex_to_weight: LazyVecFrom2, + pub vsize: LazyTxDistribution, + pub weight: LazyTxDistribution, } diff --git a/crates/brk_computer/src/transactions/versions/compute.rs b/crates/brk_computer/src/transactions/versions/compute.rs index 10382fdf3..de7d699ec 100644 --- a/crates/brk_computer/src/transactions/versions/compute.rs +++ b/crates/brk_computer/src/transactions/versions/compute.rs @@ -4,7 +4,7 @@ use brk_types::{StoredU64, TxVersion}; use vecdb::{Exit, TypedVecIterator}; use super::Vecs; -use crate::{indexes, internal::ComputedBlockSumCum, ComputeIndexes}; +use crate::{ComputeIndexes, indexes, internal::ComputedBlockSumCum}; impl Vecs { pub fn compute( @@ -14,26 +14,25 @@ impl Vecs { starting_indexes: &ComputeIndexes, exit: &Exit, ) -> Result<()> { - let compute_indexes_to_tx_vany = - |indexes_to_tx_vany: &mut ComputedBlockSumCum, txversion: TxVersion| { - let mut txindex_to_txversion_iter = indexer.vecs.tx.txindex_to_txversion.iter()?; - indexes_to_tx_vany.compute_all(indexes, starting_indexes, exit, |vec| { - vec.compute_filtered_count_from_indexes( - starting_indexes.height, - &indexer.vecs.tx.height_to_first_txindex, - &indexer.vecs.tx.txindex_to_txid, - |txindex| { - let v = txindex_to_txversion_iter.get_unwrap(txindex); - v == txversion - }, - exit, - )?; - Ok(()) - }) - }; - compute_indexes_to_tx_vany(&mut self.indexes_to_tx_v1, TxVersion::ONE)?; - compute_indexes_to_tx_vany(&mut self.indexes_to_tx_v2, TxVersion::TWO)?; - compute_indexes_to_tx_vany(&mut self.indexes_to_tx_v3, TxVersion::THREE)?; + let tx_vany = 
|tx_vany: &mut ComputedBlockSumCum, txversion: TxVersion| { + let mut txversion_iter = indexer.vecs.transactions.txversion.iter()?; + tx_vany.compute_all(indexes, starting_indexes, exit, |vec| { + vec.compute_filtered_count_from_indexes( + starting_indexes.height, + &indexer.vecs.transactions.first_txindex, + &indexer.vecs.transactions.txid, + |txindex| { + let v = txversion_iter.get_unwrap(txindex); + v == txversion + }, + exit, + )?; + Ok(()) + }) + }; + tx_vany(&mut self.v1, TxVersion::ONE)?; + tx_vany(&mut self.v2, TxVersion::TWO)?; + tx_vany(&mut self.v3, TxVersion::THREE)?; Ok(()) } diff --git a/crates/brk_computer/src/transactions/versions/import.rs b/crates/brk_computer/src/transactions/versions/import.rs index 84500c1e9..551e1e6c7 100644 --- a/crates/brk_computer/src/transactions/versions/import.rs +++ b/crates/brk_computer/src/transactions/versions/import.rs @@ -8,24 +8,9 @@ use crate::{indexes, internal::ComputedBlockSumCum}; impl Vecs { pub fn forced_import(db: &Database, version: Version, indexes: &indexes::Vecs) -> Result { Ok(Self { - indexes_to_tx_v1: ComputedBlockSumCum::forced_import( - db, - "tx_v1", - version, - indexes, - )?, - indexes_to_tx_v2: ComputedBlockSumCum::forced_import( - db, - "tx_v2", - version, - indexes, - )?, - indexes_to_tx_v3: ComputedBlockSumCum::forced_import( - db, - "tx_v3", - version, - indexes, - )?, + v1: ComputedBlockSumCum::forced_import(db, "tx_v1", version, indexes)?, + v2: ComputedBlockSumCum::forced_import(db, "tx_v2", version, indexes)?, + v3: ComputedBlockSumCum::forced_import(db, "tx_v3", version, indexes)?, }) } } diff --git a/crates/brk_computer/src/transactions/versions/vecs.rs b/crates/brk_computer/src/transactions/versions/vecs.rs index b7b4be019..049d95939 100644 --- a/crates/brk_computer/src/transactions/versions/vecs.rs +++ b/crates/brk_computer/src/transactions/versions/vecs.rs @@ -5,7 +5,7 @@ use crate::internal::ComputedBlockSumCum; #[derive(Clone, Traversable)] pub struct Vecs { - pub 
indexes_to_tx_v1: ComputedBlockSumCum, - pub indexes_to_tx_v2: ComputedBlockSumCum, - pub indexes_to_tx_v3: ComputedBlockSumCum, + pub v1: ComputedBlockSumCum, + pub v2: ComputedBlockSumCum, + pub v3: ComputedBlockSumCum, } diff --git a/crates/brk_computer/src/transactions/volume/compute.rs b/crates/brk_computer/src/transactions/volume/compute.rs index 6793ebd41..4d48013e6 100644 --- a/crates/brk_computer/src/transactions/volume/compute.rs +++ b/crates/brk_computer/src/transactions/volume/compute.rs @@ -1,11 +1,11 @@ use brk_error::Result; use brk_indexer::Indexer; -use brk_types::{StoredF32, ONE_DAY_IN_SEC_F64}; +use brk_types::{ONE_DAY_IN_SEC_F64, StoredF32}; use vecdb::Exit; -use super::Vecs; use super::super::{count, fees}; -use crate::{indexes, inputs, outputs, price, ComputeIndexes}; +use super::Vecs; +use crate::{ComputeIndexes, indexes, inputs, outputs, price}; impl Vecs { #[allow(clippy::too_many_arguments)] @@ -21,75 +21,68 @@ impl Vecs { price: Option<&price::Vecs>, exit: &Exit, ) -> Result<()> { - self.indexes_to_sent_sum + self.sent_sum .compute_all(indexes, price, starting_indexes, exit, |v| { v.compute_filtered_sum_from_indexes( starting_indexes.height, - &indexer.vecs.tx.height_to_first_txindex, - &indexes.block.height_to_txindex_count, - &fees_vecs.txindex_to_input_value, + &indexer.vecs.transactions.first_txindex, + &indexes.height.txindex_count, + &fees_vecs.input_value, |sats| !sats.is_max(), exit, )?; Ok(()) })?; - self.indexes_to_annualized_volume.compute_all(starting_indexes, exit, |v| { + self.annualized_volume.compute_sats(|v| { v.compute_sum( starting_indexes.dateindex, - &self.indexes_to_sent_sum.sats.dateindex.0, + &self.sent_sum.sats.dateindex.0, 365, exit, )?; Ok(()) })?; - self.indexes_to_annualized_volume_btc.compute_all(starting_indexes, exit, |v| { - v.compute_sum( - starting_indexes.dateindex, - &*self.indexes_to_sent_sum.bitcoin.dateindex, - 365, - exit, - )?; - Ok(()) - })?; - - if let Some(indexes_to_sent_sum) = 
self.indexes_to_sent_sum.dollars.as_ref() { - self.indexes_to_annualized_volume_usd.compute_all(starting_indexes, exit, |v| { - v.compute_sum( - starting_indexes.dateindex, - &indexes_to_sent_sum.dateindex.0, - 365, - exit, - )?; - Ok(()) + if let Some(sent_sum_dollars) = self.sent_sum.dollars.as_ref() { + self.annualized_volume.compute_dollars(|dollars| { + dollars.compute_all(starting_indexes, exit, |v| { + v.compute_sum( + starting_indexes.dateindex, + &sent_sum_dollars.dateindex.0, + 365, + exit, + )?; + Ok(()) + }) })?; } - self.indexes_to_tx_per_sec.compute_all(starting_indexes, exit, |v| { - v.compute_transform2( - starting_indexes.dateindex, - &count_vecs.indexes_to_tx_count.dateindex.sum_cum.sum.0, - &indexes.time.dateindex_to_date, - |(i, tx_count, date, ..)| { - let completion = date.completion(); - let per_sec = if completion == 0.0 { - StoredF32::NAN - } else { - StoredF32::from(*tx_count as f64 / (completion * ONE_DAY_IN_SEC_F64)) - }; - (i, per_sec) - }, - exit, - )?; - Ok(()) - })?; + self.tx_per_sec.compute_all(starting_indexes, exit, |v| { + v.compute_transform2( + starting_indexes.dateindex, + &count_vecs.tx_count.dateindex.sum_cum.sum.0, + &indexes.dateindex.date, + |(i, tx_count, date, ..)| { + let completion = date.completion(); + let per_sec = if completion == 0.0 { + StoredF32::NAN + } else { + StoredF32::from(*tx_count as f64 / (completion * ONE_DAY_IN_SEC_F64)) + }; + (i, per_sec) + }, + exit, + )?; + Ok(()) + })?; - self.indexes_to_inputs_per_sec.compute_all(starting_indexes, exit, |v| { + self.inputs_per_sec + .compute_all(starting_indexes, exit, |v| { v.compute_transform2( starting_indexes.dateindex, - &inputs_count.indexes_to_count.dateindex.sum_cum.sum.0, - &indexes.time.dateindex_to_date, + &inputs_count.dateindex.sum_cum.sum.0, + &indexes.dateindex.date, |(i, input_count, date, ..)| { let completion = date.completion(); let per_sec = if completion == 0.0 { @@ -104,17 +97,20 @@ impl Vecs { Ok(()) })?; - 
self.indexes_to_outputs_per_sec.compute_all(starting_indexes, exit, |v| { + self.outputs_per_sec + .compute_all(starting_indexes, exit, |v| { v.compute_transform2( starting_indexes.dateindex, - &outputs_count.indexes_to_count.dateindex.sum_cum.sum.0, - &indexes.time.dateindex_to_date, + &outputs_count.total_count.dateindex.sum_cum.sum.0, + &indexes.dateindex.date, |(i, output_count, date, ..)| { let completion = date.completion(); let per_sec = if completion == 0.0 { StoredF32::NAN } else { - StoredF32::from(*output_count as f64 / (completion * ONE_DAY_IN_SEC_F64)) + StoredF32::from( + *output_count as f64 / (completion * ONE_DAY_IN_SEC_F64), + ) }; (i, per_sec) }, diff --git a/crates/brk_computer/src/transactions/volume/import.rs b/crates/brk_computer/src/transactions/volume/import.rs index 738d74491..e89d87c78 100644 --- a/crates/brk_computer/src/transactions/volume/import.rs +++ b/crates/brk_computer/src/transactions/volume/import.rs @@ -3,10 +3,7 @@ use brk_types::Version; use vecdb::Database; use super::Vecs; -use crate::{ - indexes, - internal::{ComputedDateLast, ValueBlockSum}, -}; +use crate::{indexes, internal::{ComputedDateLast, ValueBlockSum, ValueDateLast}}; impl Vecs { pub fn forced_import( @@ -18,44 +15,28 @@ impl Vecs { let v2 = Version::TWO; Ok(Self { - indexes_to_sent_sum: ValueBlockSum::forced_import( + sent_sum: ValueBlockSum::forced_import( db, "sent_sum", version, indexes, compute_dollars, )?, - indexes_to_annualized_volume: ComputedDateLast::forced_import( + annualized_volume: ValueDateLast::forced_import( db, "annualized_volume", version, + compute_dollars, indexes, )?, - indexes_to_annualized_volume_btc: ComputedDateLast::forced_import( - db, - "annualized_volume_btc", - version, - indexes, - )?, - indexes_to_annualized_volume_usd: ComputedDateLast::forced_import( - db, - "annualized_volume_usd", - version, - indexes, - )?, - indexes_to_tx_per_sec: ComputedDateLast::forced_import( - db, - "tx_per_sec", - version + v2, - indexes, - )?, - 
indexes_to_outputs_per_sec: ComputedDateLast::forced_import( + tx_per_sec: ComputedDateLast::forced_import(db, "tx_per_sec", version + v2, indexes)?, + outputs_per_sec: ComputedDateLast::forced_import( db, "outputs_per_sec", version + v2, indexes, )?, - indexes_to_inputs_per_sec: ComputedDateLast::forced_import( + inputs_per_sec: ComputedDateLast::forced_import( db, "inputs_per_sec", version + v2, diff --git a/crates/brk_computer/src/transactions/volume/vecs.rs b/crates/brk_computer/src/transactions/volume/vecs.rs index 0cef141c5..893b115ca 100644 --- a/crates/brk_computer/src/transactions/volume/vecs.rs +++ b/crates/brk_computer/src/transactions/volume/vecs.rs @@ -1,16 +1,14 @@ use brk_traversable::Traversable; -use brk_types::{Bitcoin, Dollars, Sats, StoredF32}; +use brk_types::StoredF32; -use crate::internal::{ComputedDateLast, ValueBlockSum}; +use crate::internal::{ComputedDateLast, ValueBlockSum, ValueDateLast}; /// Volume metrics #[derive(Clone, Traversable)] pub struct Vecs { - pub indexes_to_sent_sum: ValueBlockSum, - pub indexes_to_annualized_volume: ComputedDateLast, - pub indexes_to_annualized_volume_btc: ComputedDateLast, - pub indexes_to_annualized_volume_usd: ComputedDateLast, - pub indexes_to_tx_per_sec: ComputedDateLast, - pub indexes_to_outputs_per_sec: ComputedDateLast, - pub indexes_to_inputs_per_sec: ComputedDateLast, + pub sent_sum: ValueBlockSum, + pub annualized_volume: ValueDateLast, + pub tx_per_sec: ComputedDateLast, + pub outputs_per_sec: ComputedDateLast, + pub inputs_per_sec: ComputedDateLast, } diff --git a/crates/brk_fetcher/Cargo.toml b/crates/brk_fetcher/Cargo.toml index 5548e230a..c165e8658 100644 --- a/crates/brk_fetcher/Cargo.toml +++ b/crates/brk_fetcher/Cargo.toml @@ -12,6 +12,6 @@ build = "build.rs" brk_error = { workspace = true } brk_logger = { workspace = true } brk_types = { workspace = true } -log = { workspace = true } +tracing = { workspace = true } minreq = { workspace = true } serde_json = { workspace = true } diff 
--git a/crates/brk_fetcher/src/binance.rs b/crates/brk_fetcher/src/binance.rs index 6e54cdecf..5f820c0fb 100644 --- a/crates/brk_fetcher/src/binance.rs +++ b/crates/brk_fetcher/src/binance.rs @@ -7,12 +7,12 @@ use std::{ use brk_error::{Error, Result}; use brk_types::{Date, Height, OHLCCents, Timestamp}; -use log::info; use serde_json::Value; +use tracing::info; use crate::{ PriceSource, default_retry, - ohlc::{compute_ohlc_from_range, ohlc_from_array, timestamp_from_ms, date_from_timestamp}, + ohlc::{compute_ohlc_from_range, date_from_timestamp, ohlc_from_array, timestamp_from_ms}, }; #[derive(Clone)] diff --git a/crates/brk_fetcher/src/brk.rs b/crates/brk_fetcher/src/brk.rs index 9a2f89c7e..efd0dd738 100644 --- a/crates/brk_fetcher/src/brk.rs +++ b/crates/brk_fetcher/src/brk.rs @@ -1,9 +1,12 @@ use std::collections::BTreeMap; use brk_error::{Error, Result}; -use brk_types::{Cents, CheckedSub, Close, Date, DateIndex, Dollars, Height, High, Low, OHLCCents, Open, Timestamp}; -use log::info; +use brk_types::{ + Cents, CheckedSub, Close, Date, DateIndex, Dollars, Height, High, Low, OHLCCents, Open, + Timestamp, +}; use serde_json::Value; +use tracing::info; use crate::{PriceSource, default_retry}; @@ -121,9 +124,7 @@ impl BRK { } pub fn ping() -> Result<()> { - minreq::get(API_URL) - .with_timeout(10) - .send()?; + minreq::get(API_URL).with_timeout(10).send()?; Ok(()) } } diff --git a/crates/brk_fetcher/src/kraken.rs b/crates/brk_fetcher/src/kraken.rs index 6fa11ed30..15e13ad55 100644 --- a/crates/brk_fetcher/src/kraken.rs +++ b/crates/brk_fetcher/src/kraken.rs @@ -2,12 +2,12 @@ use std::collections::BTreeMap; use brk_error::{Error, Result}; use brk_types::{Date, Height, OHLCCents, Timestamp}; -use log::info; use serde_json::Value; +use tracing::info; use crate::{ PriceSource, default_retry, - ohlc::{compute_ohlc_from_range, ohlc_from_array, timestamp_from_secs, date_from_timestamp}, + ohlc::{compute_ohlc_from_range, date_from_timestamp, ohlc_from_array, 
timestamp_from_secs}, }; #[derive(Default, Clone)] diff --git a/crates/brk_fetcher/src/lib.rs b/crates/brk_fetcher/src/lib.rs index f904a77f9..c22190b56 100644 --- a/crates/brk_fetcher/src/lib.rs +++ b/crates/brk_fetcher/src/lib.rs @@ -4,7 +4,7 @@ use std::{path::Path, thread::sleep, time::Duration}; use brk_error::{Error, Result}; use brk_types::{Date, Height, OHLCCents, Timestamp}; -use log::info; +use tracing::info; mod binance; mod brk; diff --git a/crates/brk_fetcher/src/retry.rs b/crates/brk_fetcher/src/retry.rs index ec5e8e674..fa368b9f1 100644 --- a/crates/brk_fetcher/src/retry.rs +++ b/crates/brk_fetcher/src/retry.rs @@ -1,7 +1,7 @@ use std::{thread::sleep, time::Duration}; use brk_error::Result; -use log::info; +use tracing::info; pub fn default_retry(function: impl Fn(usize) -> Result) -> Result { retry(function, 5, 6) diff --git a/crates/brk_fetcher/src/source.rs b/crates/brk_fetcher/src/source.rs index 58aca6386..042fef8c4 100644 --- a/crates/brk_fetcher/src/source.rs +++ b/crates/brk_fetcher/src/source.rs @@ -2,7 +2,7 @@ use std::time::{Duration, Instant}; use brk_error::{Error, Result}; use brk_types::{Date, Height, OHLCCents, Timestamp}; -use log::info; +use tracing::info; /// Default cooldown period for unhealthy sources (5 minutes) const DEFAULT_COOLDOWN_SECS: u64 = 5 * 60; diff --git a/crates/brk_indexer/Cargo.toml b/crates/brk_indexer/Cargo.toml index 601c45024..bafe34f95 100644 --- a/crates/brk_indexer/Cargo.toml +++ b/crates/brk_indexer/Cargo.toml @@ -20,7 +20,7 @@ brk_store = { workspace = true } brk_types = { workspace = true } brk_traversable = { workspace = true } fjall = { workspace = true } -log = { workspace = true } +tracing = { workspace = true } rayon = { workspace = true } rlimit = "0.10.2" rustc-hash = { workspace = true } diff --git a/crates/brk_indexer/examples/indexer.rs b/crates/brk_indexer/examples/indexer.rs index c74cc8c05..ac33d12b4 100644 --- a/crates/brk_indexer/examples/indexer.rs +++ 
b/crates/brk_indexer/examples/indexer.rs @@ -10,7 +10,7 @@ use brk_indexer::Indexer; use brk_iterator::Blocks; use brk_reader::Reader; use brk_rpc::{Auth, Client}; -use log::{debug, info}; +use tracing::{debug, info}; use vecdb::Exit; fn main() -> color_eyre::Result<()> { diff --git a/crates/brk_indexer/examples/indexer_bench.rs b/crates/brk_indexer/examples/indexer_bench.rs index 87915ed66..724975d06 100644 --- a/crates/brk_indexer/examples/indexer_bench.rs +++ b/crates/brk_indexer/examples/indexer_bench.rs @@ -12,7 +12,7 @@ use brk_indexer::Indexer; use brk_iterator::Blocks; use brk_reader::Reader; use brk_rpc::{Auth, Client}; -use log::{debug, info}; +use tracing::{debug, info}; use vecdb::Exit; fn main() -> Result<()> { diff --git a/crates/brk_indexer/examples/indexer_bench2.rs b/crates/brk_indexer/examples/indexer_bench2.rs index d49d7f3a7..14d496b5c 100644 --- a/crates/brk_indexer/examples/indexer_bench2.rs +++ b/crates/brk_indexer/examples/indexer_bench2.rs @@ -12,7 +12,7 @@ use brk_indexer::Indexer; use brk_iterator::Blocks; use brk_reader::Reader; use brk_rpc::{Auth, Client}; -use log::{debug, info}; +use tracing::{debug, info}; use vecdb::Exit; fn main() -> Result<()> { diff --git a/crates/brk_indexer/examples/indexer_read.rs b/crates/brk_indexer/examples/indexer_read.rs index e168b7a6c..f73e9f651 100644 --- a/crates/brk_indexer/examples/indexer_read.rs +++ b/crates/brk_indexer/examples/indexer_read.rs @@ -24,8 +24,8 @@ fn main() -> Result<()> { dbg!( indexer .vecs - .txout - .txoutindex_to_value + .outputs + .value .iter()? 
.enumerate() .take(200) diff --git a/crates/brk_indexer/examples/indexer_read_speed.rs b/crates/brk_indexer/examples/indexer_read_speed.rs index 3a1b6f948..61b888dcf 100644 --- a/crates/brk_indexer/examples/indexer_read_speed.rs +++ b/crates/brk_indexer/examples/indexer_read_speed.rs @@ -9,7 +9,7 @@ fn run_benchmark(indexer: &Indexer) -> (Sats, std::time::Duration, usize) { let mut sum = Sats::ZERO; let mut count = 0; - for value in indexer.vecs.txout.txoutindex_to_value.clean_iter().unwrap() { + for value in indexer.vecs.outputs.value.clean_iter().unwrap() { sum += value; count += 1; } diff --git a/crates/brk_indexer/src/indexes.rs b/crates/brk_indexer/src/indexes.rs index 8288978bf..a0059699e 100644 --- a/crates/brk_indexer/src/indexes.rs +++ b/crates/brk_indexer/src/indexes.rs @@ -1,6 +1,6 @@ use brk_error::Result; use brk_types::Height; -use log::debug; +use tracing::{debug, info}; use vecdb::{GenericStoredVec, IterableStoredVec, IterableVec, VecIndex, VecValue}; use crate::{Stores, Vecs}; @@ -10,181 +10,185 @@ pub use brk_types::Indexes; /// Extension trait for Indexes with brk_indexer-specific functionality. 
pub trait IndexesExt { fn checked_push(&self, vecs: &mut Vecs) -> Result<()>; - fn from_vecs_and_stores(min_height: Height, vecs: &mut Vecs, stores: &Stores) -> Self; + fn from_vecs_and_stores(required_height: Height, vecs: &mut Vecs, stores: &Stores) -> Option where Self: Sized; } impl IndexesExt for Indexes { fn checked_push(&self, vecs: &mut Vecs) -> Result<()> { let height = self.height; - vecs.tx - .height_to_first_txindex + vecs.transactions + .first_txindex .checked_push(height, self.txindex)?; - vecs.txin - .height_to_first_txinindex + vecs.inputs + .first_txinindex .checked_push(height, self.txinindex)?; - vecs.txout - .height_to_first_txoutindex + vecs.outputs + .first_txoutindex .checked_push(height, self.txoutindex)?; - vecs.output - .height_to_first_emptyoutputindex + vecs.scripts + .first_emptyoutputindex .checked_push(height, self.emptyoutputindex)?; - vecs.output - .height_to_first_p2msoutputindex + vecs.scripts + .first_p2msoutputindex .checked_push(height, self.p2msoutputindex)?; - vecs.output - .height_to_first_opreturnindex + vecs.scripts + .first_opreturnindex .checked_push(height, self.opreturnindex)?; - vecs.address - .height_to_first_p2aaddressindex + vecs.addresses + .first_p2aaddressindex .checked_push(height, self.p2aaddressindex)?; - vecs.output - .height_to_first_unknownoutputindex + vecs.scripts + .first_unknownoutputindex .checked_push(height, self.unknownoutputindex)?; - vecs.address - .height_to_first_p2pk33addressindex + vecs.addresses + .first_p2pk33addressindex .checked_push(height, self.p2pk33addressindex)?; - vecs.address - .height_to_first_p2pk65addressindex + vecs.addresses + .first_p2pk65addressindex .checked_push(height, self.p2pk65addressindex)?; - vecs.address - .height_to_first_p2pkhaddressindex + vecs.addresses + .first_p2pkhaddressindex .checked_push(height, self.p2pkhaddressindex)?; - vecs.address - .height_to_first_p2shaddressindex + vecs.addresses + .first_p2shaddressindex .checked_push(height, 
self.p2shaddressindex)?; - vecs.address - .height_to_first_p2traddressindex + vecs.addresses + .first_p2traddressindex .checked_push(height, self.p2traddressindex)?; - vecs.address - .height_to_first_p2wpkhaddressindex + vecs.addresses + .first_p2wpkhaddressindex .checked_push(height, self.p2wpkhaddressindex)?; - vecs.address - .height_to_first_p2wshaddressindex + vecs.addresses + .first_p2wshaddressindex .checked_push(height, self.p2wshaddressindex)?; Ok(()) } - fn from_vecs_and_stores(min_height: Height, vecs: &mut Vecs, stores: &Stores) -> Indexes { + fn from_vecs_and_stores(required_height: Height, vecs: &mut Vecs, stores: &Stores) -> Option { debug!("Creating indexes from vecs and stores..."); - // Height at which we want to start: min last saved + 1 or 0 - let vecs_starting_height = vecs.starting_height(); - let stores_starting_height = stores.starting_height(); - let height = vecs_starting_height.min(stores_starting_height); - if height < min_height { - dbg!(height, min_height); - unreachable!() + // Local data height: minimum of vecs and stores + let vecs_height = vecs.starting_height(); + let stores_height = stores.starting_height(); + let local_height = vecs_height.min(stores_height); + + // Data inconsistency: local data behind required height + if local_height < required_height { + return None; } + // Handle reorg: local data ahead of required height + let starting_height = if local_height > required_height { + info!( + "Reorg detected: rolling back from {} to {}", + local_height, required_height + ); + required_height + } else { + local_height + }; + let emptyoutputindex = starting_index( - &vecs.output.height_to_first_emptyoutputindex, - &vecs.output.emptyoutputindex_to_txindex, - height, + &vecs.scripts.first_emptyoutputindex, + &vecs.scripts.empty_to_txindex, + starting_height, ) .unwrap(); let p2msoutputindex = starting_index( - &vecs.output.height_to_first_p2msoutputindex, - &vecs.output.p2msoutputindex_to_txindex, - height, + 
&vecs.scripts.first_p2msoutputindex, + &vecs.scripts.p2ms_to_txindex, + starting_height, ) .unwrap(); let opreturnindex = starting_index( - &vecs.output.height_to_first_opreturnindex, - &vecs.output.opreturnindex_to_txindex, - height, + &vecs.scripts.first_opreturnindex, + &vecs.scripts.opreturn_to_txindex, + starting_height, ) .unwrap(); let p2pk33addressindex = starting_index( - &vecs.address.height_to_first_p2pk33addressindex, - &vecs.address.p2pk33addressindex_to_p2pk33bytes, - height, + &vecs.addresses.first_p2pk33addressindex, + &vecs.addresses.p2pk33bytes, + starting_height, ) .unwrap(); let p2pk65addressindex = starting_index( - &vecs.address.height_to_first_p2pk65addressindex, - &vecs.address.p2pk65addressindex_to_p2pk65bytes, - height, + &vecs.addresses.first_p2pk65addressindex, + &vecs.addresses.p2pk65bytes, + starting_height, ) .unwrap(); let p2pkhaddressindex = starting_index( - &vecs.address.height_to_first_p2pkhaddressindex, - &vecs.address.p2pkhaddressindex_to_p2pkhbytes, - height, + &vecs.addresses.first_p2pkhaddressindex, + &vecs.addresses.p2pkhbytes, + starting_height, ) .unwrap(); let p2shaddressindex = starting_index( - &vecs.address.height_to_first_p2shaddressindex, - &vecs.address.p2shaddressindex_to_p2shbytes, - height, + &vecs.addresses.first_p2shaddressindex, + &vecs.addresses.p2shbytes, + starting_height, ) .unwrap(); let p2traddressindex = starting_index( - &vecs.address.height_to_first_p2traddressindex, - &vecs.address.p2traddressindex_to_p2trbytes, - height, + &vecs.addresses.first_p2traddressindex, + &vecs.addresses.p2trbytes, + starting_height, ) .unwrap(); let p2wpkhaddressindex = starting_index( - &vecs.address.height_to_first_p2wpkhaddressindex, - &vecs.address.p2wpkhaddressindex_to_p2wpkhbytes, - height, + &vecs.addresses.first_p2wpkhaddressindex, + &vecs.addresses.p2wpkhbytes, + starting_height, ) .unwrap(); let p2wshaddressindex = starting_index( - &vecs.address.height_to_first_p2wshaddressindex, - 
&vecs.address.p2wshaddressindex_to_p2wshbytes, - height, + &vecs.addresses.first_p2wshaddressindex, + &vecs.addresses.p2wshbytes, + starting_height, ) .unwrap(); let p2aaddressindex = starting_index( - &vecs.address.height_to_first_p2aaddressindex, - &vecs.address.p2aaddressindex_to_p2abytes, - height, + &vecs.addresses.first_p2aaddressindex, + &vecs.addresses.p2abytes, + starting_height, ) .unwrap(); let txindex = starting_index( - &vecs.tx.height_to_first_txindex, - &vecs.tx.txindex_to_txid, - height, + &vecs.transactions.first_txindex, + &vecs.transactions.txid, + starting_height, ) .unwrap(); - let txinindex = starting_index( - &vecs.txin.height_to_first_txinindex, - &vecs.txin.txinindex_to_outpoint, - height, - ) - .unwrap(); + let txinindex = + starting_index(&vecs.inputs.first_txinindex, &vecs.inputs.outpoint, starting_height).unwrap(); - let txoutindex = starting_index( - &vecs.txout.height_to_first_txoutindex, - &vecs.txout.txoutindex_to_value, - height, - ) - .unwrap(); + let txoutindex = + starting_index(&vecs.outputs.first_txoutindex, &vecs.outputs.value, starting_height).unwrap(); let unknownoutputindex = starting_index( - &vecs.output.height_to_first_unknownoutputindex, - &vecs.output.unknownoutputindex_to_txindex, - height, + &vecs.scripts.first_unknownoutputindex, + &vecs.scripts.unknown_to_txindex, + starting_height, ) .unwrap(); - Indexes { + Some(Indexes { emptyoutputindex, - height, + height: starting_height, p2msoutputindex, opreturnindex, p2pk33addressindex, @@ -199,7 +203,7 @@ impl IndexesExt for Indexes { txinindex, txoutindex, unknownoutputindex, - } + }) } } diff --git a/crates/brk_indexer/src/lib.rs b/crates/brk_indexer/src/lib.rs index 8473a1f07..a9e0d6b7c 100644 --- a/crates/brk_indexer/src/lib.rs +++ b/crates/brk_indexer/src/lib.rs @@ -6,7 +6,7 @@ use brk_error::Result; use brk_iterator::Blocks; use brk_rpc::Client; use brk_types::Height; -use log::{debug, info}; +use tracing::{debug, info}; use vecdb::Exit; mod constants; mod indexes; 
@@ -98,18 +98,28 @@ impl Indexer { ) -> Result { debug!("Starting indexing..."); - let last_blockhash = self.vecs.block.height_to_blockhash.iter()?.last(); + let last_blockhash = self.vecs.blocks.blockhash.iter()?.last(); debug!("Last block hash found."); let (starting_indexes, prev_hash) = if let Some(hash) = last_blockhash { let (height, hash) = client.get_closest_valid_height(hash)?; - let starting_indexes = - Indexes::from_vecs_and_stores(height.incremented(), &mut self.vecs, &self.stores); - if starting_indexes.height > client.get_last_height()? { - info!("Up to date, nothing to index."); - return Ok(starting_indexes); + match Indexes::from_vecs_and_stores(height.incremented(), &mut self.vecs, &self.stores) + { + Some(starting_indexes) => { + if starting_indexes.height > client.get_last_height()? { + info!("Up to date, nothing to index."); + return Ok(starting_indexes); + } + (starting_indexes, Some(hash)) + } + None => { + // Data inconsistency detected - reset and start fresh + info!("Data inconsistency detected, resetting indexer..."); + self.vecs.reset()?; + self.stores.reset()?; + (Indexes::default(), None) + } } - (starting_indexes, Some(hash)) } else { (Indexes::default(), None) }; diff --git a/crates/brk_indexer/src/processor/metadata.rs b/crates/brk_indexer/src/processor/metadata.rs index 98144e9c5..4f32e7f0d 100644 --- a/crates/brk_indexer/src/processor/metadata.rs +++ b/crates/brk_indexer/src/processor/metadata.rs @@ -1,6 +1,6 @@ use brk_error::{Error, Result}; use brk_types::{BlockHashPrefix, Timestamp}; -use log::error; +use tracing::error; use vecdb::GenericStoredVec; use super::BlockProcessor; @@ -35,24 +35,24 @@ impl BlockProcessor<'_> { ); self.vecs - .block - .height_to_blockhash + .blocks + .blockhash .checked_push(height, blockhash.clone())?; self.vecs - .block - .height_to_difficulty + .blocks + .difficulty .checked_push(height, self.block.header.difficulty_float().into())?; self.vecs - .block - .height_to_timestamp + .blocks + .timestamp 
.checked_push(height, Timestamp::from(self.block.header.time))?; self.vecs - .block - .height_to_total_size + .blocks + .total_size .checked_push(height, self.block.total_size().into())?; self.vecs - .block - .height_to_weight + .blocks + .weight .checked_push(height, self.block.weight().into())?; Ok(()) diff --git a/crates/brk_indexer/src/processor/tx.rs b/crates/brk_indexer/src/processor/tx.rs index 3c300f2f8..4ec0e446c 100644 --- a/crates/brk_indexer/src/processor/tx.rs +++ b/crates/brk_indexer/src/processor/tx.rs @@ -47,7 +47,7 @@ impl<'a> BlockProcessor<'a> { return Ok(()); } - let mut txindex_to_txid_iter = self.vecs.tx.txindex_to_txid.into_iter(); + let mut txindex_to_txid_iter = self.vecs.transactions.txid.into_iter(); for ct in txs.iter() { let Some(prev_txindex) = ct.prev_txindex_opt else { continue; @@ -57,7 +57,7 @@ impl<'a> BlockProcessor<'a> { continue; } - let len = self.vecs.tx.txindex_to_txid.len(); + let len = self.vecs.transactions.txid.len(); let prev_txid = txindex_to_txid_iter .get(prev_txindex) .ok_or(Error::Internal("Missing txid for txindex")) @@ -89,32 +89,32 @@ impl<'a> BlockProcessor<'a> { } self.vecs - .tx - .txindex_to_height + .transactions + .height .checked_push(ct.txindex, height)?; self.vecs - .tx - .txindex_to_txversion + .transactions + .txversion .checked_push(ct.txindex, ct.tx.version.into())?; self.vecs - .tx - .txindex_to_txid + .transactions + .txid .checked_push(ct.txindex, ct.txid)?; self.vecs - .tx - .txindex_to_rawlocktime + .transactions + .rawlocktime .checked_push(ct.txindex, ct.tx.lock_time.into())?; self.vecs - .tx - .txindex_to_base_size + .transactions + .base_size .checked_push(ct.txindex, ct.tx.base_size().into())?; self.vecs - .tx - .txindex_to_total_size + .transactions + .total_size .checked_push(ct.txindex, ct.tx.total_size().into())?; self.vecs - .tx - .txindex_to_is_explicitly_rbf + .transactions + .is_explicitly_rbf .checked_push(ct.txindex, StoredBool::from(ct.tx.is_explicitly_rbf()))?; } diff --git 
a/crates/brk_indexer/src/processor/txin.rs b/crates/brk_indexer/src/processor/txin.rs index 6695416c4..dfa7123b9 100644 --- a/crates/brk_indexer/src/processor/txin.rs +++ b/crates/brk_indexer/src/processor/txin.rs @@ -1,7 +1,7 @@ use brk_error::{Error, Result}; use brk_types::{ - AddressIndexOutPoint, AddressIndexTxIndex, OutPoint, OutputType, TxInIndex, TxIndex, Txid, - TxidPrefix, TypeIndex, Unit, Vin, Vout, + AddressIndexOutPoint, AddressIndexTxIndex, OutPoint, OutputType, StoredU32, TxInIndex, TxIndex, + Txid, TxidPrefix, TypeIndex, Unit, Vin, Vout, }; use rayon::prelude::*; use rustc_hash::{FxHashMap, FxHashSet}; @@ -39,6 +39,8 @@ impl<'a> BlockProcessor<'a> { let txindex = base_txindex + block_txindex; let txinindex = base_txinindex + TxInIndex::from(block_txinindex); + let witness_size = StoredU32::from(txin.witness.size()); + if tx.is_coinbase() { return Ok(( txinindex, @@ -47,6 +49,7 @@ impl<'a> BlockProcessor<'a> { txin, vin, outpoint: OutPoint::COINBASE, + witness_size, }, )); } @@ -66,6 +69,7 @@ impl<'a> BlockProcessor<'a> { txin, vin, outpoint, + witness_size, }, )); } @@ -86,8 +90,8 @@ impl<'a> BlockProcessor<'a> { let txoutindex = self .vecs - .tx - .txindex_to_first_txoutindex + .transactions + .first_txoutindex .get_pushed_or_read(prev_txindex, &self.readers.txindex_to_first_txoutindex)? .ok_or(Error::Internal("Missing txoutindex"))? + vout; @@ -96,15 +100,15 @@ impl<'a> BlockProcessor<'a> { let outputtype = self .vecs - .txout - .txoutindex_to_outputtype + .outputs + .outputtype .get_pushed_or_read(txoutindex, &self.readers.txoutindex_to_outputtype)? .ok_or(Error::Internal("Missing outputtype"))?; let typeindex = self .vecs - .txout - .txoutindex_to_typeindex + .outputs + .typeindex .get_pushed_or_read(txoutindex, &self.readers.txoutindex_to_typeindex)? 
.ok_or(Error::Internal("Missing typeindex"))?; @@ -116,6 +120,7 @@ impl<'a> BlockProcessor<'a> { outpoint, outputtype, typeindex, + witness_size, }, )) }, @@ -151,22 +156,24 @@ impl<'a> BlockProcessor<'a> { let height = self.height; for (txinindex, input_source) in txins { - let (vin, txindex, outpoint, outputtype, typeindex) = match input_source { + let (vin, txindex, outpoint, outputtype, typeindex, witness_size) = match input_source { InputSource::PreviousBlock { vin, txindex, outpoint, outputtype, typeindex, - } => (vin, txindex, outpoint, outputtype, typeindex), + witness_size, + } => (vin, txindex, outpoint, outputtype, typeindex, witness_size), InputSource::SameBlock { txindex, txin, vin, outpoint, + witness_size, } => { if outpoint.is_coinbase() { - (vin, txindex, outpoint, OutputType::Unknown, TypeIndex::COINBASE) + (vin, txindex, outpoint, OutputType::Unknown, TypeIndex::COINBASE, witness_size) } else { let info = same_block_output_info .remove(&outpoint) @@ -174,34 +181,38 @@ impl<'a> BlockProcessor<'a> { .inspect_err(|_| { dbg!(&same_block_output_info, txin); })?; - (vin, txindex, outpoint, info.outputtype, info.typeindex) + (vin, txindex, outpoint, info.outputtype, info.typeindex, witness_size) } } }; if vin.is_zero() { self.vecs - .tx - .txindex_to_first_txinindex + .transactions + .first_txinindex .checked_push(txindex, txinindex)?; } self.vecs - .txin - .txinindex_to_txindex + .inputs + .txindex .checked_push(txinindex, txindex)?; self.vecs - .txin - .txinindex_to_outpoint + .inputs + .outpoint .checked_push(txinindex, outpoint)?; self.vecs - .txin - .txinindex_to_outputtype + .inputs + .outputtype .checked_push(txinindex, outputtype)?; self.vecs - .txin - .txinindex_to_typeindex + .inputs + .typeindex .checked_push(txinindex, typeindex)?; + self.vecs + .inputs + .witness_size + .checked_push(txinindex, witness_size)?; if !outputtype.is_address() { continue; diff --git a/crates/brk_indexer/src/processor/txout.rs 
b/crates/brk_indexer/src/processor/txout.rs index 12fa788c7..72f4883cc 100644 --- a/crates/brk_indexer/src/processor/txout.rs +++ b/crates/brk_indexer/src/processor/txout.rs @@ -144,14 +144,14 @@ impl<'a> BlockProcessor<'a> { if vout.is_zero() { self.vecs - .tx - .txindex_to_first_txoutindex + .transactions + .first_txoutindex .checked_push(txindex, txoutindex)?; } self.vecs - .txout - .txoutindex_to_txindex + .outputs + .txindex .checked_push(txoutindex, txindex)?; let typeindex = if let Some(ti) = existing_typeindex { @@ -181,29 +181,29 @@ impl<'a> BlockProcessor<'a> { match outputtype { OutputType::P2MS => { self.vecs - .output - .p2msoutputindex_to_txindex + .scripts + .p2ms_to_txindex .checked_push(self.indexes.p2msoutputindex, txindex)?; self.indexes.p2msoutputindex.copy_then_increment() } OutputType::OpReturn => { self.vecs - .output - .opreturnindex_to_txindex + .scripts + .opreturn_to_txindex .checked_push(self.indexes.opreturnindex, txindex)?; self.indexes.opreturnindex.copy_then_increment() } OutputType::Empty => { self.vecs - .output - .emptyoutputindex_to_txindex + .scripts + .empty_to_txindex .checked_push(self.indexes.emptyoutputindex, txindex)?; self.indexes.emptyoutputindex.copy_then_increment() } OutputType::Unknown => { self.vecs - .output - .unknownoutputindex_to_txindex + .scripts + .unknown_to_txindex .checked_push(self.indexes.unknownoutputindex, txindex)?; self.indexes.unknownoutputindex.copy_then_increment() } @@ -211,17 +211,14 @@ impl<'a> BlockProcessor<'a> { } }; + self.vecs.outputs.value.checked_push(txoutindex, sats)?; self.vecs - .txout - .txoutindex_to_value - .checked_push(txoutindex, sats)?; - self.vecs - .txout - .txoutindex_to_outputtype + .outputs + .outputtype .checked_push(txoutindex, outputtype)?; self.vecs - .txout - .txoutindex_to_typeindex + .outputs + .typeindex .checked_push(txoutindex, typeindex)?; if outputtype.is_unspendable() { diff --git a/crates/brk_indexer/src/processor/types.rs 
b/crates/brk_indexer/src/processor/types.rs index 242ec9a03..5405c56bc 100644 --- a/crates/brk_indexer/src/processor/types.rs +++ b/crates/brk_indexer/src/processor/types.rs @@ -1,7 +1,7 @@ use bitcoin::{Transaction, TxIn, TxOut}; use brk_types::{ - AddressBytes, AddressHash, OutPoint, OutputType, TxIndex, TxOutIndex, Txid, TxidPrefix, - TypeIndex, Vin, Vout, + AddressBytes, AddressHash, OutPoint, OutputType, StoredU32, TxIndex, TxOutIndex, Txid, + TxidPrefix, TypeIndex, Vin, Vout, }; #[derive(Debug)] @@ -12,12 +12,14 @@ pub enum InputSource<'a> { outpoint: OutPoint, outputtype: OutputType, typeindex: TypeIndex, + witness_size: StoredU32, }, SameBlock { txindex: TxIndex, txin: &'a TxIn, vin: Vin, outpoint: OutPoint, + witness_size: StoredU32, }, } diff --git a/crates/brk_indexer/src/readers.rs b/crates/brk_indexer/src/readers.rs index 7cafc0e46..6f6716111 100644 --- a/crates/brk_indexer/src/readers.rs +++ b/crates/brk_indexer/src/readers.rs @@ -15,27 +15,27 @@ pub struct Readers { impl Readers { pub fn new(vecs: &Vecs) -> Self { Self { - txindex_to_first_txoutindex: vecs.tx.txindex_to_first_txoutindex.create_reader(), - txoutindex_to_outputtype: vecs.txout.txoutindex_to_outputtype.create_reader(), - txoutindex_to_typeindex: vecs.txout.txoutindex_to_typeindex.create_reader(), + txindex_to_first_txoutindex: vecs.transactions.first_txoutindex.create_reader(), + txoutindex_to_outputtype: vecs.outputs.outputtype.create_reader(), + txoutindex_to_typeindex: vecs.outputs.typeindex.create_reader(), addressbytes: ByAddressType { p2pk65: vecs - .address - .p2pk65addressindex_to_p2pk65bytes + .addresses + .p2pk65bytes .create_reader(), p2pk33: vecs - .address - .p2pk33addressindex_to_p2pk33bytes + .addresses + .p2pk33bytes .create_reader(), - p2pkh: vecs.address.p2pkhaddressindex_to_p2pkhbytes.create_reader(), - p2sh: vecs.address.p2shaddressindex_to_p2shbytes.create_reader(), + p2pkh: vecs.addresses.p2pkhbytes.create_reader(), + p2sh: vecs.addresses.p2shbytes.create_reader(), 
p2wpkh: vecs - .address - .p2wpkhaddressindex_to_p2wpkhbytes + .addresses + .p2wpkhbytes .create_reader(), - p2wsh: vecs.address.p2wshaddressindex_to_p2wshbytes.create_reader(), - p2tr: vecs.address.p2traddressindex_to_p2trbytes.create_reader(), - p2a: vecs.address.p2aaddressindex_to_p2abytes.create_reader(), + p2wsh: vecs.addresses.p2wshbytes.create_reader(), + p2tr: vecs.addresses.p2trbytes.create_reader(), + p2a: vecs.addresses.p2abytes.create_reader(), }, } } diff --git a/crates/brk_indexer/src/stores.rs b/crates/brk_indexer/src/stores.rs index 08efd3010..d317ba11f 100644 --- a/crates/brk_indexer/src/stores.rs +++ b/crates/brk_indexer/src/stores.rs @@ -8,8 +8,8 @@ use brk_types::{ OutputType, StoredString, TxIndex, TxOutIndex, TxidPrefix, TypeIndex, Unit, Version, Vout, }; use fjall::{Database, PersistMode}; -use log::info; use rayon::prelude::*; +use tracing::info; use vecdb::{AnyVec, TypedVecIterator, VecIndex, VecIterator}; use crate::{Indexes, constants::DUPLICATE_TXID_PREFIXES}; @@ -120,6 +120,13 @@ impl Stores { } pub fn starting_height(&self) -> Height { + self.iter_any() + .map(|store| store.height().map(Height::incremented).unwrap_or_default()) + .min() + .unwrap() + } + + fn iter_any(&self) -> impl Iterator { [ &self.blockhashprefix_to_height as &dyn AnyStore, &self.height_to_coinbase_tag, @@ -141,13 +148,9 @@ impl Stores { .values() .map(|s| s as &dyn AnyStore), ) - .map(|store| store.height().map(Height::incremented).unwrap_or_default()) - .min() - .unwrap() } - pub fn commit(&mut self, height: Height) -> Result<()> { - let i = Instant::now(); + fn par_iter_any_mut(&mut self) -> impl ParallelIterator { [ &mut self.blockhashprefix_to_height as &mut dyn AnyStore, &mut self.height_to_coinbase_tag, @@ -169,7 +172,12 @@ impl Stores { .par_values_mut() .map(|s| s as &mut dyn AnyStore), ) - .try_for_each(|store| store.commit(height))?; + } + + pub fn commit(&mut self, height: Height) -> Result<()> { + let i = Instant::now(); + self.par_iter_any_mut() + 
.try_for_each(|store| store.commit(height))?; info!("Stores committed in {:?}", i.elapsed()); let i = Instant::now(); @@ -204,8 +212,8 @@ impl Stores { } if starting_indexes.height != Height::ZERO { - vecs.block - .height_to_blockhash + vecs.blocks + .blockhash .iter()? .skip(starting_indexes.height.to_usize()) .map(BlockHashPrefix::from) @@ -213,7 +221,7 @@ impl Stores { self.blockhashprefix_to_height.remove(prefix); }); - (starting_indexes.height.to_usize()..vecs.block.height_to_blockhash.len()) + (starting_indexes.height.to_usize()..vecs.blocks.blockhash.len()) .map(Height::from) .for_each(|h| { self.height_to_coinbase_tag.remove(h); @@ -241,8 +249,8 @@ impl Stores { } if starting_indexes.txindex != TxIndex::ZERO { - vecs.tx - .txindex_to_txid + vecs.transactions + .txid .iter()? .enumerate() .skip(starting_indexes.txindex.to_usize()) @@ -266,14 +274,14 @@ impl Stores { } if starting_indexes.txoutindex != TxOutIndex::ZERO { - let mut txoutindex_to_txindex_iter = vecs.txout.txoutindex_to_txindex.iter()?; + let mut txoutindex_to_txindex_iter = vecs.outputs.txindex.iter()?; let mut txindex_to_first_txoutindex_iter = - vecs.tx.txindex_to_first_txoutindex.iter()?; - let mut txoutindex_to_outputtype_iter = vecs.txout.txoutindex_to_outputtype.iter()?; - let mut txoutindex_to_typeindex_iter = vecs.txout.txoutindex_to_typeindex.iter()?; + vecs.transactions.first_txoutindex.iter()?; + let mut txoutindex_to_outputtype_iter = vecs.outputs.outputtype.iter()?; + let mut txoutindex_to_typeindex_iter = vecs.outputs.typeindex.iter()?; for txoutindex in - starting_indexes.txoutindex.to_usize()..vecs.txout.txoutindex_to_outputtype.len() + starting_indexes.txoutindex.to_usize()..vecs.outputs.outputtype.len() { let outputtype = txoutindex_to_outputtype_iter.get_at_unwrap(txoutindex); if !outputtype.is_address() { @@ -304,14 +312,14 @@ impl Stores { // Collect outputs that were spent after the rollback point // We need to: 1) reset their spend status, 2) restore address stores let 
mut txindex_to_first_txoutindex_iter = - vecs.tx.txindex_to_first_txoutindex.iter()?; - let mut txoutindex_to_outputtype_iter = vecs.txout.txoutindex_to_outputtype.iter()?; - let mut txoutindex_to_typeindex_iter = vecs.txout.txoutindex_to_typeindex.iter()?; - let mut txinindex_to_txindex_iter = vecs.txin.txinindex_to_txindex.iter()?; + vecs.transactions.first_txoutindex.iter()?; + let mut txoutindex_to_outputtype_iter = vecs.outputs.outputtype.iter()?; + let mut txoutindex_to_typeindex_iter = vecs.outputs.typeindex.iter()?; + let mut txinindex_to_txindex_iter = vecs.inputs.txindex.iter()?; let outputs_to_unspend: Vec<_> = vecs - .txin - .txinindex_to_outpoint + .inputs + .outpoint .iter()? .enumerate() .skip(starting_indexes.txinindex.to_usize()) @@ -360,7 +368,29 @@ impl Stores { unreachable!(); } - self.commit(starting_indexes.height.decremented().unwrap_or_default())?; + // Force-lower the height on all stores before committing. + // This is necessary because commit() only updates the height if needed, + // but during rollback we must lower it even if it's already higher. 
+ let rollback_height = starting_indexes.height.decremented().unwrap_or_default(); + self.par_iter_any_mut() + .try_for_each(|store| store.export_meta(rollback_height))?; + + self.commit(rollback_height)?; + + Ok(()) + } + + pub fn reset(&mut self) -> Result<()> { + info!("Resetting stores..."); + + // Clear all keyspaces + self.iter_any().try_for_each(|store| -> Result<()> { + store.keyspace().clear()?; + Ok(()) + })?; + + // Persist the cleared state + self.db.persist(PersistMode::SyncAll)?; Ok(()) } diff --git a/crates/brk_indexer/src/vecs/address.rs b/crates/brk_indexer/src/vecs/address.rs deleted file mode 100644 index a2aa270b5..000000000 --- a/crates/brk_indexer/src/vecs/address.rs +++ /dev/null @@ -1,309 +0,0 @@ -use brk_error::Result; -use brk_traversable::Traversable; -use brk_types::{ - AddressBytes, AddressHash, Height, OutputType, P2AAddressIndex, P2ABytes, P2PK33AddressIndex, - P2PK33Bytes, P2PK65AddressIndex, P2PK65Bytes, P2PKHAddressIndex, P2PKHBytes, P2SHAddressIndex, - P2SHBytes, P2TRAddressIndex, P2TRBytes, P2WPKHAddressIndex, P2WPKHBytes, P2WSHAddressIndex, - P2WSHBytes, TypeIndex, Version, -}; -use rayon::prelude::*; -use vecdb::{ - AnyStoredVec, BytesVec, Database, GenericStoredVec, ImportableVec, PcoVec, Reader, Stamp, - TypedVecIterator, -}; - -use crate::parallel_import; - -#[derive(Clone, Traversable)] -pub struct AddressVecs { - // Height to first address index (per address type) - pub height_to_first_p2pk65addressindex: PcoVec, - pub height_to_first_p2pk33addressindex: PcoVec, - pub height_to_first_p2pkhaddressindex: PcoVec, - pub height_to_first_p2shaddressindex: PcoVec, - pub height_to_first_p2wpkhaddressindex: PcoVec, - pub height_to_first_p2wshaddressindex: PcoVec, - pub height_to_first_p2traddressindex: PcoVec, - pub height_to_first_p2aaddressindex: PcoVec, - // Address index to bytes (per address type) - pub p2pk65addressindex_to_p2pk65bytes: BytesVec, - pub p2pk33addressindex_to_p2pk33bytes: BytesVec, - pub 
p2pkhaddressindex_to_p2pkhbytes: BytesVec, - pub p2shaddressindex_to_p2shbytes: BytesVec, - pub p2wpkhaddressindex_to_p2wpkhbytes: BytesVec, - pub p2wshaddressindex_to_p2wshbytes: BytesVec, - pub p2traddressindex_to_p2trbytes: BytesVec, - pub p2aaddressindex_to_p2abytes: BytesVec, -} - -impl AddressVecs { - pub fn forced_import(db: &Database, version: Version) -> Result { - let ( - height_to_first_p2pk65addressindex, - height_to_first_p2pk33addressindex, - height_to_first_p2pkhaddressindex, - height_to_first_p2shaddressindex, - height_to_first_p2wpkhaddressindex, - height_to_first_p2wshaddressindex, - height_to_first_p2traddressindex, - height_to_first_p2aaddressindex, - p2pk65addressindex_to_p2pk65bytes, - p2pk33addressindex_to_p2pk33bytes, - p2pkhaddressindex_to_p2pkhbytes, - p2shaddressindex_to_p2shbytes, - p2wpkhaddressindex_to_p2wpkhbytes, - p2wshaddressindex_to_p2wshbytes, - p2traddressindex_to_p2trbytes, - p2aaddressindex_to_p2abytes, - ) = parallel_import! { - height_to_first_p2pk65addressindex = PcoVec::forced_import(db, "first_p2pk65addressindex", version), - height_to_first_p2pk33addressindex = PcoVec::forced_import(db, "first_p2pk33addressindex", version), - height_to_first_p2pkhaddressindex = PcoVec::forced_import(db, "first_p2pkhaddressindex", version), - height_to_first_p2shaddressindex = PcoVec::forced_import(db, "first_p2shaddressindex", version), - height_to_first_p2wpkhaddressindex = PcoVec::forced_import(db, "first_p2wpkhaddressindex", version), - height_to_first_p2wshaddressindex = PcoVec::forced_import(db, "first_p2wshaddressindex", version), - height_to_first_p2traddressindex = PcoVec::forced_import(db, "first_p2traddressindex", version), - height_to_first_p2aaddressindex = PcoVec::forced_import(db, "first_p2aaddressindex", version), - p2pk65addressindex_to_p2pk65bytes = BytesVec::forced_import(db, "p2pk65bytes", version), - p2pk33addressindex_to_p2pk33bytes = BytesVec::forced_import(db, "p2pk33bytes", version), - 
p2pkhaddressindex_to_p2pkhbytes = BytesVec::forced_import(db, "p2pkhbytes", version), - p2shaddressindex_to_p2shbytes = BytesVec::forced_import(db, "p2shbytes", version), - p2wpkhaddressindex_to_p2wpkhbytes = BytesVec::forced_import(db, "p2wpkhbytes", version), - p2wshaddressindex_to_p2wshbytes = BytesVec::forced_import(db, "p2wshbytes", version), - p2traddressindex_to_p2trbytes = BytesVec::forced_import(db, "p2trbytes", version), - p2aaddressindex_to_p2abytes = BytesVec::forced_import(db, "p2abytes", version), - }; - Ok(Self { - height_to_first_p2pk65addressindex, - height_to_first_p2pk33addressindex, - height_to_first_p2pkhaddressindex, - height_to_first_p2shaddressindex, - height_to_first_p2wpkhaddressindex, - height_to_first_p2wshaddressindex, - height_to_first_p2traddressindex, - height_to_first_p2aaddressindex, - p2pk65addressindex_to_p2pk65bytes, - p2pk33addressindex_to_p2pk33bytes, - p2pkhaddressindex_to_p2pkhbytes, - p2shaddressindex_to_p2shbytes, - p2wpkhaddressindex_to_p2wpkhbytes, - p2wshaddressindex_to_p2wshbytes, - p2traddressindex_to_p2trbytes, - p2aaddressindex_to_p2abytes, - }) - } - - #[allow(clippy::too_many_arguments)] - pub fn truncate( - &mut self, - height: Height, - p2pk65addressindex: P2PK65AddressIndex, - p2pk33addressindex: P2PK33AddressIndex, - p2pkhaddressindex: P2PKHAddressIndex, - p2shaddressindex: P2SHAddressIndex, - p2wpkhaddressindex: P2WPKHAddressIndex, - p2wshaddressindex: P2WSHAddressIndex, - p2traddressindex: P2TRAddressIndex, - p2aaddressindex: P2AAddressIndex, - stamp: Stamp, - ) -> Result<()> { - self.height_to_first_p2pk65addressindex - .truncate_if_needed_with_stamp(height, stamp)?; - self.height_to_first_p2pk33addressindex - .truncate_if_needed_with_stamp(height, stamp)?; - self.height_to_first_p2pkhaddressindex - .truncate_if_needed_with_stamp(height, stamp)?; - self.height_to_first_p2shaddressindex - .truncate_if_needed_with_stamp(height, stamp)?; - self.height_to_first_p2wpkhaddressindex - 
.truncate_if_needed_with_stamp(height, stamp)?; - self.height_to_first_p2wshaddressindex - .truncate_if_needed_with_stamp(height, stamp)?; - self.height_to_first_p2traddressindex - .truncate_if_needed_with_stamp(height, stamp)?; - self.height_to_first_p2aaddressindex - .truncate_if_needed_with_stamp(height, stamp)?; - self.p2pk65addressindex_to_p2pk65bytes - .truncate_if_needed_with_stamp(p2pk65addressindex, stamp)?; - self.p2pk33addressindex_to_p2pk33bytes - .truncate_if_needed_with_stamp(p2pk33addressindex, stamp)?; - self.p2pkhaddressindex_to_p2pkhbytes - .truncate_if_needed_with_stamp(p2pkhaddressindex, stamp)?; - self.p2shaddressindex_to_p2shbytes - .truncate_if_needed_with_stamp(p2shaddressindex, stamp)?; - self.p2wpkhaddressindex_to_p2wpkhbytes - .truncate_if_needed_with_stamp(p2wpkhaddressindex, stamp)?; - self.p2wshaddressindex_to_p2wshbytes - .truncate_if_needed_with_stamp(p2wshaddressindex, stamp)?; - self.p2traddressindex_to_p2trbytes - .truncate_if_needed_with_stamp(p2traddressindex, stamp)?; - self.p2aaddressindex_to_p2abytes - .truncate_if_needed_with_stamp(p2aaddressindex, stamp)?; - Ok(()) - } - - pub fn par_iter_mut_any(&mut self) -> impl ParallelIterator { - [ - &mut self.height_to_first_p2pk65addressindex as &mut dyn AnyStoredVec, - &mut self.height_to_first_p2pk33addressindex, - &mut self.height_to_first_p2pkhaddressindex, - &mut self.height_to_first_p2shaddressindex, - &mut self.height_to_first_p2wpkhaddressindex, - &mut self.height_to_first_p2wshaddressindex, - &mut self.height_to_first_p2traddressindex, - &mut self.height_to_first_p2aaddressindex, - &mut self.p2pk65addressindex_to_p2pk65bytes, - &mut self.p2pk33addressindex_to_p2pk33bytes, - &mut self.p2pkhaddressindex_to_p2pkhbytes, - &mut self.p2shaddressindex_to_p2shbytes, - &mut self.p2wpkhaddressindex_to_p2wpkhbytes, - &mut self.p2wshaddressindex_to_p2wshbytes, - &mut self.p2traddressindex_to_p2trbytes, - &mut self.p2aaddressindex_to_p2abytes, - ] - .into_par_iter() - } - - /// Get 
address bytes by output type, using the reader for the specific address type. - /// Returns None if the index doesn't exist yet. - pub fn get_bytes_by_type( - &self, - addresstype: OutputType, - typeindex: TypeIndex, - reader: &Reader, - ) -> Result> { - match addresstype { - OutputType::P2PK65 => self - .p2pk65addressindex_to_p2pk65bytes - .get_pushed_or_read(typeindex.into(), reader) - .map(|opt| opt.map(AddressBytes::from)), - OutputType::P2PK33 => self - .p2pk33addressindex_to_p2pk33bytes - .get_pushed_or_read(typeindex.into(), reader) - .map(|opt| opt.map(AddressBytes::from)), - OutputType::P2PKH => self - .p2pkhaddressindex_to_p2pkhbytes - .get_pushed_or_read(typeindex.into(), reader) - .map(|opt| opt.map(AddressBytes::from)), - OutputType::P2SH => self - .p2shaddressindex_to_p2shbytes - .get_pushed_or_read(typeindex.into(), reader) - .map(|opt| opt.map(AddressBytes::from)), - OutputType::P2WPKH => self - .p2wpkhaddressindex_to_p2wpkhbytes - .get_pushed_or_read(typeindex.into(), reader) - .map(|opt| opt.map(AddressBytes::from)), - OutputType::P2WSH => self - .p2wshaddressindex_to_p2wshbytes - .get_pushed_or_read(typeindex.into(), reader) - .map(|opt| opt.map(AddressBytes::from)), - OutputType::P2TR => self - .p2traddressindex_to_p2trbytes - .get_pushed_or_read(typeindex.into(), reader) - .map(|opt| opt.map(AddressBytes::from)), - OutputType::P2A => self - .p2aaddressindex_to_p2abytes - .get_pushed_or_read(typeindex.into(), reader) - .map(|opt| opt.map(AddressBytes::from)), - _ => unreachable!("get_bytes_by_type called with non-address type"), - } - .map_err(|e| e.into()) - } - - pub fn push_bytes_if_needed(&mut self, index: TypeIndex, bytes: AddressBytes) -> Result<()> { - match bytes { - AddressBytes::P2PK65(bytes) => self - .p2pk65addressindex_to_p2pk65bytes - .checked_push(index.into(), *bytes)?, - AddressBytes::P2PK33(bytes) => self - .p2pk33addressindex_to_p2pk33bytes - .checked_push(index.into(), *bytes)?, - AddressBytes::P2PKH(bytes) => self - 
.p2pkhaddressindex_to_p2pkhbytes - .checked_push(index.into(), *bytes)?, - AddressBytes::P2SH(bytes) => self - .p2shaddressindex_to_p2shbytes - .checked_push(index.into(), *bytes)?, - AddressBytes::P2WPKH(bytes) => self - .p2wpkhaddressindex_to_p2wpkhbytes - .checked_push(index.into(), *bytes)?, - AddressBytes::P2WSH(bytes) => self - .p2wshaddressindex_to_p2wshbytes - .checked_push(index.into(), *bytes)?, - AddressBytes::P2TR(bytes) => self - .p2traddressindex_to_p2trbytes - .checked_push(index.into(), *bytes)?, - AddressBytes::P2A(bytes) => self - .p2aaddressindex_to_p2abytes - .checked_push(index.into(), *bytes)?, - }; - Ok(()) - } - - /// Iterate address hashes starting from a given height (for rollback). - /// Returns an iterator of AddressHash values for all addresses of the given type - /// that were added at or after the given height. - pub fn iter_hashes_from( - &self, - address_type: OutputType, - height: Height, - ) -> Result + '_>> { - macro_rules! make_iter { - ($height_vec:expr, $bytes_vec:expr) => {{ - match $height_vec.read_once(height) { - Ok(mut index) => { - let mut iter = $bytes_vec.iter()?; - Ok(Box::new(std::iter::from_fn(move || { - iter.get(index).map(|typedbytes| { - let bytes = AddressBytes::from(typedbytes); - index.increment(); - AddressHash::from(&bytes) - }) - })) - as Box + '_>) - } - Err(_) => { - Ok(Box::new(std::iter::empty()) - as Box + '_>) - } - } - }}; - } - - match address_type { - OutputType::P2PK65 => make_iter!( - self.height_to_first_p2pk65addressindex, - self.p2pk65addressindex_to_p2pk65bytes - ), - OutputType::P2PK33 => make_iter!( - self.height_to_first_p2pk33addressindex, - self.p2pk33addressindex_to_p2pk33bytes - ), - OutputType::P2PKH => make_iter!( - self.height_to_first_p2pkhaddressindex, - self.p2pkhaddressindex_to_p2pkhbytes - ), - OutputType::P2SH => make_iter!( - self.height_to_first_p2shaddressindex, - self.p2shaddressindex_to_p2shbytes - ), - OutputType::P2WPKH => make_iter!( - 
self.height_to_first_p2wpkhaddressindex, - self.p2wpkhaddressindex_to_p2wpkhbytes - ), - OutputType::P2WSH => make_iter!( - self.height_to_first_p2wshaddressindex, - self.p2wshaddressindex_to_p2wshbytes - ), - OutputType::P2TR => make_iter!( - self.height_to_first_p2traddressindex, - self.p2traddressindex_to_p2trbytes - ), - OutputType::P2A => make_iter!( - self.height_to_first_p2aaddressindex, - self.p2aaddressindex_to_p2abytes - ), - _ => Ok(Box::new(std::iter::empty())), - } - } -} diff --git a/crates/brk_indexer/src/vecs/addresses.rs b/crates/brk_indexer/src/vecs/addresses.rs new file mode 100644 index 000000000..051ee6a6b --- /dev/null +++ b/crates/brk_indexer/src/vecs/addresses.rs @@ -0,0 +1,309 @@ +use brk_error::Result; +use brk_traversable::Traversable; +use brk_types::{ + AddressBytes, AddressHash, Height, OutputType, P2AAddressIndex, P2ABytes, P2PK33AddressIndex, + P2PK33Bytes, P2PK65AddressIndex, P2PK65Bytes, P2PKHAddressIndex, P2PKHBytes, P2SHAddressIndex, + P2SHBytes, P2TRAddressIndex, P2TRBytes, P2WPKHAddressIndex, P2WPKHBytes, P2WSHAddressIndex, + P2WSHBytes, TypeIndex, Version, +}; +use rayon::prelude::*; +use vecdb::{ + AnyStoredVec, BytesVec, Database, GenericStoredVec, ImportableVec, PcoVec, Reader, Stamp, + TypedVecIterator, +}; + +use crate::parallel_import; + +#[derive(Clone, Traversable)] +pub struct AddressesVecs { + // Height to first address index (per address type) + pub first_p2pk65addressindex: PcoVec, + pub first_p2pk33addressindex: PcoVec, + pub first_p2pkhaddressindex: PcoVec, + pub first_p2shaddressindex: PcoVec, + pub first_p2wpkhaddressindex: PcoVec, + pub first_p2wshaddressindex: PcoVec, + pub first_p2traddressindex: PcoVec, + pub first_p2aaddressindex: PcoVec, + // Address index to bytes (per address type) + pub p2pk65bytes: BytesVec, + pub p2pk33bytes: BytesVec, + pub p2pkhbytes: BytesVec, + pub p2shbytes: BytesVec, + pub p2wpkhbytes: BytesVec, + pub p2wshbytes: BytesVec, + pub p2trbytes: BytesVec, + pub p2abytes: BytesVec, +} 
+ +impl AddressesVecs { + pub fn forced_import(db: &Database, version: Version) -> Result { + let ( + first_p2pk65addressindex, + first_p2pk33addressindex, + first_p2pkhaddressindex, + first_p2shaddressindex, + first_p2wpkhaddressindex, + first_p2wshaddressindex, + first_p2traddressindex, + first_p2aaddressindex, + p2pk65bytes, + p2pk33bytes, + p2pkhbytes, + p2shbytes, + p2wpkhbytes, + p2wshbytes, + p2trbytes, + p2abytes, + ) = parallel_import! { + first_p2pk65addressindex = PcoVec::forced_import(db, "first_p2pk65addressindex", version), + first_p2pk33addressindex = PcoVec::forced_import(db, "first_p2pk33addressindex", version), + first_p2pkhaddressindex = PcoVec::forced_import(db, "first_p2pkhaddressindex", version), + first_p2shaddressindex = PcoVec::forced_import(db, "first_p2shaddressindex", version), + first_p2wpkhaddressindex = PcoVec::forced_import(db, "first_p2wpkhaddressindex", version), + first_p2wshaddressindex = PcoVec::forced_import(db, "first_p2wshaddressindex", version), + first_p2traddressindex = PcoVec::forced_import(db, "first_p2traddressindex", version), + first_p2aaddressindex = PcoVec::forced_import(db, "first_p2aaddressindex", version), + p2pk65bytes = BytesVec::forced_import(db, "p2pk65bytes", version), + p2pk33bytes = BytesVec::forced_import(db, "p2pk33bytes", version), + p2pkhbytes = BytesVec::forced_import(db, "p2pkhbytes", version), + p2shbytes = BytesVec::forced_import(db, "p2shbytes", version), + p2wpkhbytes = BytesVec::forced_import(db, "p2wpkhbytes", version), + p2wshbytes = BytesVec::forced_import(db, "p2wshbytes", version), + p2trbytes = BytesVec::forced_import(db, "p2trbytes", version), + p2abytes = BytesVec::forced_import(db, "p2abytes", version), + }; + Ok(Self { + first_p2pk65addressindex, + first_p2pk33addressindex, + first_p2pkhaddressindex, + first_p2shaddressindex, + first_p2wpkhaddressindex, + first_p2wshaddressindex, + first_p2traddressindex, + first_p2aaddressindex, + p2pk65bytes, + p2pk33bytes, + p2pkhbytes, + p2shbytes, 
+ p2wpkhbytes, + p2wshbytes, + p2trbytes, + p2abytes, + }) + } + + #[allow(clippy::too_many_arguments)] + pub fn truncate( + &mut self, + height: Height, + p2pk65addressindex: P2PK65AddressIndex, + p2pk33addressindex: P2PK33AddressIndex, + p2pkhaddressindex: P2PKHAddressIndex, + p2shaddressindex: P2SHAddressIndex, + p2wpkhaddressindex: P2WPKHAddressIndex, + p2wshaddressindex: P2WSHAddressIndex, + p2traddressindex: P2TRAddressIndex, + p2aaddressindex: P2AAddressIndex, + stamp: Stamp, + ) -> Result<()> { + self.first_p2pk65addressindex + .truncate_if_needed_with_stamp(height, stamp)?; + self.first_p2pk33addressindex + .truncate_if_needed_with_stamp(height, stamp)?; + self.first_p2pkhaddressindex + .truncate_if_needed_with_stamp(height, stamp)?; + self.first_p2shaddressindex + .truncate_if_needed_with_stamp(height, stamp)?; + self.first_p2wpkhaddressindex + .truncate_if_needed_with_stamp(height, stamp)?; + self.first_p2wshaddressindex + .truncate_if_needed_with_stamp(height, stamp)?; + self.first_p2traddressindex + .truncate_if_needed_with_stamp(height, stamp)?; + self.first_p2aaddressindex + .truncate_if_needed_with_stamp(height, stamp)?; + self.p2pk65bytes + .truncate_if_needed_with_stamp(p2pk65addressindex, stamp)?; + self.p2pk33bytes + .truncate_if_needed_with_stamp(p2pk33addressindex, stamp)?; + self.p2pkhbytes + .truncate_if_needed_with_stamp(p2pkhaddressindex, stamp)?; + self.p2shbytes + .truncate_if_needed_with_stamp(p2shaddressindex, stamp)?; + self.p2wpkhbytes + .truncate_if_needed_with_stamp(p2wpkhaddressindex, stamp)?; + self.p2wshbytes + .truncate_if_needed_with_stamp(p2wshaddressindex, stamp)?; + self.p2trbytes + .truncate_if_needed_with_stamp(p2traddressindex, stamp)?; + self.p2abytes + .truncate_if_needed_with_stamp(p2aaddressindex, stamp)?; + Ok(()) + } + + pub fn par_iter_mut_any(&mut self) -> impl ParallelIterator { + [ + &mut self.first_p2pk65addressindex as &mut dyn AnyStoredVec, + &mut self.first_p2pk33addressindex, + &mut 
self.first_p2pkhaddressindex, + &mut self.first_p2shaddressindex, + &mut self.first_p2wpkhaddressindex, + &mut self.first_p2wshaddressindex, + &mut self.first_p2traddressindex, + &mut self.first_p2aaddressindex, + &mut self.p2pk65bytes, + &mut self.p2pk33bytes, + &mut self.p2pkhbytes, + &mut self.p2shbytes, + &mut self.p2wpkhbytes, + &mut self.p2wshbytes, + &mut self.p2trbytes, + &mut self.p2abytes, + ] + .into_par_iter() + } + + /// Get address bytes by output type, using the reader for the specific address type. + /// Returns None if the index doesn't exist yet. + pub fn get_bytes_by_type( + &self, + addresstype: OutputType, + typeindex: TypeIndex, + reader: &Reader, + ) -> Result> { + match addresstype { + OutputType::P2PK65 => self + .p2pk65bytes + .get_pushed_or_read(typeindex.into(), reader) + .map(|opt| opt.map(AddressBytes::from)), + OutputType::P2PK33 => self + .p2pk33bytes + .get_pushed_or_read(typeindex.into(), reader) + .map(|opt| opt.map(AddressBytes::from)), + OutputType::P2PKH => self + .p2pkhbytes + .get_pushed_or_read(typeindex.into(), reader) + .map(|opt| opt.map(AddressBytes::from)), + OutputType::P2SH => self + .p2shbytes + .get_pushed_or_read(typeindex.into(), reader) + .map(|opt| opt.map(AddressBytes::from)), + OutputType::P2WPKH => self + .p2wpkhbytes + .get_pushed_or_read(typeindex.into(), reader) + .map(|opt| opt.map(AddressBytes::from)), + OutputType::P2WSH => self + .p2wshbytes + .get_pushed_or_read(typeindex.into(), reader) + .map(|opt| opt.map(AddressBytes::from)), + OutputType::P2TR => self + .p2trbytes + .get_pushed_or_read(typeindex.into(), reader) + .map(|opt| opt.map(AddressBytes::from)), + OutputType::P2A => self + .p2abytes + .get_pushed_or_read(typeindex.into(), reader) + .map(|opt| opt.map(AddressBytes::from)), + _ => unreachable!("get_bytes_by_type called with non-address type"), + } + .map_err(|e| e.into()) + } + + pub fn push_bytes_if_needed(&mut self, index: TypeIndex, bytes: AddressBytes) -> Result<()> { + match bytes { + 
AddressBytes::P2PK65(bytes) => self + .p2pk65bytes + .checked_push(index.into(), *bytes)?, + AddressBytes::P2PK33(bytes) => self + .p2pk33bytes + .checked_push(index.into(), *bytes)?, + AddressBytes::P2PKH(bytes) => self + .p2pkhbytes + .checked_push(index.into(), *bytes)?, + AddressBytes::P2SH(bytes) => self + .p2shbytes + .checked_push(index.into(), *bytes)?, + AddressBytes::P2WPKH(bytes) => self + .p2wpkhbytes + .checked_push(index.into(), *bytes)?, + AddressBytes::P2WSH(bytes) => self + .p2wshbytes + .checked_push(index.into(), *bytes)?, + AddressBytes::P2TR(bytes) => self + .p2trbytes + .checked_push(index.into(), *bytes)?, + AddressBytes::P2A(bytes) => self + .p2abytes + .checked_push(index.into(), *bytes)?, + }; + Ok(()) + } + + /// Iterate address hashes starting from a given height (for rollback). + /// Returns an iterator of AddressHash values for all addresses of the given type + /// that were added at or after the given height. + pub fn iter_hashes_from( + &self, + address_type: OutputType, + height: Height, + ) -> Result + '_>> { + macro_rules! 
make_iter { + ($height_vec:expr, $bytes_vec:expr) => {{ + match $height_vec.read_once(height) { + Ok(mut index) => { + let mut iter = $bytes_vec.iter()?; + Ok(Box::new(std::iter::from_fn(move || { + iter.get(index).map(|typedbytes| { + let bytes = AddressBytes::from(typedbytes); + index.increment(); + AddressHash::from(&bytes) + }) + })) + as Box + '_>) + } + Err(_) => { + Ok(Box::new(std::iter::empty()) + as Box + '_>) + } + } + }}; + } + + match address_type { + OutputType::P2PK65 => make_iter!( + self.first_p2pk65addressindex, + self.p2pk65bytes + ), + OutputType::P2PK33 => make_iter!( + self.first_p2pk33addressindex, + self.p2pk33bytes + ), + OutputType::P2PKH => make_iter!( + self.first_p2pkhaddressindex, + self.p2pkhbytes + ), + OutputType::P2SH => make_iter!( + self.first_p2shaddressindex, + self.p2shbytes + ), + OutputType::P2WPKH => make_iter!( + self.first_p2wpkhaddressindex, + self.p2wpkhbytes + ), + OutputType::P2WSH => make_iter!( + self.first_p2wshaddressindex, + self.p2wshbytes + ), + OutputType::P2TR => make_iter!( + self.first_p2traddressindex, + self.p2trbytes + ), + OutputType::P2A => make_iter!( + self.first_p2aaddressindex, + self.p2abytes + ), + _ => Ok(Box::new(std::iter::empty())), + } + } +} diff --git a/crates/brk_indexer/src/vecs/blocks.rs b/crates/brk_indexer/src/vecs/blocks.rs index bcabf1ba5..0059567b6 100644 --- a/crates/brk_indexer/src/vecs/blocks.rs +++ b/crates/brk_indexer/src/vecs/blocks.rs @@ -7,60 +7,54 @@ use vecdb::{AnyStoredVec, BytesVec, Database, GenericStoredVec, ImportableVec, P use crate::parallel_import; #[derive(Clone, Traversable)] -pub struct BlockVecs { - pub height_to_blockhash: BytesVec, - pub height_to_difficulty: PcoVec, +pub struct BlocksVecs { + pub blockhash: BytesVec, + pub difficulty: PcoVec, /// Doesn't guarantee continuity due to possible reorgs and more generally the nature of mining - pub height_to_timestamp: PcoVec, - pub height_to_total_size: PcoVec, - pub height_to_weight: PcoVec, + pub timestamp: 
PcoVec, + pub total_size: PcoVec, + pub weight: PcoVec, } -impl BlockVecs { +impl BlocksVecs { pub fn forced_import(db: &Database, version: Version) -> Result { - let ( - height_to_blockhash, - height_to_difficulty, - height_to_timestamp, - height_to_total_size, - height_to_weight, - ) = parallel_import! { - height_to_blockhash = BytesVec::forced_import(db, "blockhash", version), - height_to_difficulty = PcoVec::forced_import(db, "difficulty", version), - height_to_timestamp = PcoVec::forced_import(db, "timestamp", version), - height_to_total_size = PcoVec::forced_import(db, "total_size", version), - height_to_weight = PcoVec::forced_import(db, "weight", version), + let (blockhash, difficulty, timestamp, total_size, weight) = parallel_import! { + blockhash = BytesVec::forced_import(db, "blockhash", version), + difficulty = PcoVec::forced_import(db, "difficulty", version), + timestamp = PcoVec::forced_import(db, "timestamp", version), + total_size = PcoVec::forced_import(db, "total_size", version), + weight = PcoVec::forced_import(db, "weight", version), }; Ok(Self { - height_to_blockhash, - height_to_difficulty, - height_to_timestamp, - height_to_total_size, - height_to_weight, + blockhash, + difficulty, + timestamp, + total_size, + weight, }) } pub fn truncate(&mut self, height: Height, stamp: Stamp) -> Result<()> { - self.height_to_blockhash + self.blockhash .truncate_if_needed_with_stamp(height, stamp)?; - self.height_to_difficulty + self.difficulty .truncate_if_needed_with_stamp(height, stamp)?; - self.height_to_timestamp + self.timestamp .truncate_if_needed_with_stamp(height, stamp)?; - self.height_to_total_size + self.total_size .truncate_if_needed_with_stamp(height, stamp)?; - self.height_to_weight + self.weight .truncate_if_needed_with_stamp(height, stamp)?; Ok(()) } pub fn par_iter_mut_any(&mut self) -> impl ParallelIterator { [ - &mut self.height_to_blockhash as &mut dyn AnyStoredVec, - &mut self.height_to_difficulty, - &mut self.height_to_timestamp, - 
&mut self.height_to_total_size, - &mut self.height_to_weight, + &mut self.blockhash as &mut dyn AnyStoredVec, + &mut self.difficulty, + &mut self.timestamp, + &mut self.total_size, + &mut self.weight, ] .into_par_iter() } diff --git a/crates/brk_indexer/src/vecs/inputs.rs b/crates/brk_indexer/src/vecs/inputs.rs new file mode 100644 index 000000000..3ba65ecaf --- /dev/null +++ b/crates/brk_indexer/src/vecs/inputs.rs @@ -0,0 +1,75 @@ +use brk_error::Result; +use brk_traversable::Traversable; +use brk_types::{ + Height, OutPoint, OutputType, StoredU32, TxInIndex, TxIndex, TypeIndex, Version, +}; +use rayon::prelude::*; +use vecdb::{AnyStoredVec, Database, GenericStoredVec, ImportableVec, PcoVec, Stamp}; + +use crate::parallel_import; + +#[derive(Clone, Traversable)] +pub struct InputsVecs { + pub first_txinindex: PcoVec, + pub outpoint: PcoVec, + pub txindex: PcoVec, + pub outputtype: PcoVec, + pub typeindex: PcoVec, + pub witness_size: PcoVec, +} + +impl InputsVecs { + pub fn forced_import(db: &Database, version: Version) -> Result { + let ( + first_txinindex, + outpoint, + txindex, + outputtype, + typeindex, + witness_size, + ) = parallel_import! 
{ + first_txinindex = PcoVec::forced_import(db, "first_txinindex", version), + outpoint = PcoVec::forced_import(db, "outpoint", version), + txindex = PcoVec::forced_import(db, "txindex", version), + outputtype = PcoVec::forced_import(db, "outputtype", version), + typeindex = PcoVec::forced_import(db, "typeindex", version), + witness_size = PcoVec::forced_import(db, "witness_size", version), + }; + Ok(Self { + first_txinindex, + outpoint, + txindex, + outputtype, + typeindex, + witness_size, + }) + } + + pub fn truncate(&mut self, height: Height, txinindex: TxInIndex, stamp: Stamp) -> Result<()> { + self.first_txinindex + .truncate_if_needed_with_stamp(height, stamp)?; + self.outpoint + .truncate_if_needed_with_stamp(txinindex, stamp)?; + self.txindex + .truncate_if_needed_with_stamp(txinindex, stamp)?; + self.outputtype + .truncate_if_needed_with_stamp(txinindex, stamp)?; + self.typeindex + .truncate_if_needed_with_stamp(txinindex, stamp)?; + self.witness_size + .truncate_if_needed_with_stamp(txinindex, stamp)?; + Ok(()) + } + + pub fn par_iter_mut_any(&mut self) -> impl ParallelIterator { + [ + &mut self.first_txinindex as &mut dyn AnyStoredVec, + &mut self.outpoint, + &mut self.txindex, + &mut self.outputtype, + &mut self.typeindex, + &mut self.witness_size, + ] + .into_par_iter() + } +} diff --git a/crates/brk_indexer/src/vecs/mod.rs b/crates/brk_indexer/src/vecs/mod.rs index 0b057410b..60eadf739 100644 --- a/crates/brk_indexer/src/vecs/mod.rs +++ b/crates/brk_indexer/src/vecs/mod.rs @@ -8,90 +8,90 @@ use vecdb::{AnyStoredVec, Database, PAGE_SIZE, Reader, Stamp}; use crate::parallel_import; -mod address; +mod addresses; mod blocks; +mod inputs; mod macros; -mod output; -mod tx; -mod txin; -mod txout; +mod outputs; +mod scripts; +mod transactions; -pub use address::*; +pub use addresses::*; pub use blocks::*; -pub use output::*; -pub use tx::*; -pub use txin::*; -pub use txout::*; +pub use inputs::*; +pub use outputs::*; +pub use scripts::*; +pub use 
transactions::*; use crate::Indexes; #[derive(Clone, Traversable)] pub struct Vecs { db: Database, - pub block: BlockVecs, - pub tx: TxVecs, - pub txin: TxinVecs, - pub txout: TxoutVecs, - pub address: AddressVecs, - pub output: OutputVecs, + pub blocks: BlocksVecs, + pub transactions: TransactionsVecs, + pub inputs: InputsVecs, + pub outputs: OutputsVecs, + pub addresses: AddressesVecs, + pub scripts: ScriptsVecs, } impl Vecs { pub fn forced_import(parent: &Path, version: Version) -> Result { - log::debug!("Opening vecs database..."); + tracing::debug!("Opening vecs database..."); let db = Database::open(&parent.join("vecs"))?; - log::debug!("Setting min len..."); + tracing::debug!("Setting min len..."); db.set_min_len(PAGE_SIZE * 50_000_000)?; - log::debug!("Importing sub-vecs in parallel..."); - let (block, tx, txin, txout, address, output) = parallel_import! { - block = { - log::debug!("Importing BlockVecs..."); - let r = BlockVecs::forced_import(&db, version); - log::debug!("BlockVecs imported."); + tracing::debug!("Importing sub-vecs in parallel..."); + let (blocks, transactions, inputs, outputs, addresses, scripts) = parallel_import! 
{ + blocks = { + tracing::debug!("Importing BlocksVecs..."); + let r = BlocksVecs::forced_import(&db, version); + tracing::debug!("BlocksVecs imported."); r }, - tx = { - log::debug!("Importing TxVecs..."); - let r = TxVecs::forced_import(&db, version); - log::debug!("TxVecs imported."); + transactions = { + tracing::debug!("Importing TransactionsVecs..."); + let r = TransactionsVecs::forced_import(&db, version); + tracing::debug!("TransactionsVecs imported."); r }, - txin = { - log::debug!("Importing TxinVecs..."); - let r = TxinVecs::forced_import(&db, version); - log::debug!("TxinVecs imported."); + inputs = { + tracing::debug!("Importing InputsVecs..."); + let r = InputsVecs::forced_import(&db, version); + tracing::debug!("InputsVecs imported."); r }, - txout = { - log::debug!("Importing TxoutVecs..."); - let r = TxoutVecs::forced_import(&db, version); - log::debug!("TxoutVecs imported."); + outputs = { + tracing::debug!("Importing OutputsVecs..."); + let r = OutputsVecs::forced_import(&db, version); + tracing::debug!("OutputsVecs imported."); r }, - address = { - log::debug!("Importing AddressVecs..."); - let r = AddressVecs::forced_import(&db, version); - log::debug!("AddressVecs imported."); + addresses = { + tracing::debug!("Importing AddressesVecs..."); + let r = AddressesVecs::forced_import(&db, version); + tracing::debug!("AddressesVecs imported."); r }, - output = { - log::debug!("Importing OutputVecs..."); - let r = OutputVecs::forced_import(&db, version); - log::debug!("OutputVecs imported."); + scripts = { + tracing::debug!("Importing ScriptsVecs..."); + let r = ScriptsVecs::forced_import(&db, version); + tracing::debug!("ScriptsVecs imported."); r }, }; - log::debug!("Sub-vecs imported."); + tracing::debug!("Sub-vecs imported."); let this = Self { db, - block, - tx, - txin, - txout, - address, - output, + blocks, + transactions, + inputs, + outputs, + addresses, + scripts, }; this.db.retain_regions( @@ -108,18 +108,18 @@ impl Vecs { let saved_height 
= starting_indexes.height.decremented().unwrap_or_default(); let stamp = Stamp::from(u64::from(saved_height)); - self.block.truncate(starting_indexes.height, stamp)?; + self.blocks.truncate(starting_indexes.height, stamp)?; - self.tx + self.transactions .truncate(starting_indexes.height, starting_indexes.txindex, stamp)?; - self.txin + self.inputs .truncate(starting_indexes.height, starting_indexes.txinindex, stamp)?; - self.txout + self.outputs .truncate(starting_indexes.height, starting_indexes.txoutindex, stamp)?; - self.address.truncate( + self.addresses.truncate( starting_indexes.height, starting_indexes.p2pk65addressindex, starting_indexes.p2pk33addressindex, @@ -132,7 +132,7 @@ impl Vecs { stamp, )?; - self.output.truncate( + self.scripts.truncate( starting_indexes.height, starting_indexes.emptyoutputindex, starting_indexes.opreturnindex, @@ -150,12 +150,12 @@ impl Vecs { typeindex: TypeIndex, reader: &Reader, ) -> Result> { - self.address + self.addresses .get_bytes_by_type(addresstype, typeindex, reader) } pub fn push_bytes_if_needed(&mut self, index: TypeIndex, bytes: AddressBytes) -> Result<()> { - self.address.push_bytes_if_needed(index, bytes) + self.addresses.push_bytes_if_needed(index, bytes) } pub fn flush(&mut self, height: Height) -> Result<()> { @@ -180,24 +180,30 @@ impl Vecs { Ok(()) } + pub fn reset(&mut self) -> Result<()> { + self.par_iter_mut_any_stored_vec() + .try_for_each(|vec| vec.any_reset())?; + Ok(()) + } + pub fn iter_address_hashes_from( &self, address_type: OutputType, height: Height, ) -> Result + '_>> { - self.address.iter_hashes_from(address_type, height) + self.addresses.iter_hashes_from(address_type, height) } fn par_iter_mut_any_stored_vec( &mut self, ) -> impl ParallelIterator { - self.block + self.blocks .par_iter_mut_any() - .chain(self.tx.par_iter_mut_any()) - .chain(self.txin.par_iter_mut_any()) - .chain(self.txout.par_iter_mut_any()) - .chain(self.address.par_iter_mut_any()) - .chain(self.output.par_iter_mut_any()) + 
.chain(self.transactions.par_iter_mut_any()) + .chain(self.inputs.par_iter_mut_any()) + .chain(self.outputs.par_iter_mut_any()) + .chain(self.addresses.par_iter_mut_any()) + .chain(self.scripts.par_iter_mut_any()) } pub fn db(&self) -> &Database { diff --git a/crates/brk_indexer/src/vecs/output.rs b/crates/brk_indexer/src/vecs/output.rs deleted file mode 100644 index 8001e6f3e..000000000 --- a/crates/brk_indexer/src/vecs/output.rs +++ /dev/null @@ -1,99 +0,0 @@ -use brk_error::Result; -use brk_traversable::Traversable; -use brk_types::{ - EmptyOutputIndex, Height, OpReturnIndex, P2MSOutputIndex, TxIndex, UnknownOutputIndex, Version, -}; -use rayon::prelude::*; -use vecdb::{AnyStoredVec, Database, GenericStoredVec, ImportableVec, PcoVec, Stamp}; - -use crate::parallel_import; - -#[derive(Clone, Traversable)] -pub struct OutputVecs { - // Height to first output index (per output type) - pub height_to_first_emptyoutputindex: PcoVec, - pub height_to_first_opreturnindex: PcoVec, - pub height_to_first_p2msoutputindex: PcoVec, - pub height_to_first_unknownoutputindex: PcoVec, - // Output index to txindex (per output type) - pub emptyoutputindex_to_txindex: PcoVec, - pub opreturnindex_to_txindex: PcoVec, - pub p2msoutputindex_to_txindex: PcoVec, - pub unknownoutputindex_to_txindex: PcoVec, -} - -impl OutputVecs { - pub fn forced_import(db: &Database, version: Version) -> Result { - let ( - height_to_first_emptyoutputindex, - height_to_first_opreturnindex, - height_to_first_p2msoutputindex, - height_to_first_unknownoutputindex, - emptyoutputindex_to_txindex, - opreturnindex_to_txindex, - p2msoutputindex_to_txindex, - unknownoutputindex_to_txindex, - ) = parallel_import! 
{ - height_to_first_emptyoutputindex = PcoVec::forced_import(db, "first_emptyoutputindex", version), - height_to_first_opreturnindex = PcoVec::forced_import(db, "first_opreturnindex", version), - height_to_first_p2msoutputindex = PcoVec::forced_import(db, "first_p2msoutputindex", version), - height_to_first_unknownoutputindex = PcoVec::forced_import(db, "first_unknownoutputindex", version), - emptyoutputindex_to_txindex = PcoVec::forced_import(db, "txindex", version), - opreturnindex_to_txindex = PcoVec::forced_import(db, "txindex", version), - p2msoutputindex_to_txindex = PcoVec::forced_import(db, "txindex", version), - unknownoutputindex_to_txindex = PcoVec::forced_import(db, "txindex", version), - }; - Ok(Self { - height_to_first_emptyoutputindex, - height_to_first_opreturnindex, - height_to_first_p2msoutputindex, - height_to_first_unknownoutputindex, - emptyoutputindex_to_txindex, - opreturnindex_to_txindex, - p2msoutputindex_to_txindex, - unknownoutputindex_to_txindex, - }) - } - - pub fn truncate( - &mut self, - height: Height, - emptyoutputindex: EmptyOutputIndex, - opreturnindex: OpReturnIndex, - p2msoutputindex: P2MSOutputIndex, - unknownoutputindex: UnknownOutputIndex, - stamp: Stamp, - ) -> Result<()> { - self.height_to_first_emptyoutputindex - .truncate_if_needed_with_stamp(height, stamp)?; - self.height_to_first_opreturnindex - .truncate_if_needed_with_stamp(height, stamp)?; - self.height_to_first_p2msoutputindex - .truncate_if_needed_with_stamp(height, stamp)?; - self.height_to_first_unknownoutputindex - .truncate_if_needed_with_stamp(height, stamp)?; - self.emptyoutputindex_to_txindex - .truncate_if_needed_with_stamp(emptyoutputindex, stamp)?; - self.opreturnindex_to_txindex - .truncate_if_needed_with_stamp(opreturnindex, stamp)?; - self.p2msoutputindex_to_txindex - .truncate_if_needed_with_stamp(p2msoutputindex, stamp)?; - self.unknownoutputindex_to_txindex - .truncate_if_needed_with_stamp(unknownoutputindex, stamp)?; - Ok(()) - } - - pub fn 
par_iter_mut_any(&mut self) -> impl ParallelIterator { - [ - &mut self.height_to_first_emptyoutputindex as &mut dyn AnyStoredVec, - &mut self.height_to_first_opreturnindex, - &mut self.height_to_first_p2msoutputindex, - &mut self.height_to_first_unknownoutputindex, - &mut self.emptyoutputindex_to_txindex, - &mut self.opreturnindex_to_txindex, - &mut self.p2msoutputindex_to_txindex, - &mut self.unknownoutputindex_to_txindex, - ] - .into_par_iter() - } -} diff --git a/crates/brk_indexer/src/vecs/outputs.rs b/crates/brk_indexer/src/vecs/outputs.rs new file mode 100644 index 000000000..783d6b65b --- /dev/null +++ b/crates/brk_indexer/src/vecs/outputs.rs @@ -0,0 +1,66 @@ +use brk_error::Result; +use brk_traversable::Traversable; +use brk_types::{Height, OutputType, Sats, TxIndex, TxOutIndex, TypeIndex, Version}; +use rayon::prelude::*; +use vecdb::{AnyStoredVec, BytesVec, Database, GenericStoredVec, ImportableVec, PcoVec, Stamp}; + +use crate::parallel_import; + +#[derive(Clone, Traversable)] +pub struct OutputsVecs { + pub first_txoutindex: PcoVec, + pub value: BytesVec, + pub outputtype: BytesVec, + pub typeindex: BytesVec, + pub txindex: PcoVec, +} + +impl OutputsVecs { + pub fn forced_import(db: &Database, version: Version) -> Result { + let ( + first_txoutindex, + value, + outputtype, + typeindex, + txindex, + ) = parallel_import! 
{ + first_txoutindex = PcoVec::forced_import(db, "first_txoutindex", version), + value = BytesVec::forced_import(db, "value", version), + outputtype = BytesVec::forced_import(db, "outputtype", version), + typeindex = BytesVec::forced_import(db, "typeindex", version), + txindex = PcoVec::forced_import(db, "txindex", version), + }; + Ok(Self { + first_txoutindex, + value, + outputtype, + typeindex, + txindex, + }) + } + + pub fn truncate(&mut self, height: Height, txoutindex: TxOutIndex, stamp: Stamp) -> Result<()> { + self.first_txoutindex + .truncate_if_needed_with_stamp(height, stamp)?; + self.value + .truncate_if_needed_with_stamp(txoutindex, stamp)?; + self.outputtype + .truncate_if_needed_with_stamp(txoutindex, stamp)?; + self.typeindex + .truncate_if_needed_with_stamp(txoutindex, stamp)?; + self.txindex + .truncate_if_needed_with_stamp(txoutindex, stamp)?; + Ok(()) + } + + pub fn par_iter_mut_any(&mut self) -> impl ParallelIterator { + [ + &mut self.first_txoutindex as &mut dyn AnyStoredVec, + &mut self.value, + &mut self.outputtype, + &mut self.typeindex, + &mut self.txindex, + ] + .into_par_iter() + } +} diff --git a/crates/brk_indexer/src/vecs/scripts.rs b/crates/brk_indexer/src/vecs/scripts.rs new file mode 100644 index 000000000..0b63adbb1 --- /dev/null +++ b/crates/brk_indexer/src/vecs/scripts.rs @@ -0,0 +1,99 @@ +use brk_error::Result; +use brk_traversable::Traversable; +use brk_types::{ + EmptyOutputIndex, Height, OpReturnIndex, P2MSOutputIndex, TxIndex, UnknownOutputIndex, Version, +}; +use rayon::prelude::*; +use vecdb::{AnyStoredVec, Database, GenericStoredVec, ImportableVec, PcoVec, Stamp}; + +use crate::parallel_import; + +#[derive(Clone, Traversable)] +pub struct ScriptsVecs { + // Height to first output index (per output type) + pub first_emptyoutputindex: PcoVec, + pub first_opreturnindex: PcoVec, + pub first_p2msoutputindex: PcoVec, + pub first_unknownoutputindex: PcoVec, + // Output index to txindex (per output type) + pub empty_to_txindex: 
PcoVec, + pub opreturn_to_txindex: PcoVec, + pub p2ms_to_txindex: PcoVec, + pub unknown_to_txindex: PcoVec, +} + +impl ScriptsVecs { + pub fn forced_import(db: &Database, version: Version) -> Result { + let ( + first_emptyoutputindex, + first_opreturnindex, + first_p2msoutputindex, + first_unknownoutputindex, + emptyoutputindex_to_txindex, + opreturnindex_to_txindex, + p2msoutputindex_to_txindex, + unknownoutputindex_to_txindex, + ) = parallel_import! { + first_emptyoutputindex = PcoVec::forced_import(db, "first_emptyoutputindex", version), + first_opreturnindex = PcoVec::forced_import(db, "first_opreturnindex", version), + first_p2msoutputindex = PcoVec::forced_import(db, "first_p2msoutputindex", version), + first_unknownoutputindex = PcoVec::forced_import(db, "first_unknownoutputindex", version), + emptyoutputindex_to_txindex = PcoVec::forced_import(db, "txindex", version), + opreturnindex_to_txindex = PcoVec::forced_import(db, "txindex", version), + p2msoutputindex_to_txindex = PcoVec::forced_import(db, "txindex", version), + unknownoutputindex_to_txindex = PcoVec::forced_import(db, "txindex", version), + }; + Ok(Self { + first_emptyoutputindex, + first_opreturnindex, + first_p2msoutputindex, + first_unknownoutputindex, + empty_to_txindex: emptyoutputindex_to_txindex, + opreturn_to_txindex: opreturnindex_to_txindex, + p2ms_to_txindex: p2msoutputindex_to_txindex, + unknown_to_txindex: unknownoutputindex_to_txindex, + }) + } + + pub fn truncate( + &mut self, + height: Height, + emptyoutputindex: EmptyOutputIndex, + opreturnindex: OpReturnIndex, + p2msoutputindex: P2MSOutputIndex, + unknownoutputindex: UnknownOutputIndex, + stamp: Stamp, + ) -> Result<()> { + self.first_emptyoutputindex + .truncate_if_needed_with_stamp(height, stamp)?; + self.first_opreturnindex + .truncate_if_needed_with_stamp(height, stamp)?; + self.first_p2msoutputindex + .truncate_if_needed_with_stamp(height, stamp)?; + self.first_unknownoutputindex + .truncate_if_needed_with_stamp(height, 
stamp)?; + self.empty_to_txindex + .truncate_if_needed_with_stamp(emptyoutputindex, stamp)?; + self.opreturn_to_txindex + .truncate_if_needed_with_stamp(opreturnindex, stamp)?; + self.p2ms_to_txindex + .truncate_if_needed_with_stamp(p2msoutputindex, stamp)?; + self.unknown_to_txindex + .truncate_if_needed_with_stamp(unknownoutputindex, stamp)?; + Ok(()) + } + + pub fn par_iter_mut_any(&mut self) -> impl ParallelIterator { + [ + &mut self.first_emptyoutputindex as &mut dyn AnyStoredVec, + &mut self.first_opreturnindex, + &mut self.first_p2msoutputindex, + &mut self.first_unknownoutputindex, + &mut self.empty_to_txindex, + &mut self.opreturn_to_txindex, + &mut self.p2ms_to_txindex, + &mut self.unknown_to_txindex, + ] + .into_par_iter() + } +} diff --git a/crates/brk_indexer/src/vecs/transactions.rs b/crates/brk_indexer/src/vecs/transactions.rs new file mode 100644 index 000000000..83530fd40 --- /dev/null +++ b/crates/brk_indexer/src/vecs/transactions.rs @@ -0,0 +1,104 @@ +use brk_error::Result; +use brk_traversable::Traversable; +use brk_types::{ + Height, RawLockTime, StoredBool, StoredU32, TxInIndex, TxIndex, TxOutIndex, TxVersion, Txid, + Version, +}; +use rayon::prelude::*; +use vecdb::{AnyStoredVec, BytesVec, Database, GenericStoredVec, ImportableVec, PcoVec, Stamp}; + +use crate::parallel_import; + +#[derive(Clone, Traversable)] +pub struct TransactionsVecs { + pub first_txindex: PcoVec, + pub height: PcoVec, + pub txid: BytesVec, + pub txversion: PcoVec, + pub rawlocktime: PcoVec, + pub base_size: PcoVec, + pub total_size: PcoVec, + pub is_explicitly_rbf: PcoVec, + pub first_txinindex: PcoVec, + pub first_txoutindex: BytesVec, +} + +impl TransactionsVecs { + pub fn forced_import(db: &Database, version: Version) -> Result { + let ( + first_txindex, + height, + txid, + txversion, + rawlocktime, + base_size, + total_size, + is_explicitly_rbf, + first_txinindex, + first_txoutindex, + ) = parallel_import! 
{ + first_txindex = PcoVec::forced_import(db, "first_txindex", version), + height = PcoVec::forced_import(db, "height", version), + txid = BytesVec::forced_import(db, "txid", version), + txversion = PcoVec::forced_import(db, "txversion", version), + rawlocktime = PcoVec::forced_import(db, "rawlocktime", version), + base_size = PcoVec::forced_import(db, "base_size", version), + total_size = PcoVec::forced_import(db, "total_size", version), + is_explicitly_rbf = PcoVec::forced_import(db, "is_explicitly_rbf", version), + first_txinindex = PcoVec::forced_import(db, "first_txinindex", version), + first_txoutindex = BytesVec::forced_import(db, "first_txoutindex", version), + }; + Ok(Self { + first_txindex, + height, + txid, + txversion, + rawlocktime, + base_size, + total_size, + is_explicitly_rbf, + first_txinindex, + first_txoutindex, + }) + } + + pub fn truncate(&mut self, height: Height, txindex: TxIndex, stamp: Stamp) -> Result<()> { + self.first_txindex + .truncate_if_needed_with_stamp(height, stamp)?; + self.height + .truncate_if_needed_with_stamp(txindex, stamp)?; + self.txid + .truncate_if_needed_with_stamp(txindex, stamp)?; + self.txversion + .truncate_if_needed_with_stamp(txindex, stamp)?; + self.rawlocktime + .truncate_if_needed_with_stamp(txindex, stamp)?; + self.base_size + .truncate_if_needed_with_stamp(txindex, stamp)?; + self.total_size + .truncate_if_needed_with_stamp(txindex, stamp)?; + self.is_explicitly_rbf + .truncate_if_needed_with_stamp(txindex, stamp)?; + self.first_txinindex + .truncate_if_needed_with_stamp(txindex, stamp)?; + self.first_txoutindex + .truncate_if_needed_with_stamp(txindex, stamp)?; + Ok(()) + } + + pub fn par_iter_mut_any(&mut self) -> impl ParallelIterator { + [ + &mut self.first_txindex as &mut dyn AnyStoredVec, + &mut self.height, + &mut self.txid, + &mut self.txversion, + &mut self.rawlocktime, + &mut self.base_size, + &mut self.total_size, + &mut self.is_explicitly_rbf, + &mut self.first_txinindex, + &mut 
self.first_txoutindex, + ] + .into_par_iter() + } +} diff --git a/crates/brk_indexer/src/vecs/tx.rs b/crates/brk_indexer/src/vecs/tx.rs deleted file mode 100644 index 9cc47994d..000000000 --- a/crates/brk_indexer/src/vecs/tx.rs +++ /dev/null @@ -1,104 +0,0 @@ -use brk_error::Result; -use brk_traversable::Traversable; -use brk_types::{ - Height, RawLockTime, StoredBool, StoredU32, TxInIndex, TxIndex, TxOutIndex, TxVersion, Txid, - Version, -}; -use rayon::prelude::*; -use vecdb::{AnyStoredVec, BytesVec, Database, GenericStoredVec, ImportableVec, PcoVec, Stamp}; - -use crate::parallel_import; - -#[derive(Clone, Traversable)] -pub struct TxVecs { - pub height_to_first_txindex: PcoVec, - pub txindex_to_height: PcoVec, - pub txindex_to_txid: BytesVec, - pub txindex_to_txversion: PcoVec, - pub txindex_to_rawlocktime: PcoVec, - pub txindex_to_base_size: PcoVec, - pub txindex_to_total_size: PcoVec, - pub txindex_to_is_explicitly_rbf: PcoVec, - pub txindex_to_first_txinindex: PcoVec, - pub txindex_to_first_txoutindex: BytesVec, -} - -impl TxVecs { - pub fn forced_import(db: &Database, version: Version) -> Result { - let ( - height_to_first_txindex, - txindex_to_height, - txindex_to_txid, - txindex_to_txversion, - txindex_to_rawlocktime, - txindex_to_base_size, - txindex_to_total_size, - txindex_to_is_explicitly_rbf, - txindex_to_first_txinindex, - txindex_to_first_txoutindex, - ) = parallel_import! 
{ - height_to_first_txindex = PcoVec::forced_import(db, "first_txindex", version), - txindex_to_height = PcoVec::forced_import(db, "height", version), - txindex_to_txid = BytesVec::forced_import(db, "txid", version), - txindex_to_txversion = PcoVec::forced_import(db, "txversion", version), - txindex_to_rawlocktime = PcoVec::forced_import(db, "rawlocktime", version), - txindex_to_base_size = PcoVec::forced_import(db, "base_size", version), - txindex_to_total_size = PcoVec::forced_import(db, "total_size", version), - txindex_to_is_explicitly_rbf = PcoVec::forced_import(db, "is_explicitly_rbf", version), - txindex_to_first_txinindex = PcoVec::forced_import(db, "first_txinindex", version), - txindex_to_first_txoutindex = BytesVec::forced_import(db, "first_txoutindex", version), - }; - Ok(Self { - height_to_first_txindex, - txindex_to_height, - txindex_to_txid, - txindex_to_txversion, - txindex_to_rawlocktime, - txindex_to_base_size, - txindex_to_total_size, - txindex_to_is_explicitly_rbf, - txindex_to_first_txinindex, - txindex_to_first_txoutindex, - }) - } - - pub fn truncate(&mut self, height: Height, txindex: TxIndex, stamp: Stamp) -> Result<()> { - self.height_to_first_txindex - .truncate_if_needed_with_stamp(height, stamp)?; - self.txindex_to_height - .truncate_if_needed_with_stamp(txindex, stamp)?; - self.txindex_to_txid - .truncate_if_needed_with_stamp(txindex, stamp)?; - self.txindex_to_txversion - .truncate_if_needed_with_stamp(txindex, stamp)?; - self.txindex_to_rawlocktime - .truncate_if_needed_with_stamp(txindex, stamp)?; - self.txindex_to_base_size - .truncate_if_needed_with_stamp(txindex, stamp)?; - self.txindex_to_total_size - .truncate_if_needed_with_stamp(txindex, stamp)?; - self.txindex_to_is_explicitly_rbf - .truncate_if_needed_with_stamp(txindex, stamp)?; - self.txindex_to_first_txinindex - .truncate_if_needed_with_stamp(txindex, stamp)?; - self.txindex_to_first_txoutindex - .truncate_if_needed_with_stamp(txindex, stamp)?; - Ok(()) - } - - pub fn 
par_iter_mut_any(&mut self) -> impl ParallelIterator { - [ - &mut self.height_to_first_txindex as &mut dyn AnyStoredVec, - &mut self.txindex_to_height, - &mut self.txindex_to_txid, - &mut self.txindex_to_txversion, - &mut self.txindex_to_rawlocktime, - &mut self.txindex_to_base_size, - &mut self.txindex_to_total_size, - &mut self.txindex_to_is_explicitly_rbf, - &mut self.txindex_to_first_txinindex, - &mut self.txindex_to_first_txoutindex, - ] - .into_par_iter() - } -} diff --git a/crates/brk_indexer/src/vecs/txin.rs b/crates/brk_indexer/src/vecs/txin.rs deleted file mode 100644 index eb8168c0c..000000000 --- a/crates/brk_indexer/src/vecs/txin.rs +++ /dev/null @@ -1,66 +0,0 @@ -use brk_error::Result; -use brk_traversable::Traversable; -use brk_types::{Height, OutPoint, OutputType, TxInIndex, TxIndex, TypeIndex, Version}; -use rayon::prelude::*; -use vecdb::{AnyStoredVec, Database, GenericStoredVec, ImportableVec, PcoVec, Stamp}; - -use crate::parallel_import; - -#[derive(Clone, Traversable)] -pub struct TxinVecs { - pub height_to_first_txinindex: PcoVec, - pub txinindex_to_outpoint: PcoVec, - pub txinindex_to_txindex: PcoVec, - pub txinindex_to_outputtype: PcoVec, - pub txinindex_to_typeindex: PcoVec, -} - -impl TxinVecs { - pub fn forced_import(db: &Database, version: Version) -> Result { - let ( - height_to_first_txinindex, - txinindex_to_outpoint, - txinindex_to_txindex, - txinindex_to_outputtype, - txinindex_to_typeindex, - ) = parallel_import! 
{ - height_to_first_txinindex = PcoVec::forced_import(db, "first_txinindex", version), - txinindex_to_outpoint = PcoVec::forced_import(db, "outpoint", version), - txinindex_to_txindex = PcoVec::forced_import(db, "txindex", version), - txinindex_to_outputtype = PcoVec::forced_import(db, "outputtype", version), - txinindex_to_typeindex = PcoVec::forced_import(db, "typeindex", version), - }; - Ok(Self { - height_to_first_txinindex, - txinindex_to_outpoint, - txinindex_to_txindex, - txinindex_to_outputtype, - txinindex_to_typeindex, - }) - } - - pub fn truncate(&mut self, height: Height, txinindex: TxInIndex, stamp: Stamp) -> Result<()> { - self.height_to_first_txinindex - .truncate_if_needed_with_stamp(height, stamp)?; - self.txinindex_to_outpoint - .truncate_if_needed_with_stamp(txinindex, stamp)?; - self.txinindex_to_txindex - .truncate_if_needed_with_stamp(txinindex, stamp)?; - self.txinindex_to_outputtype - .truncate_if_needed_with_stamp(txinindex, stamp)?; - self.txinindex_to_typeindex - .truncate_if_needed_with_stamp(txinindex, stamp)?; - Ok(()) - } - - pub fn par_iter_mut_any(&mut self) -> impl ParallelIterator { - [ - &mut self.height_to_first_txinindex as &mut dyn AnyStoredVec, - &mut self.txinindex_to_outpoint, - &mut self.txinindex_to_txindex, - &mut self.txinindex_to_outputtype, - &mut self.txinindex_to_typeindex, - ] - .into_par_iter() - } -} diff --git a/crates/brk_indexer/src/vecs/txout.rs b/crates/brk_indexer/src/vecs/txout.rs deleted file mode 100644 index 9f4891916..000000000 --- a/crates/brk_indexer/src/vecs/txout.rs +++ /dev/null @@ -1,66 +0,0 @@ -use brk_error::Result; -use brk_traversable::Traversable; -use brk_types::{Height, OutputType, Sats, TxIndex, TxOutIndex, TypeIndex, Version}; -use rayon::prelude::*; -use vecdb::{AnyStoredVec, BytesVec, Database, GenericStoredVec, ImportableVec, PcoVec, Stamp}; - -use crate::parallel_import; - -#[derive(Clone, Traversable)] -pub struct TxoutVecs { - pub height_to_first_txoutindex: PcoVec, - pub 
txoutindex_to_value: BytesVec, - pub txoutindex_to_outputtype: BytesVec, - pub txoutindex_to_typeindex: BytesVec, - pub txoutindex_to_txindex: PcoVec, -} - -impl TxoutVecs { - pub fn forced_import(db: &Database, version: Version) -> Result { - let ( - height_to_first_txoutindex, - txoutindex_to_value, - txoutindex_to_outputtype, - txoutindex_to_typeindex, - txoutindex_to_txindex, - ) = parallel_import! { - height_to_first_txoutindex = PcoVec::forced_import(db, "first_txoutindex", version), - txoutindex_to_value = BytesVec::forced_import(db, "value", version), - txoutindex_to_outputtype = BytesVec::forced_import(db, "outputtype", version), - txoutindex_to_typeindex = BytesVec::forced_import(db, "typeindex", version), - txoutindex_to_txindex = PcoVec::forced_import(db, "txindex", version), - }; - Ok(Self { - height_to_first_txoutindex, - txoutindex_to_value, - txoutindex_to_outputtype, - txoutindex_to_typeindex, - txoutindex_to_txindex, - }) - } - - pub fn truncate(&mut self, height: Height, txoutindex: TxOutIndex, stamp: Stamp) -> Result<()> { - self.height_to_first_txoutindex - .truncate_if_needed_with_stamp(height, stamp)?; - self.txoutindex_to_value - .truncate_if_needed_with_stamp(txoutindex, stamp)?; - self.txoutindex_to_outputtype - .truncate_if_needed_with_stamp(txoutindex, stamp)?; - self.txoutindex_to_typeindex - .truncate_if_needed_with_stamp(txoutindex, stamp)?; - self.txoutindex_to_txindex - .truncate_if_needed_with_stamp(txoutindex, stamp)?; - Ok(()) - } - - pub fn par_iter_mut_any(&mut self) -> impl ParallelIterator { - [ - &mut self.height_to_first_txoutindex as &mut dyn AnyStoredVec, - &mut self.txoutindex_to_value, - &mut self.txoutindex_to_outputtype, - &mut self.txoutindex_to_typeindex, - &mut self.txoutindex_to_txindex, - ] - .into_par_iter() - } -} diff --git a/crates/brk_logger/Cargo.toml b/crates/brk_logger/Cargo.toml index eb52ce5de..d86d9d604 100644 --- a/crates/brk_logger/Cargo.toml +++ b/crates/brk_logger/Cargo.toml @@ -1,6 +1,6 @@ 
[package] name = "brk_logger" -description = "A thin wrapper around env_logger" +description = "A thin wrapper around tracing with size-based log rotation" version.workspace = true edition.workspace = true license.workspace = true @@ -9,8 +9,9 @@ repository.workspace = true build = "build.rs" [dependencies] -env_logger = { workspace = true } jiff = { workspace = true } -log = { workspace = true } +logroller = "0.1" owo-colors = "4.2.3" -parking_lot = { workspace = true } +tracing = { workspace = true } +tracing-appender = "0.2" +tracing-subscriber = { version = "0.3", default-features = false, features = ["fmt", "env-filter", "std"] } diff --git a/crates/brk_logger/README.md b/crates/brk_logger/README.md index c039d9951..31b5be57c 100644 --- a/crates/brk_logger/README.md +++ b/crates/brk_logger/README.md @@ -8,7 +8,7 @@ Drop-in logging initialization that silences noisy dependencies (bitcoin, fjall, ## Key Features -- **Dual output**: Console (colorized) + optional file logging +- **Dual output**: Console (colorized) + optional file logging with size-based rotation (42MB, 2 files) - **Log hooks**: Register callbacks to intercept log messages programmatically - **Sensible defaults**: Pre-configured filters silence common verbose libraries - **Timestamp formatting**: Uses system timezone via jiff @@ -27,7 +27,7 @@ brk_logger::register_hook(|msg| { ## Usage ```rust,ignore -use log::info; +use tracing::info; fn main() -> std::io::Result<()> { brk_logger::init(None)?; diff --git a/crates/brk_logger/examples/log.rs b/crates/brk_logger/examples/log.rs index a95a5d6d3..5c6425f06 100644 --- a/crates/brk_logger/examples/log.rs +++ b/crates/brk_logger/examples/log.rs @@ -1,6 +1,6 @@ use std::io; -use log::{debug, error, info, trace}; +use tracing::{debug, error, info, trace}; fn main() -> io::Result<()> { brk_logger::init(None)?; diff --git a/crates/brk_logger/src/lib.rs b/crates/brk_logger/src/lib.rs index abc5a4a04..cb95a6f18 100644 --- a/crates/brk_logger/src/lib.rs +++ 
b/crates/brk_logger/src/lib.rs @@ -1,88 +1,252 @@ #![doc = include_str!("../README.md")] -use std::{ - fmt::Display, - fs::{self, OpenOptions}, - io::{self, BufWriter, Write}, - path::Path, - sync::OnceLock, +use std::{fmt::Write as _, io, path::Path, sync::OnceLock}; + +use jiff::{Timestamp, tz}; +use logroller::{LogRollerBuilder, Rotation, RotationSize}; +use owo_colors::OwoColorize; +use tracing::{Event, Level, Subscriber, field::Field}; +use tracing_appender::non_blocking::WorkerGuard; +use tracing_subscriber::{ + EnvFilter, + fmt::{self, FmtContext, FormatEvent, FormatFields, format::Writer}, + layer::SubscriberExt, + registry::LookupSpan, + util::SubscriberInitExt, }; -use env_logger::{Builder, Env}; -use jiff::{Timestamp, tz}; -pub use owo_colors::OwoColorize; -use parking_lot::Mutex; - -// Type alias for the hook function type LogHook = Box; +static GUARD: OnceLock = OnceLock::new(); static LOG_HOOK: OnceLock = OnceLock::new(); -static LOG_FILE: OnceLock>> = OnceLock::new(); -#[inline] -pub fn init(path: Option<&Path>) -> io::Result<()> { - if let Some(path) = path { - let _ = fs::remove_file(path); - let file = OpenOptions::new().create(true).append(true).open(path)?; - LOG_FILE.set(Mutex::new(BufWriter::new(file))).ok(); +const MAX_LOG_FILES: u64 = 2; +const MAX_FILE_SIZE_MB: u64 = 42; + +// Don't remove, used to know the target of unwanted logs +const WITH_TARGET: bool = false; +// const WITH_TARGET: bool = true; + +const fn level_str(level: Level) -> &'static str { + match level { + Level::ERROR => "error", + Level::WARN => "warn ", + Level::INFO => "info ", + Level::DEBUG => "debug", + Level::TRACE => "trace", + } +} + +struct Formatter; + +/// Visitor that collects structured fields for colored formatting +struct FieldVisitor { + result: String, + status: Option, + uri: Option, + latency: Option, +} + +impl FieldVisitor { + fn new() -> Self { + Self { + result: String::new(), + status: None, + uri: None, + latency: None, + } } - 
#[cfg(debug_assertions)] - let default_level = "debug"; - #[cfg(not(debug_assertions))] - let default_level = "info"; + fn finish(self) -> String { + // Format HTTP-style log if we have status + if let Some(status) = self.status { + let status_str = if ANSI { + match status { + 200..=299 => status.green().to_string(), + 300..=399 => status.bright_black().to_string(), + _ => status.red().to_string(), + } + } else { + status.to_string() + }; - let filter = format!( - "{default_level},bitcoin=off,bitcoincore-rpc=off,fjall=off,brk_fjall=off,lsm_tree=off,brk_rolldown=off,rolldown=off,rmcp=off,brk_rmcp=off,tracing=off,aide=off,rustls=off,notify=off,oxc_resolver=off,tower_http=off" + let uri = self.uri.as_deref().unwrap_or(""); + let latency = self.latency.as_deref().unwrap_or(""); + + if ANSI { + format!("{status_str} {uri} {}", latency.bright_black()) + } else { + format!("{status_str} {uri} {latency}") + } + } else { + self.result + } + } +} + +impl tracing::field::Visit for FieldVisitor { + fn record_u64(&mut self, field: &Field, value: u64) { + if field.name() == "status" { + self.status = Some(value); + } else { + let _ = write!(self.result, "{}={} ", field.name(), value); + } + } + + fn record_i64(&mut self, field: &Field, value: i64) { + let _ = write!(self.result, "{}={} ", field.name(), value); + } + + fn record_str(&mut self, field: &Field, value: &str) { + if field.name() == "uri" { + self.uri = Some(value.to_string()); + } else if field.name() == "message" { + let _ = write!(self.result, "{value}"); + } else { + let _ = write!(self.result, "{}={} ", field.name(), value); + } + } + + fn record_debug(&mut self, field: &Field, value: &dyn std::fmt::Debug) { + match field.name() { + "uri" => self.uri = Some(format!("{value:?}")), + "latency" => self.latency = Some(format!("{value:?}")), + "message" => { let _ = write!(self.result, "{value:?}"); } + _ => { let _ = write!(self.result, "{}={:?} ", field.name(), value); } + } + } +} + +impl FormatEvent for Formatter 
+where + S: Subscriber + for<'a> LookupSpan<'a>, + N: for<'a> FormatFields<'a> + 'static, +{ + fn format_event( + &self, + _ctx: &FmtContext<'_, S, N>, + mut writer: Writer<'_>, + event: &Event<'_>, + ) -> std::fmt::Result { + let ts = Timestamp::now() + .to_zoned(tz::TimeZone::system()) + .strftime("%Y-%m-%d %H:%M:%S") + .to_string(); + + let level = *event.metadata().level(); + let level_str = level_str(level); + + if ANSI { + let level_colored = match level { + Level::ERROR => level_str.red().to_string(), + Level::WARN => level_str.yellow().to_string(), + Level::INFO => level_str.green().to_string(), + Level::DEBUG => level_str.blue().to_string(), + Level::TRACE => level_str.cyan().to_string(), + }; + if WITH_TARGET { + write!( + writer, + "{} {} {} {level_colored} ", + ts.bright_black(), + event.metadata().target(), + "-".bright_black(), + )?; + } else { + write!( + writer, + "{} {} {level_colored} ", + ts.bright_black(), + "-".bright_black() + )?; + } + } else if WITH_TARGET { + write!(writer, "{ts} {} - {level_str} ", event.metadata().target())?; + } else { + write!(writer, "{ts} - {level_str} ")?; + } + + let mut visitor = FieldVisitor::::new(); + event.record(&mut visitor); + write!(writer, "{}", visitor.finish())?; + writeln!(writer) + } +} + +struct HookLayer; + +impl tracing_subscriber::Layer for HookLayer { + fn on_event(&self, event: &Event<'_>, _: tracing_subscriber::layer::Context<'_, S>) { + if let Some(hook) = LOG_HOOK.get() { + let mut msg = String::new(); + event.record(&mut MessageVisitor(&mut msg)); + hook(&msg); + } + } +} + +struct MessageVisitor<'a>(&'a mut String); + +impl tracing::field::Visit for MessageVisitor<'_> { + fn record_debug(&mut self, field: &tracing::field::Field, value: &dyn std::fmt::Debug) { + use std::fmt::Write; + if field.name() == "message" { + self.0.clear(); + let _ = write!(self.0, "{value:?}"); + } + } +} + +pub fn init(path: Option<&Path>) -> io::Result<()> { + #[cfg(debug_assertions)] + const DEFAULT_LEVEL: &str = 
"debug"; + #[cfg(not(debug_assertions))] + const DEFAULT_LEVEL: &str = "info"; + + let default_filter = format!( + "{DEFAULT_LEVEL},bitcoin=off,bitcoincore-rpc=off,fjall=off,brk_fjall=off,lsm_tree=off,brk_rolldown=off,rolldown=off,rmcp=off,brk_rmcp=off,tracing=off,aide=off,rustls=off,notify=off,oxc_resolver=off,tower_http=off" ); - Builder::from_env(Env::default().default_filter_or(filter)) - .format(move |buf, record| { - let date_time = Timestamp::now() - .to_zoned(tz::TimeZone::system()) - .strftime("%Y-%m-%d %H:%M:%S") - .to_string(); - let level = record.level().as_str().to_lowercase(); - let level = format!("{level:5}"); - let target = record.target(); - let dash = "-"; - let args = record.args(); + let filter = + EnvFilter::try_from_default_env().unwrap_or_else(|_| EnvFilter::new(default_filter)); - if let Some(hook) = LOG_HOOK.get() { - hook(&args.to_string()); - } + let registry = tracing_subscriber::registry() + .with(filter) + .with(fmt::layer().event_format(Formatter::)) + .with(HookLayer); - if let Some(file) = LOG_FILE.get() { - let _ = write(&mut *file.lock(), &date_time, target, &level, dash, args); - } + if let Some(path) = path { + let dir = path.parent().unwrap_or(Path::new(".")); + let filename = path + .file_name() + .and_then(|s| s.to_str()) + .unwrap_or("app.log"); - let colored_date_time = date_time.bright_black(); - let colored_level = match level.chars().next().unwrap() { - 'e' => level.red().to_string(), - 'w' => level.yellow().to_string(), - 'i' => level.green().to_string(), - 'd' => level.blue().to_string(), - 't' => level.cyan().to_string(), - _ => panic!(), - }; - let colored_dash = dash.bright_black(); + let roller = LogRollerBuilder::new(dir, Path::new(filename)) + .rotation(Rotation::SizeBased(RotationSize::MB(MAX_FILE_SIZE_MB))) + .max_keep_files(MAX_LOG_FILES) + .build() + .map_err(io::Error::other)?; - write( - buf, - colored_date_time, - target, - colored_level, - colored_dash, - args, + let (non_blocking, guard) = 
tracing_appender::non_blocking(roller); + GUARD.set(guard).ok(); + + registry + .with( + fmt::layer() + .event_format(Formatter::) + .with_writer(non_blocking), ) - }) - .init(); + .init(); + } else { + registry.init(); + } Ok(()) } -/// Register a hook that gets called for every log message -/// Can only be called once +/// Register a hook that gets called for every log message. +/// Can only be called once. pub fn register_hook(hook: F) -> Result<(), &'static str> where F: Fn(&str) + Send + Sync + 'static, @@ -91,20 +255,3 @@ where .set(Box::new(hook)) .map_err(|_| "Hook already registered") } - -fn write( - mut buf: impl Write, - date_time: impl Display, - _target: impl Display, - level: impl Display, - dash: impl Display, - args: impl Display, -) -> Result<(), std::io::Error> { - writeln!(buf, "{date_time} {dash} {level} {args}") - // Don't remove, used to know the target of unwanted logs - // writeln!( - // buf, - // "{} {} {} {} {}", - // date_time, _target, level, dash, args - // ) -} diff --git a/crates/brk_mcp/Cargo.toml b/crates/brk_mcp/Cargo.toml index 381df6098..f76bf1cf5 100644 --- a/crates/brk_mcp/Cargo.toml +++ b/crates/brk_mcp/Cargo.toml @@ -14,7 +14,7 @@ brk_rmcp = { version = "0.8.0", features = [ "transport-worker", "transport-streamable-http-server", ] } -log = { workspace = true } +tracing = { workspace = true } minreq = { workspace = true } schemars = { workspace = true } serde = { workspace = true } diff --git a/crates/brk_mcp/src/lib.rs b/crates/brk_mcp/src/lib.rs index 0d6b93891..ded07af64 100644 --- a/crates/brk_mcp/src/lib.rs +++ b/crates/brk_mcp/src/lib.rs @@ -9,9 +9,9 @@ use brk_rmcp::{ service::RequestContext, tool, tool_handler, tool_router, }; -use log::info; use schemars::JsonSchema; use serde::Deserialize; +use tracing::info; pub mod route; @@ -41,7 +41,9 @@ impl MCP { } } - #[tool(description = "Get the OpenAPI specification describing all available REST API endpoints.")] + #[tool( + description = "Get the OpenAPI specification 
describing all available REST API endpoints." + )] async fn get_openapi(&self) -> Result { info!("mcp: get_openapi"); Ok(CallToolResult::success(vec![Content::text( @@ -49,7 +51,9 @@ impl MCP { )])) } - #[tool(description = "Call a REST API endpoint. Use get_openapi first to discover available endpoints.")] + #[tool( + description = "Call a REST API endpoint. Use get_openapi first to discover available endpoints." + )] async fn fetch( &self, Parameters(params): Parameters, diff --git a/crates/brk_mcp/src/route.rs b/crates/brk_mcp/src/route.rs index f16b89b58..d5f9b7f25 100644 --- a/crates/brk_mcp/src/route.rs +++ b/crates/brk_mcp/src/route.rs @@ -5,7 +5,7 @@ use brk_rmcp::transport::{ StreamableHttpServerConfig, streamable_http_server::{StreamableHttpService, session::local::LocalSessionManager}, }; -use log::info; +use tracing::info; use crate::MCP; diff --git a/crates/brk_mempool/Cargo.toml b/crates/brk_mempool/Cargo.toml index 661d20b1c..de3f8eda2 100644 --- a/crates/brk_mempool/Cargo.toml +++ b/crates/brk_mempool/Cargo.toml @@ -13,7 +13,7 @@ brk_error = { workspace = true } brk_rpc = { workspace = true } brk_types = { workspace = true } derive_more = { workspace = true } -log = { workspace = true } +tracing = { workspace = true } parking_lot = { workspace = true } rustc-hash = { workspace = true } smallvec = { workspace = true } diff --git a/crates/brk_mempool/src/sync.rs b/crates/brk_mempool/src/sync.rs index 0360bfae8..7079a8d16 100644 --- a/crates/brk_mempool/src/sync.rs +++ b/crates/brk_mempool/src/sync.rs @@ -11,9 +11,9 @@ use brk_error::Result; use brk_rpc::Client; use brk_types::{MempoolEntryInfo, MempoolInfo, TxWithHex, Txid, TxidPrefix}; use derive_more::Deref; -use log::error; use parking_lot::{RwLock, RwLockReadGuard}; use rustc_hash::FxHashMap; +use tracing::error; use crate::{ addresses::AddressTracker, diff --git a/crates/brk_query/src/impl/address.rs b/crates/brk_query/src/impl/address.rs index 1366ada64..886f79093 100644 --- 
a/crates/brk_query/src/impl/address.rs +++ b/crates/brk_query/src/impl/address.rs @@ -138,7 +138,7 @@ impl Query { .map(|(key, _)| key.txindex()) .collect(); - let mut txindex_to_txid_iter = indexer.vecs.tx.txindex_to_txid.iter()?; + let mut txindex_to_txid_iter = indexer.vecs.transactions.txid.iter()?; let txids: Vec = txindices .into_iter() .map(|txindex| txindex_to_txid_iter.get_unwrap(txindex)) @@ -166,12 +166,12 @@ impl Query { .map(|(key, _): (AddressIndexOutPoint, Unit)| (key.txindex(), key.vout())) .collect(); - let mut txindex_to_txid_iter = vecs.tx.txindex_to_txid.iter()?; - let mut txindex_to_height_iter = vecs.tx.txindex_to_height.iter()?; - let mut txindex_to_first_txoutindex_iter = vecs.tx.txindex_to_first_txoutindex.iter()?; - let mut txoutindex_to_value_iter = vecs.txout.txoutindex_to_value.iter()?; - let mut height_to_blockhash_iter = vecs.block.height_to_blockhash.iter()?; - let mut height_to_timestamp_iter = vecs.block.height_to_timestamp.iter()?; + let mut txindex_to_txid_iter = vecs.transactions.txid.iter()?; + let mut txindex_to_height_iter = vecs.transactions.height.iter()?; + let mut txindex_to_first_txoutindex_iter = vecs.transactions.first_txoutindex.iter()?; + let mut txoutindex_to_value_iter = vecs.outputs.value.iter()?; + let mut height_to_blockhash_iter = vecs.blocks.blockhash.iter()?; + let mut height_to_timestamp_iter = vecs.blocks.timestamp.iter()?; let utxos: Vec = outpoints .into_iter() diff --git a/crates/brk_query/src/impl/block/info.rs b/crates/brk_query/src/impl/block/info.rs index 3290df2fb..e056b4f7e 100644 --- a/crates/brk_query/src/impl/block/info.rs +++ b/crates/brk_query/src/impl/block/info.rs @@ -20,11 +20,11 @@ impl Query { return Err(Error::OutOfRange("Block height out of range".into())); } - let blockhash = indexer.vecs.block.height_to_blockhash.read_once(height)?; - let difficulty = indexer.vecs.block.height_to_difficulty.read_once(height)?; - let timestamp = 
indexer.vecs.block.height_to_timestamp.read_once(height)?; - let size = indexer.vecs.block.height_to_total_size.read_once(height)?; - let weight = indexer.vecs.block.height_to_weight.read_once(height)?; + let blockhash = indexer.vecs.blocks.blockhash.read_once(height)?; + let difficulty = indexer.vecs.blocks.difficulty.read_once(height)?; + let timestamp = indexer.vecs.blocks.timestamp.read_once(height)?; + let size = indexer.vecs.blocks.total_size.read_once(height)?; + let weight = indexer.vecs.blocks.weight.read_once(height)?; let tx_count = self.tx_count_at_height(height, max_height)?; Ok(BlockInfo { @@ -75,8 +75,8 @@ impl Query { Height::from( self.indexer() .vecs - .block - .height_to_blockhash + .blocks + .blockhash .len() .saturating_sub(1), ) @@ -86,15 +86,15 @@ impl Query { let indexer = self.indexer(); let computer = self.computer(); - let first_txindex = indexer.vecs.tx.height_to_first_txindex.read_once(height)?; + let first_txindex = indexer.vecs.transactions.first_txindex.read_once(height)?; let next_first_txindex = if height < max_height { indexer .vecs - .tx - .height_to_first_txindex + .transactions + .first_txindex .read_once(height.incremented())? 
} else { - TxIndex::from(computer.indexes.transaction.txindex_to_txindex.len()) + TxIndex::from(computer.indexes.txindex.identity.len()) }; Ok((next_first_txindex.to_usize() - first_txindex.to_usize()) as u32) diff --git a/crates/brk_query/src/impl/block/raw.rs b/crates/brk_query/src/impl/block/raw.rs index 25ba3b46e..c18a0be7a 100644 --- a/crates/brk_query/src/impl/block/raw.rs +++ b/crates/brk_query/src/impl/block/raw.rs @@ -15,20 +15,13 @@ impl Query { let computer = self.computer(); let reader = self.reader(); - let max_height = Height::from( - indexer - .vecs - .block - .height_to_blockhash - .len() - .saturating_sub(1), - ); + let max_height = Height::from(indexer.vecs.blocks.blockhash.len().saturating_sub(1)); if height > max_height { return Err(Error::OutOfRange("Block height out of range".into())); } - let position = computer.positions.height_to_position.read_once(height)?; - let size = indexer.vecs.block.height_to_total_size.read_once(height)?; + let position = computer.positions.block_position.read_once(height)?; + let size = indexer.vecs.blocks.total_size.read_once(height)?; reader.read_raw_bytes(position, *size as usize) } diff --git a/crates/brk_query/src/impl/block/status.rs b/crates/brk_query/src/impl/block/status.rs index 6bb3b126b..7ae8ecfbf 100644 --- a/crates/brk_query/src/impl/block/status.rs +++ b/crates/brk_query/src/impl/block/status.rs @@ -16,8 +16,8 @@ impl Query { let max_height = Height::from( indexer .vecs - .block - .height_to_blockhash + .blocks + .blockhash .len() .saturating_sub(1), ); @@ -30,8 +30,8 @@ impl Query { Some( indexer .vecs - .block - .height_to_blockhash + .blocks + .blockhash .read_once(height.incremented())?, ) } else { diff --git a/crates/brk_query/src/impl/block/timestamp.rs b/crates/brk_query/src/impl/block/timestamp.rs index eea27802b..c88b0511e 100644 --- a/crates/brk_query/src/impl/block/timestamp.rs +++ b/crates/brk_query/src/impl/block/timestamp.rs @@ -24,15 +24,15 @@ impl Query { // Get first height of the 
target date let first_height_of_day = computer .indexes - .time - .dateindex_to_first_height + .dateindex + .first_height .read_once(dateindex) .unwrap_or(Height::from(0usize)); let start: usize = usize::from(first_height_of_day).min(max_height_usize); // Use iterator for efficient sequential access - let mut timestamp_iter = indexer.vecs.block.height_to_timestamp.iter()?; + let mut timestamp_iter = indexer.vecs.blocks.timestamp.iter()?; // Search forward from start to find the last block <= target timestamp let mut best_height = start; @@ -62,8 +62,8 @@ impl Query { let height = Height::from(best_height); let blockhash = indexer .vecs - .block - .height_to_blockhash + .blocks + .blockhash .iter()? .get_unwrap(height); diff --git a/crates/brk_query/src/impl/block/txs.rs b/crates/brk_query/src/impl/block/txs.rs index e45231e0e..023b8e0ac 100644 --- a/crates/brk_query/src/impl/block/txs.rs +++ b/crates/brk_query/src/impl/block/txs.rs @@ -31,13 +31,13 @@ impl Query { return Err(Error::OutOfRange("Block height out of range".into())); } - let first_txindex = indexer.vecs.tx.height_to_first_txindex.read_once(height)?; + let first_txindex = indexer.vecs.transactions.first_txindex.read_once(height)?; let next_first_txindex = indexer .vecs - .tx - .height_to_first_txindex + .transactions + .first_txindex .read_once(height.incremented()) - .unwrap_or_else(|_| TxIndex::from(indexer.vecs.tx.txindex_to_txid.len())); + .unwrap_or_else(|_| TxIndex::from(indexer.vecs.transactions.txid.len())); let first: usize = first_txindex.into(); let next: usize = next_first_txindex.into(); @@ -45,8 +45,8 @@ impl Query { let txids: Vec = indexer .vecs - .tx - .txindex_to_txid + .transactions + .txid .iter()? 
.skip(first) .take(count) @@ -67,13 +67,13 @@ impl Query { return Err(Error::OutOfRange("Block height out of range".into())); } - let first_txindex = indexer.vecs.tx.height_to_first_txindex.read_once(height)?; + let first_txindex = indexer.vecs.transactions.first_txindex.read_once(height)?; let next_first_txindex = indexer .vecs - .tx - .height_to_first_txindex + .transactions + .first_txindex .read_once(height.incremented()) - .unwrap_or_else(|_| TxIndex::from(indexer.vecs.tx.txindex_to_txid.len())); + .unwrap_or_else(|_| TxIndex::from(indexer.vecs.transactions.txid.len())); let first: usize = first_txindex.into(); let next: usize = next_first_txindex.into(); @@ -104,13 +104,13 @@ impl Query { return Err(Error::OutOfRange("Block height out of range".into())); } - let first_txindex = indexer.vecs.tx.height_to_first_txindex.read_once(height)?; + let first_txindex = indexer.vecs.transactions.first_txindex.read_once(height)?; let next_first_txindex = indexer .vecs - .tx - .height_to_first_txindex + .transactions + .first_txindex .read_once(height.incremented()) - .unwrap_or_else(|_| TxIndex::from(indexer.vecs.tx.txindex_to_txid.len())); + .unwrap_or_else(|_| TxIndex::from(indexer.vecs.transactions.txid.len())); let first: usize = first_txindex.into(); let next: usize = next_first_txindex.into(); @@ -121,7 +121,7 @@ impl Query { } let txindex = TxIndex::from(first + index); - let txid = indexer.vecs.tx.txindex_to_txid.iter()?.get_unwrap(txindex); + let txid = indexer.vecs.transactions.txid.iter()?.get_unwrap(txindex); Ok(txid) } diff --git a/crates/brk_query/src/impl/mining/block_fees.rs b/crates/brk_query/src/impl/mining/block_fees.rs index bfacd0169..a88f4e024 100644 --- a/crates/brk_query/src/impl/mining/block_fees.rs +++ b/crates/brk_query/src/impl/mining/block_fees.rs @@ -15,11 +15,10 @@ impl Query { let iter = DateIndexIter::new(computer, start, current_height.to_usize()); - // KISS: dateindex.average.0 is now a concrete field let mut fees = computer 
.transactions .fees - .indexes_to_fee + .fee .sats .dateindex .average diff --git a/crates/brk_query/src/impl/mining/block_rewards.rs b/crates/brk_query/src/impl/mining/block_rewards.rs index 9281bb914..b5b6daa75 100644 --- a/crates/brk_query/src/impl/mining/block_rewards.rs +++ b/crates/brk_query/src/impl/mining/block_rewards.rs @@ -15,12 +15,10 @@ impl Query { let iter = DateIndexIter::new(computer, start, current_height.to_usize()); - // coinbase = subsidy + fees - // KISS: dateindex.distribution.average.0 is now a concrete field let mut rewards = computer .blocks .rewards - .indexes_to_coinbase + .coinbase .sats .dateindex .distribution diff --git a/crates/brk_query/src/impl/mining/block_sizes.rs b/crates/brk_query/src/impl/mining/block_sizes.rs index 2ba174a6d..7d95774c5 100644 --- a/crates/brk_query/src/impl/mining/block_sizes.rs +++ b/crates/brk_query/src/impl/mining/block_sizes.rs @@ -18,7 +18,7 @@ impl Query { let mut sizes_vec = computer .blocks .size - .indexes_to_block_size + .size .dateindex .distribution .average @@ -27,7 +27,7 @@ impl Query { let mut weights_vec = computer .blocks .weight - .indexes_to_block_weight + .weight .dateindex .distribution .average diff --git a/crates/brk_query/src/impl/mining/dateindex_iter.rs b/crates/brk_query/src/impl/mining/dateindex_iter.rs index ec9247c2d..d5fa1761a 100644 --- a/crates/brk_query/src/impl/mining/dateindex_iter.rs +++ b/crates/brk_query/src/impl/mining/dateindex_iter.rs @@ -14,14 +14,14 @@ impl<'a> DateIndexIter<'a> { pub fn new(computer: &'a Computer, start_height: usize, end_height: usize) -> Self { let start_di = computer .indexes - .block - .height_to_dateindex + .height + .dateindex .read_once(Height::from(start_height)) .unwrap_or_default(); let end_di = computer .indexes - .block - .height_to_dateindex + .height + .dateindex .read_once(Height::from(end_height)) .unwrap_or_default(); @@ -46,14 +46,8 @@ impl<'a> DateIndexIter<'a> { .to_usize() .saturating_sub(self.start_di.to_usize()) + 1; - let 
mut timestamps = self - .computer - .blocks - .time - .timeindexes_to_timestamp - .dateindex - .iter(); - let mut heights = self.computer.indexes.time.dateindex_to_first_height.iter(); + let mut timestamps = self.computer.blocks.time.timestamp.dateindex.iter(); + let mut heights = self.computer.indexes.dateindex.first_height.iter(); let mut entries = Vec::with_capacity(total / self.step + 1); let mut i = self.start_di.to_usize(); diff --git a/crates/brk_query/src/impl/mining/difficulty.rs b/crates/brk_query/src/impl/mining/difficulty.rs index 6a8fc6e72..97f36863d 100644 --- a/crates/brk_query/src/impl/mining/difficulty.rs +++ b/crates/brk_query/src/impl/mining/difficulty.rs @@ -22,16 +22,16 @@ impl Query { // Get current epoch let current_epoch = computer .indexes - .block - .height_to_difficultyepoch + .height + .difficultyepoch .read_once(current_height)?; let current_epoch_usize: usize = current_epoch.into(); // Get epoch start height let epoch_start_height = computer .indexes - .block - .difficultyepoch_to_first_height + .difficultyepoch + .first_height .read_once(current_epoch)?; let epoch_start_u32: u32 = epoch_start_height.into(); @@ -45,12 +45,13 @@ impl Query { let epoch_start_timestamp = computer .blocks .time - .difficultyepoch_to_timestamp + .timestamp + .difficultyepoch .read_once(current_epoch)?; let current_timestamp = indexer .vecs - .block - .height_to_timestamp + .blocks + .timestamp .read_once(current_height)?; // Calculate average block time in current epoch @@ -85,19 +86,19 @@ impl Query { let prev_epoch = DifficultyEpoch::from(current_epoch_usize - 1); let prev_epoch_start = computer .indexes - .block - .difficultyepoch_to_first_height + .difficultyepoch + .first_height .read_once(prev_epoch)?; let prev_difficulty = indexer .vecs - .block - .height_to_difficulty + .blocks + .difficulty .read_once(prev_epoch_start)?; let curr_difficulty = indexer .vecs - .block - .height_to_difficulty + .blocks + .difficulty .read_once(epoch_start_height)?; if 
*prev_difficulty > 0.0 { diff --git a/crates/brk_query/src/impl/mining/epochs.rs b/crates/brk_query/src/impl/mining/epochs.rs index a058b4f93..9313e8938 100644 --- a/crates/brk_query/src/impl/mining/epochs.rs +++ b/crates/brk_query/src/impl/mining/epochs.rs @@ -10,29 +10,24 @@ pub fn iter_difficulty_epochs( ) -> Vec { let start_epoch = computer .indexes - .block - .height_to_difficultyepoch + .height + .difficultyepoch .read_once(Height::from(start_height)) .unwrap_or_default(); let end_epoch = computer .indexes - .block - .height_to_difficultyepoch + .height + .difficultyepoch .read_once(Height::from(end_height)) .unwrap_or_default(); let mut epoch_to_height_iter = computer .indexes - .block - .difficultyepoch_to_first_height - .iter(); - let mut epoch_to_timestamp_iter = computer.blocks.time.difficultyepoch_to_timestamp.iter(); - let mut epoch_to_difficulty_iter = computer - .blocks - .mining - .indexes_to_difficulty .difficultyepoch + .first_height .iter(); + let mut epoch_to_timestamp_iter = computer.blocks.time.timestamp.difficultyepoch.iter(); + let mut epoch_to_difficulty_iter = computer.blocks.mining.difficulty.difficultyepoch.iter(); let mut results = Vec::with_capacity(end_epoch.to_usize() - start_epoch.to_usize() + 1); let mut prev_difficulty: Option = None; diff --git a/crates/brk_query/src/impl/mining/hashrate.rs b/crates/brk_query/src/impl/mining/hashrate.rs index 2913f4888..974fee439 100644 --- a/crates/brk_query/src/impl/mining/hashrate.rs +++ b/crates/brk_query/src/impl/mining/hashrate.rs @@ -12,23 +12,19 @@ impl Query { let current_height = self.height(); // Get current difficulty - let current_difficulty = *indexer - .vecs - .block - .height_to_difficulty - .read_once(current_height)?; + let current_difficulty = *indexer.vecs.blocks.difficulty.read_once(current_height)?; // Get current hashrate let current_dateindex = computer .indexes - .block - .height_to_dateindex + .height + .dateindex .read_once(current_height)?; let current_hashrate = 
*computer .blocks .mining - .indexes_to_hash_rate + .hash_rate .dateindex .read_once(current_dateindex)? as u128; @@ -42,8 +38,8 @@ impl Query { // Get hashrate entries using iterators for efficiency let start_dateindex = computer .indexes - .block - .height_to_dateindex + .height + .dateindex .read_once(Height::from(start))?; let end_dateindex = current_dateindex; @@ -55,19 +51,9 @@ impl Query { let step = (total_days / 200).max(1); // Max ~200 data points // Create iterators for the loop - let mut hashrate_iter = computer - .blocks - .mining - .indexes_to_hash_rate - .dateindex - .iter(); + let mut hashrate_iter = computer.blocks.mining.hash_rate.dateindex.iter(); - let mut timestamp_iter = computer - .blocks - .time - .timeindexes_to_timestamp - .dateindex - .iter(); + let mut timestamp_iter = computer.blocks.time.timestamp.dateindex.iter(); let mut hashrates = Vec::with_capacity(total_days / step + 1); let mut di = start_dateindex.to_usize(); diff --git a/crates/brk_query/src/impl/mining/pools.rs b/crates/brk_query/src/impl/mining/pools.rs index 6c76d2026..049cb51de 100644 --- a/crates/brk_query/src/impl/mining/pools.rs +++ b/crates/brk_query/src/impl/mining/pools.rs @@ -31,7 +31,7 @@ impl Query { // For each pool, get cumulative count at end and start, subtract to get range count for (pool_id, pool_vecs) in &computer.pools.vecs { let mut cumulative = pool_vecs - .indexes_to_blocks_mined + .blocks_mined .height_cumulative .inner() .iter(); @@ -101,7 +101,7 @@ impl Query { .ok_or_else(|| Error::NotFound("Pool data not found".into()))?; let mut cumulative = pool_vecs - .indexes_to_blocks_mined + .blocks_mined .height_cumulative .inner() .iter(); diff --git a/crates/brk_query/src/impl/mining/reward_stats.rs b/crates/brk_query/src/impl/mining/reward_stats.rs index f0eb2281a..77e26200f 100644 --- a/crates/brk_query/src/impl/mining/reward_stats.rs +++ b/crates/brk_query/src/impl/mining/reward_stats.rs @@ -12,31 +12,19 @@ impl Query { let end_block = current_height; 
let start_block = Height::from(current_height.to_usize().saturating_sub(block_count - 1)); - // KISS: height is now a concrete field (no Option) - let mut coinbase_iter = computer - .blocks - .rewards - .indexes_to_coinbase - .sats - .height - .iter(); - // KISS: height.sum_cum.sum.0 is now a concrete field + let mut coinbase_iter = computer.blocks.rewards.coinbase.sats.height.iter(); + let mut fee_iter = computer .transactions .fees - .indexes_to_fee + .fee .sats .height .sum_cum .sum .0 .iter(); - let mut tx_count_iter = computer - .transactions - .count - .indexes_to_tx_count - .height - .iter(); + let mut tx_count_iter = computer.transactions.count.tx_count.height.iter(); let mut total_reward = Sats::ZERO; let mut total_fee = Sats::ZERO; diff --git a/crates/brk_query/src/impl/transaction.rs b/crates/brk_query/src/impl/transaction.rs index e30b10409..c35a97959 100644 --- a/crates/brk_query/src/impl/transaction.rs +++ b/crates/brk_query/src/impl/transaction.rs @@ -55,9 +55,9 @@ impl Query { }; // Get block info for status - let height = indexer.vecs.tx.txindex_to_height.read_once(txindex)?; - let block_hash = indexer.vecs.block.height_to_blockhash.read_once(height)?; - let block_time = indexer.vecs.block.height_to_timestamp.read_once(height)?; + let height = indexer.vecs.transactions.height.read_once(txindex)?; + let block_hash = indexer.vecs.blocks.blockhash.read_once(height)?; + let block_time = indexer.vecs.blocks.timestamp.read_once(height)?; Ok(TxStatus { confirmed: true, @@ -113,8 +113,8 @@ impl Query { // Calculate txoutindex let first_txoutindex = indexer .vecs - .tx - .txindex_to_first_txoutindex + .transactions + .first_txoutindex .read_once(txindex)?; let txoutindex = first_txoutindex + vout; @@ -123,7 +123,7 @@ impl Query { let txinindex = computer .outputs .spent - .txoutindex_to_txinindex + .txinindex .read_once(txoutindex)?; if txinindex == TxInIndex::UNSPENT { @@ -157,19 +157,20 @@ impl Query { // Get output range let first_txoutindex = indexer 
.vecs - .tx - .txindex_to_first_txoutindex + .transactions + .first_txoutindex .read_once(txindex)?; let next_first_txoutindex = indexer .vecs - .tx - .txindex_to_first_txoutindex + .transactions + .first_txoutindex .read_once(txindex.incremented())?; let output_count = usize::from(next_first_txoutindex) - usize::from(first_txoutindex); // Get spend status for each output let computer = self.computer(); - let mut txoutindex_to_txinindex_iter = computer.outputs.spent.txoutindex_to_txinindex.iter()?; + let mut txoutindex_to_txinindex_iter = + computer.outputs.spent.txinindex.iter()?; let mut outspends = Vec::with_capacity(output_count); for i in 0..output_count { @@ -194,21 +195,21 @@ impl Query { let computer = self.computer(); // Get tx metadata using read_once for single lookups - let txid = indexer.vecs.tx.txindex_to_txid.read_once(txindex)?; - let height = indexer.vecs.tx.txindex_to_height.read_once(txindex)?; - let version = indexer.vecs.tx.txindex_to_txversion.read_once(txindex)?; - let lock_time = indexer.vecs.tx.txindex_to_rawlocktime.read_once(txindex)?; - let total_size = indexer.vecs.tx.txindex_to_total_size.read_once(txindex)?; + let txid = indexer.vecs.transactions.txid.read_once(txindex)?; + let height = indexer.vecs.transactions.height.read_once(txindex)?; + let version = indexer.vecs.transactions.txversion.read_once(txindex)?; + let lock_time = indexer.vecs.transactions.rawlocktime.read_once(txindex)?; + let total_size = indexer.vecs.transactions.total_size.read_once(txindex)?; let first_txinindex = indexer .vecs - .tx - .txindex_to_first_txinindex + .transactions + .first_txinindex .read_once(txindex)?; - let position = computer.positions.txindex_to_position.read_once(txindex)?; + let position = computer.positions.tx_position.read_once(txindex)?; // Get block info for status - let block_hash = indexer.vecs.block.height_to_blockhash.read_once(height)?; - let block_time = indexer.vecs.block.height_to_timestamp.read_once(height)?; + let block_hash = 
indexer.vecs.blocks.blockhash.read_once(height)?; + let block_time = indexer.vecs.blocks.timestamp.read_once(height)?; // Read and decode the raw transaction from blk file let buffer = reader.read_raw_bytes(position, *total_size as usize)?; @@ -217,11 +218,11 @@ impl Query { .map_err(|_| Error::Parse("Failed to decode transaction".into()))?; // For iterating through inputs, we need iterators (multiple lookups) - let mut txindex_to_txid_iter = indexer.vecs.tx.txindex_to_txid.iter()?; + let mut txindex_to_txid_iter = indexer.vecs.transactions.txid.iter()?; let mut txindex_to_first_txoutindex_iter = - indexer.vecs.tx.txindex_to_first_txoutindex.iter()?; - let mut txinindex_to_outpoint_iter = indexer.vecs.txin.txinindex_to_outpoint.iter()?; - let mut txoutindex_to_value_iter = indexer.vecs.txout.txoutindex_to_value.iter()?; + indexer.vecs.transactions.first_txoutindex.iter()?; + let mut txinindex_to_outpoint_iter = indexer.vecs.inputs.outpoint.iter()?; + let mut txoutindex_to_value_iter = indexer.vecs.outputs.value.iter()?; // Build inputs with prevout information let input: Vec = tx @@ -313,8 +314,8 @@ impl Query { let reader = self.reader(); let computer = self.computer(); - let total_size = indexer.vecs.tx.txindex_to_total_size.read_once(txindex)?; - let position = computer.positions.txindex_to_position.read_once(txindex)?; + let total_size = indexer.vecs.transactions.total_size.read_once(txindex)?; + let position = computer.positions.tx_position.read_once(txindex)?; let buffer = reader.read_raw_bytes(position, *total_size as usize)?; @@ -325,41 +326,25 @@ impl Query { let indexer = self.indexer(); // Look up spending txindex directly - let spending_txindex = indexer - .vecs - .txin - .txinindex_to_txindex - .read_once(txinindex)?; + let spending_txindex = indexer.vecs.inputs.txindex.read_once(txinindex)?; // Calculate vin let spending_first_txinindex = indexer .vecs - .tx - .txindex_to_first_txinindex + .transactions + .first_txinindex 
.read_once(spending_txindex)?; let vin = Vin::from(usize::from(txinindex) - usize::from(spending_first_txinindex)); // Get spending tx details - let spending_txid = indexer - .vecs - .tx - .txindex_to_txid - .read_once(spending_txindex)?; + let spending_txid = indexer.vecs.transactions.txid.read_once(spending_txindex)?; let spending_height = indexer .vecs - .tx - .txindex_to_height + .transactions + .height .read_once(spending_txindex)?; - let block_hash = indexer - .vecs - .block - .height_to_blockhash - .read_once(spending_height)?; - let block_time = indexer - .vecs - .block - .height_to_timestamp - .read_once(spending_height)?; + let block_hash = indexer.vecs.blocks.blockhash.read_once(spending_height)?; + let block_time = indexer.vecs.blocks.timestamp.read_once(spending_height)?; Ok(TxOutspend { spent: true, diff --git a/crates/brk_query/src/lib.rs b/crates/brk_query/src/lib.rs index 90129d13a..2f6c11199 100644 --- a/crates/brk_query/src/lib.rs +++ b/crates/brk_query/src/lib.rs @@ -61,7 +61,7 @@ impl Query { /// Current indexed height pub fn height(&self) -> Height { - Height::from(self.indexer().vecs.block.height_to_blockhash.stamp()) + Height::from(self.indexer().vecs.blocks.blockhash.stamp()) } #[inline] diff --git a/crates/brk_query/src/vecs.rs b/crates/brk_query/src/vecs.rs index f57b0f004..f49ac8648 100644 --- a/crates/brk_query/src/vecs.rs +++ b/crates/brk_query/src/vecs.rs @@ -103,6 +103,8 @@ impl<'a> Vecs<'a> { .into_iter() .collect(), ) + .merge_branches() + .unwrap() .simplify() .unwrap(), ); diff --git a/crates/brk_reader/Cargo.toml b/crates/brk_reader/Cargo.toml index 0a403cb26..881e258ca 100644 --- a/crates/brk_reader/Cargo.toml +++ b/crates/brk_reader/Cargo.toml @@ -17,6 +17,6 @@ brk_rpc = { workspace = true } brk_types = { workspace = true } crossbeam = { version = "0.8.4", features = ["crossbeam-channel"] } derive_more = { workspace = true } -log = { workspace = true } +tracing = { workspace = true } parking_lot = { workspace = true } rayon = 
{ workspace = true } diff --git a/crates/brk_reader/src/lib.rs b/crates/brk_reader/src/lib.rs index 6decf6d24..21945400c 100644 --- a/crates/brk_reader/src/lib.rs +++ b/crates/brk_reader/src/lib.rs @@ -20,9 +20,9 @@ use brk_types::{BlkMetadata, BlkPosition, BlockHash, Height, ReadBlock}; pub use crossbeam::channel::Receiver; use crossbeam::channel::bounded; use derive_more::Deref; -use log::error; use parking_lot::{RwLock, RwLockReadGuard}; use rayon::prelude::*; +use tracing::error; mod any_block; mod blk_index_to_blk_path; diff --git a/crates/brk_rpc/Cargo.toml b/crates/brk_rpc/Cargo.toml index d34220be7..bbbe28f4d 100644 --- a/crates/brk_rpc/Cargo.toml +++ b/crates/brk_rpc/Cargo.toml @@ -14,5 +14,5 @@ bitcoincore-rpc = { workspace = true } brk_error = { workspace = true } brk_logger = { workspace = true } brk_types = { workspace = true } -log = { workspace = true } +tracing = { workspace = true } parking_lot = { workspace = true } diff --git a/crates/brk_rpc/src/inner.rs b/crates/brk_rpc/src/inner.rs index fc6d45123..48647f272 100644 --- a/crates/brk_rpc/src/inner.rs +++ b/crates/brk_rpc/src/inner.rs @@ -2,8 +2,8 @@ use std::{thread::sleep, time::Duration}; use bitcoincore_rpc::{Client as CoreClient, Error as RpcError, jsonrpc}; use brk_error::Result; -use log::info; use parking_lot::RwLock; +use tracing::info; pub use bitcoincore_rpc::Auth; diff --git a/crates/brk_rpc/src/lib.rs b/crates/brk_rpc/src/lib.rs index 68c331e52..e35a6c529 100644 --- a/crates/brk_rpc/src/lib.rs +++ b/crates/brk_rpc/src/lib.rs @@ -22,7 +22,7 @@ pub use bitcoincore_rpc::Auth; mod inner; use inner::ClientInner; -use log::{debug, info}; +use tracing::{debug, info}; /// /// Bitcoin Core RPC Client diff --git a/crates/brk_server/Cargo.toml b/crates/brk_server/Cargo.toml index ebe39d7ac..a14d69e63 100644 --- a/crates/brk_server/Cargo.toml +++ b/crates/brk_server/Cargo.toml @@ -26,14 +26,13 @@ brk_traversable = { workspace = true } derive_more = { workspace = true } vecdb = { workspace = true 
} jiff = { workspace = true } -log = { workspace = true } quick_cache = "0.6.18" schemars = { workspace = true } serde = { workspace = true } serde_json = { workspace = true } tokio = { workspace = true } +tracing = { workspace = true } tower-http = { version = "0.6.8", features = ["compression-full", "trace"] } -tracing = "0.1.44" [dev-dependencies] brk_mempool = { workspace = true } diff --git a/crates/brk_server/examples/server.rs b/crates/brk_server/examples/server.rs index 9d8c05222..2f720f94e 100644 --- a/crates/brk_server/examples/server.rs +++ b/crates/brk_server/examples/server.rs @@ -9,7 +9,7 @@ use brk_query::AsyncQuery; use brk_reader::Reader; use brk_rpc::{Auth, Client}; use brk_server::Server; -use log::info; +use tracing::info; use vecdb::Exit; pub fn main() -> Result<()> { @@ -61,13 +61,13 @@ fn run() -> Result<()> { // Await the handle to catch both panics and errors match handle.await { Ok(Ok(())) => info!("Server shut down cleanly"), - Ok(Err(e)) => log::error!("Server error: {e:?}"), + Ok(Err(e)) => tracing::error!("Server error: {e:?}"), Err(e) => { // JoinError - either panic or cancellation if e.is_panic() { - log::error!("Server panicked: {:?}", e.into_panic()); + tracing::error!("Server panicked: {:?}", e.into_panic()); } else { - log::error!("Server task cancelled"); + tracing::error!("Server task cancelled"); } } } diff --git a/crates/brk_server/src/files/file.rs b/crates/brk_server/src/files/file.rs index 27db71083..3cad7ba7d 100644 --- a/crates/brk_server/src/files/file.rs +++ b/crates/brk_server/src/files/file.rs @@ -1,4 +1,8 @@ -use std::{fs, path::Path, time::Duration}; +use std::{ + fs, + path::{Path, PathBuf}, + time::Duration, +}; use axum::{ body::Body, @@ -7,8 +11,8 @@ use axum::{ response::{IntoResponse, Response}, }; use brk_error::{Error, Result}; -use log::{error, info}; use quick_cache::sync::GuardResult; +use tracing::{error, info}; use crate::{AppState, HeaderMapExtended, ModifiedState, ResponseExtended}; @@ -59,12 +63,12 
@@ fn any_handler( } } - // Auto-resolve .js extension for ES module imports - if !path.exists() && path.extension().is_none() { - let with_js = path.with_extension("js"); - if with_js.exists() { - path = with_js; - } + // Strip hash from import-mapped URLs (e.g., foo.abc12345.js -> foo.js) + if !path.exists() + && let Some(unhashed) = strip_importmap_hash(&path) + && unhashed.exists() + { + path = unhashed; } if !path.exists() || path.is_dir() { @@ -105,10 +109,9 @@ fn path_to_response(headers: &HeaderMap, state: &AppState, path: &Path) -> Respo fn path_to_response_(headers: &HeaderMap, state: &AppState, path: &Path) -> Result { let (modified, date) = headers.check_if_modified_since(path)?; - // TODO: Re-enable for production - // if modified == ModifiedState::NotModifiedSince { - // return Ok(Response::new_not_modified()); - // } + if !cfg!(debug_assertions) && modified == ModifiedState::NotModifiedSince { + return Ok(Response::new_not_modified()); + } let serialized_path = path.to_str().unwrap(); @@ -117,8 +120,7 @@ fn path_to_response_(headers: &HeaderMap, state: &AppState, path: &Path) -> Resu .is_some_and(|extension| extension == "html") || serialized_path.ends_with("service-worker.js"); - // TODO: Re-enable caching for production - let guard_res = if false && !must_revalidate { + let guard_res = if !cfg!(debug_assertions) && !must_revalidate { Some(state.cache.get_value_or_guard( &path.to_str().unwrap().to_owned(), Some(Duration::from_millis(50)), @@ -149,21 +151,37 @@ fn path_to_response_(headers: &HeaderMap, state: &AppState, path: &Path) -> Resu headers.insert_cors(); headers.insert_content_type(path); - // TODO: Re-enable immutable caching for production - // if must_revalidate { - // headers.insert_cache_control_must_revalidate(); - // } else if path.extension().is_some_and(|extension| { - // extension == "jpg" - // || extension == "png" - // || extension == "woff2" - // || extension == "js" - // || extension == "map" - // }) { - // 
headers.insert_cache_control_immutable(); - // } - headers.insert_cache_control_must_revalidate(); + if cfg!(debug_assertions) || must_revalidate { + headers.insert_cache_control_must_revalidate(); + } else { + headers.insert_cache_control_immutable(); + } headers.insert_last_modified(date); Ok(response) } + +/// Strip importmap hash from filename: `foo.abc12345.js` -> `foo.js` +/// Hash is 8 hex characters between the name and extension. +fn strip_importmap_hash(path: &Path) -> Option { + let stem = path.file_stem()?.to_str()?; + let ext = path.extension()?.to_str()?; + + // Only process js/mjs/css files + if !matches!(ext, "js" | "mjs" | "css") { + return None; + } + + // Look for pattern: name.HASH where HASH is 8 hex chars + let dot_pos = stem.rfind('.')?; + let hash = &stem[dot_pos + 1..]; + + if hash.len() == 8 && hash.chars().all(|c| c.is_ascii_hexdigit()) { + let name = &stem[..dot_pos]; + let new_name = format!("{}.{}", name, ext); + Some(path.with_file_name(new_name)) + } else { + None + } +} diff --git a/crates/brk_server/src/lib.rs b/crates/brk_server/src/lib.rs index a997d3ad3..fc55486a6 100644 --- a/crates/brk_server/src/lib.rs +++ b/crates/brk_server/src/lib.rs @@ -13,14 +13,12 @@ use axum::{ serve, }; use brk_error::Result; -use brk_logger::OwoColorize; use brk_mcp::route::mcp_router; use brk_query::AsyncQuery; -use log::{error, info}; use quick_cache::sync::Cache; use tokio::net::TcpListener; use tower_http::{compression::CompressionLayer, trace::TraceLayer}; -use tracing::Span; +use tracing::{error, info}; mod api; pub mod cache; @@ -67,24 +65,17 @@ impl Server { let trace_layer = TraceLayer::new_for_http() .on_request(()) - .on_response( - |response: &Response, latency: Duration, _span: &Span| { - let latency = latency.bright_black(); - let status = response.status(); - let uri = response.extensions().get::().unwrap(); - match status { - StatusCode::OK => { - info!("{} {} {:?}", status.as_u16().green(), uri, latency) - } - 
StatusCode::NOT_MODIFIED - | StatusCode::TEMPORARY_REDIRECT - | StatusCode::PERMANENT_REDIRECT => { - info!("{} {} {:?}", status.as_u16().bright_black(), uri, latency) - } - _ => error!("{} {} {:?}", status.as_u16().red(), uri, latency), - } - }, - ) + .on_response(|response: &Response, latency: Duration, _: &tracing::Span| { + let status = response.status().as_u16(); + let uri = response.extensions().get::().unwrap(); + match response.status() { + StatusCode::OK => info!(status, %uri, ?latency), + StatusCode::NOT_MODIFIED + | StatusCode::TEMPORARY_REDIRECT + | StatusCode::PERMANENT_REDIRECT => info!(status, %uri, ?latency), + _ => error!(status, %uri, ?latency), + } + }) .on_body_chunk(()) .on_failure(()) .on_eos(()); diff --git a/crates/brk_store/src/any.rs b/crates/brk_store/src/any.rs index e35ce79b6..f1a736869 100644 --- a/crates/brk_store/src/any.rs +++ b/crates/brk_store/src/any.rs @@ -8,6 +8,7 @@ pub trait AnyStore: Send + Sync { fn has(&self, height: Height) -> bool; fn needs(&self, height: Height) -> bool; fn version(&self) -> Version; + fn export_meta(&mut self, height: Height) -> Result<()>; fn export_meta_if_needed(&mut self, height: Height) -> Result<()>; fn keyspace(&self) -> &Keyspace; fn commit(&mut self, height: Height) -> Result<()>; diff --git a/crates/brk_store/src/lib.rs b/crates/brk_store/src/lib.rs index 666cde04d..bf662e2cf 100644 --- a/crates/brk_store/src/lib.rs +++ b/crates/brk_store/src/lib.rs @@ -245,9 +245,14 @@ where self.meta.needs(height) } + fn export_meta(&mut self, height: Height) -> Result<()> { + self.meta.export(height)?; + Ok(()) + } + fn export_meta_if_needed(&mut self, height: Height) -> Result<()> { if !self.has(height) { - self.meta.export(height)?; + self.export_meta(height)?; } Ok(()) } @@ -297,6 +302,10 @@ where &self.keyspace } + fn export_meta(&mut self, height: Height) -> Result<()> { + self.export_meta(height) + } + fn export_meta_if_needed(&mut self, height: Height) -> Result<()> { 
self.export_meta_if_needed(height) } diff --git a/crates/brk_traversable/src/lib.rs b/crates/brk_traversable/src/lib.rs index a68d5aa90..1be8608b6 100644 --- a/crates/brk_traversable/src/lib.rs +++ b/crates/brk_traversable/src/lib.rs @@ -1,4 +1,4 @@ -use std::{collections::BTreeMap, fmt::Debug}; +use std::{collections::BTreeMap, fmt::Display}; pub use brk_types::{Index, MetricLeaf, MetricLeafWithSchema, TreeNode}; @@ -210,11 +210,11 @@ impl Traversable for Option { } } -impl Traversable for BTreeMap { +impl Traversable for BTreeMap { fn to_tree_node(&self) -> TreeNode { let children = self .iter() - .map(|(k, v)| (format!("{:?}", k), v.to_tree_node())) + .map(|(k, v)| (format!("{}", k), v.to_tree_node())) .collect(); TreeNode::Branch(children) } diff --git a/crates/brk_traversable/tests/derive_tests.rs b/crates/brk_traversable/tests/derive_tests.rs index 581306754..76f4aaac0 100644 --- a/crates/brk_traversable/tests/derive_tests.rs +++ b/crates/brk_traversable/tests/derive_tests.rs @@ -872,118 +872,3 @@ fn value_block_full_denominations_as_branches() { } } -// ============================================================================ -// Case 14: ValueBlockFull with merge - smallest possible tree -// ============================================================================ -// When same metric names used across denominations, merge collapses them - -#[derive(Traversable)] -#[traversable(merge)] -struct MockValueBlockFullMerged { - pub sats: MockComputedBlockFull, - pub bitcoin: MockComputedBlockFull, - pub dollars: Option, -} - -#[test] -fn value_block_full_merged_same_names_collapse() { - // Using SAME metric names across all denominations - let value = MockValueBlockFullMerged { - sats: make_computed_block_full("metric"), - bitcoin: make_computed_block_full("metric"), // Same name! - dollars: Some(make_computed_block_full("metric")), // Same name! 
- }; - - let tree = value.to_tree_node(); - - // With merge and same metric names, all leaves collapse - // Result: flat tree with each metric type having all denomination indexes merged - match &tree { - TreeNode::Branch(map) => { - // Should be flat - no sats/bitcoin/dollars branches - assert!(!map.contains_key("sats"), "sats branch should be lifted"); - assert!(!map.contains_key("bitcoin"), "bitcoin branch should be lifted"); - assert!(!map.contains_key("dollars"), "dollars branch should be lifted"); - - // Should have metric keys: base, cumulative, dateindex, difficultyepoch - assert!(map.contains_key("base"), "Missing base key"); - assert!(map.contains_key("cumulative"), "Missing cumulative key"); - - // base should have all Height indexes merged (same metric name = collapsed) - if let Some(TreeNode::Leaf(l)) = map.get("base") { - assert_eq!(l.name(), "metric"); - // All 3 denominations have Height index - let indexes = l.indexes(); - assert!(indexes.contains(&Index::Height)); - } - - // cumulative should have multiple indexes merged - if let Some(TreeNode::Leaf(l)) = map.get("cumulative") { - assert_eq!(l.name(), "metric_cumulative"); - let indexes = l.indexes(); - // Height from height_cumulative, DateIndex from dateindex, Week/Month from dates, DifficultyEpoch - assert!(indexes.contains(&Index::Height)); - assert!(indexes.contains(&Index::DateIndex)); - assert!(indexes.contains(&Index::WeekIndex)); - assert!(indexes.contains(&Index::MonthIndex)); - assert!(indexes.contains(&Index::DifficultyEpoch)); - } - } - _ => panic!("Expected branch"), - } -} - -#[test] -fn computed_block_full_inner_structure() { - // Test the inner structure of a single ComputedBlockFull - let value = make_computed_block_full("metric"); - let tree = value.to_tree_node(); - - match &tree { - TreeNode::Branch(map) => { - println!("Keys: {:?}", map.keys().collect::>()); - - // Expected flat structure after all merges: - // base, cumulative, avg, sum, min, max, average, percentiles - // 
Note: dateindex and difficultyepoch are merged in, not separate branches - - // base from height with wrap="base" - assert!(map.contains_key("base")); - if let Some(TreeNode::Leaf(l)) = map.get("base") { - assert_eq!(l.name(), "metric"); - assert!(l.indexes().contains(&Index::Height)); - } - - // cumulative from height_cumulative + dateindex.sum_cum.cumulative + dates + difficultyepoch - assert!(map.contains_key("cumulative")); - if let Some(TreeNode::Leaf(l)) = map.get("cumulative") { - assert_eq!(l.name(), "metric_cumulative"); - let indexes = l.indexes(); - // Should have merged: Height, DateIndex, WeekIndex, MonthIndex, DifficultyEpoch - assert!(indexes.contains(&Index::Height), "Missing Height"); - assert!(indexes.contains(&Index::DateIndex), "Missing DateIndex"); - assert!(indexes.contains(&Index::WeekIndex), "Missing WeekIndex"); - assert!(indexes.contains(&Index::MonthIndex), "Missing MonthIndex"); - assert!(indexes.contains(&Index::DifficultyEpoch), "Missing DifficultyEpoch"); - } - - // avg, sum, min, max merged from dates + difficultyepoch LazyFull - assert!(map.contains_key("avg"), "Missing avg"); - assert!(map.contains_key("sum"), "Missing sum"); - assert!(map.contains_key("min"), "Missing min"); - assert!(map.contains_key("max"), "Missing max"); - - // average from dateindex.distribution.average - assert!(map.contains_key("average"), "Missing average"); - - // percentiles stays grouped - assert!(map.contains_key("percentiles"), "Missing percentiles"); - - // dateindex and difficultyepoch should NOT be separate branches - assert!(!map.contains_key("dateindex"), "dateindex should be merged in"); - assert!(!map.contains_key("difficultyepoch"), "difficultyepoch should be merged in"); - assert!(!map.contains_key("minmax"), "minmax should be flattened"); - } - _ => panic!("Expected branch"), - } -} diff --git a/crates/brk_traversable/tests/traversable/common.rs b/crates/brk_traversable/tests/traversable/common.rs new file mode 100644 index 
000000000..5d00d8e3b --- /dev/null +++ b/crates/brk_traversable/tests/traversable/common.rs @@ -0,0 +1,162 @@ +//! Common mock types and helpers for traversable tests. + +use std::collections::BTreeSet; + +use brk_traversable::{Index, MetricLeaf, MetricLeafWithSchema, Traversable, TreeNode}; +use brk_traversable_derive::Traversable; + +/// Mock leaf vec that produces a Leaf node with given name and index. +/// This simulates the behavior of EagerVec>. +pub struct MockVec { + pub name: String, + pub index: Index, +} + +impl MockVec { + pub fn new(name: &str, index: Index) -> Self { + Self { + name: name.to_string(), + index, + } + } +} + +impl Traversable for MockVec { + fn to_tree_node(&self) -> TreeNode { + TreeNode::Leaf(MetricLeafWithSchema::new( + MetricLeaf::new( + self.name.clone(), + "MockType".to_string(), + BTreeSet::from([self.index]), + ), + serde_json::Value::Null, + )) + } + + fn iter_any_exportable(&self) -> impl Iterator { + std::iter::empty() + } +} + +// ============================================================================ +// Transparent Vec Types (matching real SumVec, CumulativeVec, MinVec, etc.) +// ============================================================================ +// All real Vec types are now transparent - they delegate directly to inner. 
+ +#[derive(Traversable)] +#[traversable(transparent)] +pub struct MockSumVec(pub MockVec); + +#[derive(Traversable)] +#[traversable(transparent)] +pub struct MockCumulativeVec(pub MockVec); + +#[derive(Traversable)] +#[traversable(transparent)] +pub struct MockMinVec(pub MockVec); + +#[derive(Traversable)] +#[traversable(transparent)] +pub struct MockMaxVec(pub MockVec); + +#[derive(Traversable)] +#[traversable(transparent)] +pub struct MockAverageVec(pub MockVec); + +#[derive(Traversable)] +#[traversable(transparent)] +pub struct MockMedianVec(pub MockVec); + +#[derive(Traversable)] +#[traversable(transparent)] +pub struct MockPct10Vec(pub MockVec); + +#[derive(Traversable)] +#[traversable(transparent)] +pub struct MockPct25Vec(pub MockVec); + +#[derive(Traversable)] +#[traversable(transparent)] +pub struct MockPct75Vec(pub MockVec); + +#[derive(Traversable)] +#[traversable(transparent)] +pub struct MockPct90Vec(pub MockVec); + +#[derive(Traversable)] +#[traversable(transparent)] +pub struct MockLastVec(pub MockVec); + +#[derive(Traversable)] +#[traversable(transparent)] +pub struct MockFirstVec(pub MockVec); + +// ============================================================================ +// Transparent Lazy Types (matching real LazySum, LazyCumulative, etc.) +// ============================================================================ +// All real Lazy* types are now transparent. 
+ +#[derive(Traversable)] +#[traversable(transparent)] +pub struct MockLazySum(pub MockVec); + +#[derive(Traversable)] +#[traversable(transparent)] +pub struct MockLazyCumulative(pub MockVec); + +#[derive(Traversable)] +#[traversable(transparent)] +pub struct MockLazyMin(pub MockVec); + +#[derive(Traversable)] +#[traversable(transparent)] +pub struct MockLazyMax(pub MockVec); + +#[derive(Traversable)] +#[traversable(transparent)] +pub struct MockLazyAverage(pub MockVec); + +#[derive(Traversable)] +#[traversable(transparent)] +pub struct MockLazyFirst(pub MockVec); + +#[derive(Traversable)] +#[traversable(transparent)] +pub struct MockLazyLast(pub MockVec); + +// ============================================================================ +// Helper functions +// ============================================================================ + +pub fn get_leaf_indexes(node: &TreeNode) -> Option<&BTreeSet> { + match node { + TreeNode::Leaf(l) => Some(l.indexes()), + _ => None, + } +} + +pub fn get_leaf_name(node: &TreeNode) -> Option<&str> { + match node { + TreeNode::Leaf(l) => Some(l.name()), + _ => None, + } +} + +pub fn assert_is_leaf(node: &TreeNode, expected_name: &str) { + match node { + TreeNode::Leaf(l) => assert_eq!(l.name(), expected_name), + TreeNode::Branch(map) => panic!("Expected Leaf, got Branch: {:?}", map.keys().collect::>()), + } +} + +pub fn assert_is_branch_with_keys(node: &TreeNode, expected_keys: &[&str]) { + match node { + TreeNode::Branch(map) => { + for key in expected_keys { + assert!(map.contains_key(*key), "Missing key: {key}"); + } + assert_eq!(map.len(), expected_keys.len(), "Got keys: {:?}", map.keys().collect::>()); + } + TreeNode::Leaf(l) => panic!("Expected Branch, got Leaf: {}", l.name()), + } +} diff --git a/crates/brk_traversable/tests/traversable/computed_types.rs b/crates/brk_traversable/tests/traversable/computed_types.rs new file mode 100644 index 000000000..f98c4c910 --- /dev/null +++ 
b/crates/brk_traversable/tests/traversable/computed_types.rs @@ -0,0 +1,224 @@ +//! Tests for Computed types from brk_computer/src/internal/computed/ +//! +//! Computed types combine base vecs with derived aggregations. +//! With merge, all same-key leaves collapse. +//! +//! Expected outputs: +//! - ComputedDateLast -> Leaf (all same name → single leaf) +//! - ComputedBlockSumCum -> { base, sum, cumulative } (with merged indexes) +//! - DerivedComputedBlockSumCum -> { height_cumulative, sum, cumulative } + +use brk_traversable::{Index, Traversable, TreeNode}; +use brk_traversable_derive::Traversable; + +use crate::common::*; +use crate::group_types::MockSumCum; +use crate::lazy_aggregation::MockLazySumCum; +use crate::derived_date::{MockDerivedDateLast, MockDerivedDateSumCum}; + +// ============================================================================ +// ComputedDateLast - Leaf (dateindex + rest collapse to single leaf) +// ============================================================================ + +#[derive(Traversable)] +#[traversable(merge)] +pub struct MockComputedDateLast { + pub dateindex: MockLastVec, // transparent → Leaf + #[traversable(flatten)] + pub rest: MockDerivedDateLast, // merge → Leaf (all same name) +} + +#[test] +fn computed_date_last_collapses_to_leaf() { + let value = MockComputedDateLast { + dateindex: MockLastVec(MockVec::new("metric", Index::DateIndex)), + rest: MockDerivedDateLast { + weekindex: MockLazyLast(MockVec::new("metric", Index::WeekIndex)), + monthindex: MockLazyLast(MockVec::new("metric", Index::MonthIndex)), + quarterindex: MockLazyLast(MockVec::new("metric", Index::QuarterIndex)), + semesterindex: MockLazyLast(MockVec::new("metric", Index::SemesterIndex)), + yearindex: MockLazyLast(MockVec::new("metric", Index::YearIndex)), + decadeindex: MockLazyLast(MockVec::new("metric", Index::DecadeIndex)), + }, + }; + + let tree = value.to_tree_node(); + + // All same metric name → single Leaf with all indexes + match &tree 
{ + TreeNode::Leaf(l) => { + assert_eq!(l.name(), "metric"); + let indexes = l.indexes(); + assert!(indexes.contains(&Index::DateIndex)); + assert!(indexes.contains(&Index::WeekIndex)); + assert!(indexes.contains(&Index::MonthIndex)); + assert!(indexes.contains(&Index::QuarterIndex)); + assert!(indexes.contains(&Index::SemesterIndex)); + assert!(indexes.contains(&Index::YearIndex)); + assert!(indexes.contains(&Index::DecadeIndex)); + assert_eq!(indexes.len(), 7); + } + TreeNode::Branch(map) => { + panic!("Expected Leaf, got Branch: {:?}", map.keys().collect::<Vec<_>>()); + } + } +} + +// ============================================================================ +// DerivedComputedBlockSumCum - { height_cumulative, sum, cumulative } +// ============================================================================ + +// For merge to work correctly, all fields produce Branch output that merge will lift. +// height_cumulative is renamed to "cumulative" so it merges with other cumulative leaves. +// NO flatten used - rely entirely on merge to lift and merge same-key leaves. 
+#[derive(Traversable)] +#[traversable(merge)] +pub struct MockDerivedComputedBlockSumCum { + #[traversable(rename = "cumulative")] // rename to merge with other cumulative leaves + pub height_cumulative: MockCumulativeVec, + pub dateindex: MockSumCum, // produces { sum, cumulative } - merge will lift + pub dates: MockDerivedDateSumCum, // produces { sum, cumulative } - merge will lift + pub difficultyepoch: MockLazySumCum, // produces { sum, cumulative } - merge will lift +} + +#[test] +fn derived_computed_block_sum_cum_merges_all() { + let value = MockDerivedComputedBlockSumCum { + height_cumulative: MockCumulativeVec(MockVec::new("metric_cumulative", Index::Height)), + dateindex: MockSumCum { + sum: MockSumVec(MockVec::new("metric_sum", Index::DateIndex)), + cumulative: MockCumulativeVec(MockVec::new("metric_cumulative", Index::DateIndex)), + }, + dates: MockDerivedDateSumCum { + weekindex: MockLazySumCum { + sum: MockLazySum(MockVec::new("metric_sum", Index::WeekIndex)), + cumulative: MockLazyCumulative(MockVec::new("metric_cumulative", Index::WeekIndex)), + }, + monthindex: MockLazySumCum { + sum: MockLazySum(MockVec::new("metric_sum", Index::MonthIndex)), + cumulative: MockLazyCumulative(MockVec::new("metric_cumulative", Index::MonthIndex)), + }, + quarterindex: MockLazySumCum { + sum: MockLazySum(MockVec::new("metric_sum", Index::QuarterIndex)), + cumulative: MockLazyCumulative(MockVec::new("metric_cumulative", Index::QuarterIndex)), + }, + yearindex: MockLazySumCum { + sum: MockLazySum(MockVec::new("metric_sum", Index::YearIndex)), + cumulative: MockLazyCumulative(MockVec::new("metric_cumulative", Index::YearIndex)), + }, + }, + difficultyepoch: MockLazySumCum { + sum: MockLazySum(MockVec::new("metric_sum", Index::DifficultyEpoch)), + cumulative: MockLazyCumulative(MockVec::new("metric_cumulative", Index::DifficultyEpoch)), + }, + }; + + let tree = value.to_tree_node(); + + // Debug: print tree structure + println!("Tree: {tree:#?}"); + + // 
height_cumulative renamed to "cumulative" → merges with other cumulative leaves + // sum merges from dateindex + dates + difficultyepoch + // cumulative merges from height_cumulative (renamed) + dateindex + dates + difficultyepoch + assert_is_branch_with_keys(&tree, &["sum", "cumulative"]); + + if let TreeNode::Branch(map) = &tree { + // sum: DateIndex + Week + Month + Quarter + Year + DifficultyEpoch + let sum_indexes = get_leaf_indexes(map.get("sum").unwrap()).unwrap(); + assert!(sum_indexes.contains(&Index::DateIndex)); + assert!(sum_indexes.contains(&Index::WeekIndex)); + assert!(sum_indexes.contains(&Index::MonthIndex)); + assert!(sum_indexes.contains(&Index::QuarterIndex)); + assert!(sum_indexes.contains(&Index::YearIndex)); + assert!(sum_indexes.contains(&Index::DifficultyEpoch)); + assert_eq!(sum_indexes.len(), 6); + + // cumulative: Height + DateIndex + Week + Month + Quarter + Year + DifficultyEpoch + let cum_indexes = get_leaf_indexes(map.get("cumulative").unwrap()).unwrap(); + assert!(cum_indexes.contains(&Index::Height), "cumulative SHOULD have Height from renamed height_cumulative"); + assert!(cum_indexes.contains(&Index::DateIndex)); + assert!(cum_indexes.contains(&Index::WeekIndex)); + assert!(cum_indexes.contains(&Index::MonthIndex)); + assert!(cum_indexes.contains(&Index::QuarterIndex)); + assert!(cum_indexes.contains(&Index::YearIndex)); + assert!(cum_indexes.contains(&Index::DifficultyEpoch)); + assert_eq!(cum_indexes.len(), 7); + } +} + +// ============================================================================ +// ComputedBlockSumCum - { base, sum, cumulative } +// ============================================================================ + +#[derive(Traversable)] +#[traversable(merge)] +pub struct MockComputedBlockSumCum { + #[traversable(wrap = "base")] + pub height: MockSumVec, // wrap="base" → { base: Leaf } + pub rest: MockDerivedComputedBlockSumCum, // merge will lift from rest +} + +#[test] +fn 
computed_block_sum_cum_produces_base_sum_cumulative() { + let value = MockComputedBlockSumCum { + height: MockSumVec(MockVec::new("metric", Index::Height)), + rest: MockDerivedComputedBlockSumCum { + height_cumulative: MockCumulativeVec(MockVec::new("metric_cumulative", Index::Height)), + dateindex: MockSumCum { + sum: MockSumVec(MockVec::new("metric_sum", Index::DateIndex)), + cumulative: MockCumulativeVec(MockVec::new("metric_cumulative", Index::DateIndex)), + }, + dates: MockDerivedDateSumCum { + weekindex: MockLazySumCum { + sum: MockLazySum(MockVec::new("metric_sum", Index::WeekIndex)), + cumulative: MockLazyCumulative(MockVec::new("metric_cumulative", Index::WeekIndex)), + }, + monthindex: MockLazySumCum { + sum: MockLazySum(MockVec::new("metric_sum", Index::MonthIndex)), + cumulative: MockLazyCumulative(MockVec::new("metric_cumulative", Index::MonthIndex)), + }, + quarterindex: MockLazySumCum { + sum: MockLazySum(MockVec::new("metric_sum", Index::QuarterIndex)), + cumulative: MockLazyCumulative(MockVec::new("metric_cumulative", Index::QuarterIndex)), + }, + yearindex: MockLazySumCum { + sum: MockLazySum(MockVec::new("metric_sum", Index::YearIndex)), + cumulative: MockLazyCumulative(MockVec::new("metric_cumulative", Index::YearIndex)), + }, + }, + difficultyepoch: MockLazySumCum { + sum: MockLazySum(MockVec::new("metric_sum", Index::DifficultyEpoch)), + cumulative: MockLazyCumulative(MockVec::new("metric_cumulative", Index::DifficultyEpoch)), + }, + }, + }; + + let tree = value.to_tree_node(); + + // base: Height only (from wrap="base") + // sum: all indexes except Height + // cumulative: all indexes INCLUDING Height (from renamed height_cumulative) + assert_is_branch_with_keys(&tree, &["base", "sum", "cumulative"]); + + if let TreeNode::Branch(map) = &tree { + // base: Height only + let base_indexes = get_leaf_indexes(map.get("base").unwrap()).unwrap(); + assert!(base_indexes.contains(&Index::Height)); + assert_eq!(base_indexes.len(), 1); + + // sum: 
DateIndex + all dates + DifficultyEpoch (NOT Height) + let sum_indexes = get_leaf_indexes(map.get("sum").unwrap()).unwrap(); + assert!(!sum_indexes.contains(&Index::Height), "sum should NOT have Height"); + assert!(sum_indexes.contains(&Index::DateIndex)); + assert!(sum_indexes.contains(&Index::DifficultyEpoch)); + assert_eq!(sum_indexes.len(), 6); + + // cumulative: Height + DateIndex + all dates + DifficultyEpoch + let cum_indexes = get_leaf_indexes(map.get("cumulative").unwrap()).unwrap(); + assert!(cum_indexes.contains(&Index::Height), "cumulative SHOULD have Height"); + assert!(cum_indexes.contains(&Index::DateIndex)); + assert!(cum_indexes.contains(&Index::DifficultyEpoch)); + assert_eq!(cum_indexes.len(), 7); + } +} diff --git a/crates/brk_traversable/tests/traversable/derived_date.rs b/crates/brk_traversable/tests/traversable/derived_date.rs new file mode 100644 index 000000000..43ab2cdef --- /dev/null +++ b/crates/brk_traversable/tests/traversable/derived_date.rs @@ -0,0 +1,172 @@ +//! Tests for Derived Date types from brk_computer/src/internal/derived/date/ +//! +//! Derived Date types aggregate metrics across multiple time periods (week, month, etc.). +//! With merge, all same-key leaves collapse across time periods. +//! +//! Expected outputs: +//! - DerivedDateLast -> Leaf (all indexes merged) +//! - DerivedDateSumCum -> { sum: Leaf(all), cumulative: Leaf(all) } +//! - DerivedDateFull -> { average, min, max, sum, cumulative } (all with merged indexes) +//! - etc. 
+ +use brk_traversable::{Index, Traversable, TreeNode}; +use brk_traversable_derive::Traversable; + +use crate::common::*; +use crate::lazy_aggregation::{MockLazyFull, MockLazySumCum}; + +// ============================================================================ +// DerivedDateLast - Leaf (all same name → single leaf with all indexes) +// ============================================================================ + +#[derive(Traversable)] +#[traversable(merge)] +pub struct MockDerivedDateLast { + pub weekindex: MockLazyLast, + pub monthindex: MockLazyLast, + pub quarterindex: MockLazyLast, + pub semesterindex: MockLazyLast, + pub yearindex: MockLazyLast, + pub decadeindex: MockLazyLast, +} + +#[test] +fn derived_date_last_collapses_to_leaf() { + let value = MockDerivedDateLast { + weekindex: MockLazyLast(MockVec::new("metric", Index::WeekIndex)), + monthindex: MockLazyLast(MockVec::new("metric", Index::MonthIndex)), + quarterindex: MockLazyLast(MockVec::new("metric", Index::QuarterIndex)), + semesterindex: MockLazyLast(MockVec::new("metric", Index::SemesterIndex)), + yearindex: MockLazyLast(MockVec::new("metric", Index::YearIndex)), + decadeindex: MockLazyLast(MockVec::new("metric", Index::DecadeIndex)), + }; + + let tree = value.to_tree_node(); + + // All same metric name → collapses to single Leaf with all indexes + match &tree { + TreeNode::Leaf(l) => { + assert_eq!(l.name(), "metric"); + let indexes = l.indexes(); + assert!(indexes.contains(&Index::WeekIndex)); + assert!(indexes.contains(&Index::MonthIndex)); + assert!(indexes.contains(&Index::QuarterIndex)); + assert!(indexes.contains(&Index::SemesterIndex)); + assert!(indexes.contains(&Index::YearIndex)); + assert!(indexes.contains(&Index::DecadeIndex)); + assert_eq!(indexes.len(), 6); + } + TreeNode::Branch(map) => { + panic!("Expected Leaf, got Branch: {:?}", map.keys().collect::>()); + } + } +} + +// ============================================================================ +// DerivedDateSumCum - 
{ sum: Leaf(all), cumulative: Leaf(all) } +// ============================================================================ + +#[derive(Traversable)] +#[traversable(merge)] +pub struct MockDerivedDateSumCum { + pub weekindex: MockLazySumCum, + pub monthindex: MockLazySumCum, + pub quarterindex: MockLazySumCum, + pub yearindex: MockLazySumCum, +} + +#[test] +fn derived_date_sum_cum_merges_all_time_periods() { + let value = MockDerivedDateSumCum { + weekindex: MockLazySumCum { + sum: MockLazySum(MockVec::new("metric_sum", Index::WeekIndex)), + cumulative: MockLazyCumulative(MockVec::new("metric_cumulative", Index::WeekIndex)), + }, + monthindex: MockLazySumCum { + sum: MockLazySum(MockVec::new("metric_sum", Index::MonthIndex)), + cumulative: MockLazyCumulative(MockVec::new("metric_cumulative", Index::MonthIndex)), + }, + quarterindex: MockLazySumCum { + sum: MockLazySum(MockVec::new("metric_sum", Index::QuarterIndex)), + cumulative: MockLazyCumulative(MockVec::new("metric_cumulative", Index::QuarterIndex)), + }, + yearindex: MockLazySumCum { + sum: MockLazySum(MockVec::new("metric_sum", Index::YearIndex)), + cumulative: MockLazyCumulative(MockVec::new("metric_cumulative", Index::YearIndex)), + }, + }; + + let tree = value.to_tree_node(); + + // Merge lifts from all time periods → { sum: Leaf(all), cumulative: Leaf(all) } + assert_is_branch_with_keys(&tree, &["sum", "cumulative"]); + + if let TreeNode::Branch(map) = &tree { + // sum should have all 4 indexes + let sum_indexes = get_leaf_indexes(map.get("sum").unwrap()).unwrap(); + assert!(sum_indexes.contains(&Index::WeekIndex)); + assert!(sum_indexes.contains(&Index::MonthIndex)); + assert!(sum_indexes.contains(&Index::QuarterIndex)); + assert!(sum_indexes.contains(&Index::YearIndex)); + assert_eq!(sum_indexes.len(), 4); + + // cumulative should have all 4 indexes + let cum_indexes = get_leaf_indexes(map.get("cumulative").unwrap()).unwrap(); + assert_eq!(cum_indexes.len(), 4); + } +} + +// 
============================================================================ +// DerivedDateFull - { average, min, max, sum, cumulative } (all merged) +// ============================================================================ + +#[derive(Traversable)] +#[traversable(merge)] +pub struct MockDerivedDateFull { + pub weekindex: MockLazyFull, + pub monthindex: MockLazyFull, + pub yearindex: MockLazyFull, +} + +#[test] +fn derived_date_full_merges_all_stats() { + let value = MockDerivedDateFull { + weekindex: MockLazyFull { + average: MockLazyAverage(MockVec::new("m_avg", Index::WeekIndex)), + min: MockLazyMin(MockVec::new("m_min", Index::WeekIndex)), + max: MockLazyMax(MockVec::new("m_max", Index::WeekIndex)), + sum: MockLazySum(MockVec::new("m_sum", Index::WeekIndex)), + cumulative: MockLazyCumulative(MockVec::new("m_cum", Index::WeekIndex)), + }, + monthindex: MockLazyFull { + average: MockLazyAverage(MockVec::new("m_avg", Index::MonthIndex)), + min: MockLazyMin(MockVec::new("m_min", Index::MonthIndex)), + max: MockLazyMax(MockVec::new("m_max", Index::MonthIndex)), + sum: MockLazySum(MockVec::new("m_sum", Index::MonthIndex)), + cumulative: MockLazyCumulative(MockVec::new("m_cum", Index::MonthIndex)), + }, + yearindex: MockLazyFull { + average: MockLazyAverage(MockVec::new("m_avg", Index::YearIndex)), + min: MockLazyMin(MockVec::new("m_min", Index::YearIndex)), + max: MockLazyMax(MockVec::new("m_max", Index::YearIndex)), + sum: MockLazySum(MockVec::new("m_sum", Index::YearIndex)), + cumulative: MockLazyCumulative(MockVec::new("m_cum", Index::YearIndex)), + }, + }; + + let tree = value.to_tree_node(); + + // All same keys merge → { average, min, max, sum, cumulative } + assert_is_branch_with_keys(&tree, &["average", "min", "max", "sum", "cumulative"]); + + if let TreeNode::Branch(map) = &tree { + // Each should have 3 indexes (week, month, year) + for key in ["average", "min", "max", "sum", "cumulative"] { + let indexes = 
get_leaf_indexes(map.get(key).unwrap()).unwrap(); + assert_eq!(indexes.len(), 3, "{key} should have 3 indexes"); + assert!(indexes.contains(&Index::WeekIndex)); + assert!(indexes.contains(&Index::MonthIndex)); + assert!(indexes.contains(&Index::YearIndex)); + } + } +} diff --git a/crates/brk_traversable/tests/traversable/group_types.rs b/crates/brk_traversable/tests/traversable/group_types.rs new file mode 100644 index 000000000..68187e557 --- /dev/null +++ b/crates/brk_traversable/tests/traversable/group_types.rs @@ -0,0 +1,239 @@ +//! Tests for Group types from brk_computer/src/internal/group/ +//! +//! Group types aggregate multiple Vec types into logical groupings. +//! Expected outputs (flat structures): +//! - MinMax -> { min: Leaf, max: Leaf } +//! - SumCum -> { sum: Leaf, cumulative: Leaf } +//! - Percentiles -> { pct10, pct25, median, pct75, pct90 } +//! - Distribution -> { average, min, max, percentiles: {...} } +//! - Full -> { average, min, max, percentiles, sum, cumulative } +//! - Stats -> { sum, cumulative, average, min, max } +//! 
- MinMaxAverage -> { average, min, max } + +use brk_traversable::{Index, Traversable, TreeNode}; +use brk_traversable_derive::Traversable; + +use crate::common::*; + +// ============================================================================ +// MinMax - { min: Leaf, max: Leaf } +// ============================================================================ + +#[derive(Traversable)] +pub struct MockMinMax { + #[traversable(flatten)] + pub min: MockMinVec, + #[traversable(flatten)] + pub max: MockMaxVec, +} + +#[test] +fn min_max_produces_two_leaves() { + let value = MockMinMax { + min: MockMinVec(MockVec::new("metric", Index::Height)), + max: MockMaxVec(MockVec::new("metric", Index::Height)), + }; + + let tree = value.to_tree_node(); + + assert_is_branch_with_keys(&tree, &["min", "max"]); +} + +// ============================================================================ +// SumCum - { sum: Leaf, cumulative: Leaf } +// ============================================================================ + +#[derive(Traversable)] +pub struct MockSumCum { + #[traversable(flatten)] + pub sum: MockSumVec, + #[traversable(flatten)] + pub cumulative: MockCumulativeVec, +} + +#[test] +fn sum_cum_produces_two_leaves() { + let value = MockSumCum { + sum: MockSumVec(MockVec::new("metric", Index::Height)), + cumulative: MockCumulativeVec(MockVec::new("metric", Index::Height)), + }; + + let tree = value.to_tree_node(); + + assert_is_branch_with_keys(&tree, &["sum", "cumulative"]); +} + +// ============================================================================ +// Percentiles - { pct10, pct25, median, pct75, pct90 } +// ============================================================================ + +#[derive(Traversable)] +pub struct MockPercentiles { + pub pct10: MockPct10Vec, + pub pct25: MockPct25Vec, + pub median: MockMedianVec, + pub pct75: MockPct75Vec, + pub pct90: MockPct90Vec, +} + +#[test] +fn percentiles_produces_five_leaves() { + let value = MockPercentiles { + 
pct10: MockPct10Vec(MockVec::new("m", Index::Height)), + pct25: MockPct25Vec(MockVec::new("m", Index::Height)), + median: MockMedianVec(MockVec::new("m", Index::Height)), + pct75: MockPct75Vec(MockVec::new("m", Index::Height)), + pct90: MockPct90Vec(MockVec::new("m", Index::Height)), + }; + + let tree = value.to_tree_node(); + + assert_is_branch_with_keys(&tree, &["pct10", "pct25", "median", "pct75", "pct90"]); +} + +// ============================================================================ +// Distribution - { average, min, max, percentiles: {...} } +// ============================================================================ + +#[derive(Traversable)] +pub struct MockDistribution { + #[traversable(flatten)] + pub average: MockAverageVec, + #[traversable(flatten)] + pub minmax: MockMinMax, + pub percentiles: MockPercentiles, +} + +#[test] +fn distribution_flattens_average_and_minmax() { + let value = MockDistribution { + average: MockAverageVec(MockVec::new("m", Index::Height)), + minmax: MockMinMax { + min: MockMinVec(MockVec::new("m", Index::Height)), + max: MockMaxVec(MockVec::new("m", Index::Height)), + }, + percentiles: MockPercentiles { + pct10: MockPct10Vec(MockVec::new("m", Index::Height)), + pct25: MockPct25Vec(MockVec::new("m", Index::Height)), + median: MockMedianVec(MockVec::new("m", Index::Height)), + pct75: MockPct75Vec(MockVec::new("m", Index::Height)), + pct90: MockPct90Vec(MockVec::new("m", Index::Height)), + }, + }; + + let tree = value.to_tree_node(); + + // average and minmax are flattened, percentiles stays grouped + assert_is_branch_with_keys(&tree, &["average", "min", "max", "percentiles"]); + + // Verify percentiles is a branch with 5 keys + if let TreeNode::Branch(map) = &tree { + assert_is_branch_with_keys( + map.get("percentiles").unwrap(), + &["pct10", "pct25", "median", "pct75", "pct90"], + ); + } +} + +// ============================================================================ +// Full - { average, min, max, percentiles, 
sum, cumulative } +// ============================================================================ + +#[derive(Traversable)] +pub struct MockFull { + #[traversable(flatten)] + pub distribution: MockDistribution, + #[traversable(flatten)] + pub sum_cum: MockSumCum, +} + +#[test] +fn full_flattens_distribution_and_sum_cum() { + let value = MockFull { + distribution: MockDistribution { + average: MockAverageVec(MockVec::new("m", Index::Height)), + minmax: MockMinMax { + min: MockMinVec(MockVec::new("m", Index::Height)), + max: MockMaxVec(MockVec::new("m", Index::Height)), + }, + percentiles: MockPercentiles { + pct10: MockPct10Vec(MockVec::new("m", Index::Height)), + pct25: MockPct25Vec(MockVec::new("m", Index::Height)), + median: MockMedianVec(MockVec::new("m", Index::Height)), + pct75: MockPct75Vec(MockVec::new("m", Index::Height)), + pct90: MockPct90Vec(MockVec::new("m", Index::Height)), + }, + }, + sum_cum: MockSumCum { + sum: MockSumVec(MockVec::new("m", Index::Height)), + cumulative: MockCumulativeVec(MockVec::new("m", Index::Height)), + }, + }; + + let tree = value.to_tree_node(); + + // Everything flattened except percentiles + assert_is_branch_with_keys( + &tree, + &["average", "min", "max", "percentiles", "sum", "cumulative"], + ); +} + +// ============================================================================ +// Stats - { sum, cumulative, average, min, max } +// ============================================================================ + +#[derive(Traversable)] +pub struct MockStats { + #[traversable(flatten)] + pub sum_cum: MockSumCum, + #[traversable(flatten)] + pub average: MockAverageVec, + #[traversable(flatten)] + pub minmax: MockMinMax, +} + +#[test] +fn stats_flattens_all() { + let value = MockStats { + sum_cum: MockSumCum { + sum: MockSumVec(MockVec::new("m", Index::Height)), + cumulative: MockCumulativeVec(MockVec::new("m", Index::Height)), + }, + average: MockAverageVec(MockVec::new("m", Index::Height)), + minmax: MockMinMax { + min: 
MockMinVec(MockVec::new("m", Index::Height)), + max: MockMaxVec(MockVec::new("m", Index::Height)), + }, + }; + + let tree = value.to_tree_node(); + + assert_is_branch_with_keys(&tree, &["sum", "cumulative", "average", "min", "max"]); +} + +// ============================================================================ +// MinMaxAverage - { average, min, max } +// ============================================================================ + +#[derive(Traversable)] +pub struct MockMinMaxAverage { + pub average: MockAverageVec, + #[traversable(flatten)] + pub minmax: MockMinMax, +} + +#[test] +fn min_max_average_flattens_minmax() { + let value = MockMinMaxAverage { + average: MockAverageVec(MockVec::new("m", Index::Height)), + minmax: MockMinMax { + min: MockMinVec(MockVec::new("m", Index::Height)), + max: MockMaxVec(MockVec::new("m", Index::Height)), + }, + }; + + let tree = value.to_tree_node(); + + assert_is_branch_with_keys(&tree, &["average", "min", "max"]); +} diff --git a/crates/brk_traversable/tests/traversable/lazy_aggregation.rs b/crates/brk_traversable/tests/traversable/lazy_aggregation.rs new file mode 100644 index 000000000..86f6df42f --- /dev/null +++ b/crates/brk_traversable/tests/traversable/lazy_aggregation.rs @@ -0,0 +1,176 @@ +//! Tests for Lazy Aggregation types from brk_computer/src/internal/aggregation/ +//! +//! Lazy aggregation types compose multiple Lazy* types. +//! Expected outputs (flat structures): +//! - LazySumCum -> { sum: Leaf, cumulative: Leaf } +//! - LazyDistribution -> { average, min, max } +//! 
- LazyFull -> { average, min, max, sum, cumulative } + +use brk_traversable::{Index, Traversable, TreeNode}; +use brk_traversable_derive::Traversable; + +use crate::common::*; + +// ============================================================================ +// LazySumCum - { sum: Leaf, cumulative: Leaf } +// ============================================================================ + +#[derive(Traversable)] +pub struct MockLazySumCum { + #[traversable(flatten)] + pub sum: MockLazySum, + #[traversable(flatten)] + pub cumulative: MockLazyCumulative, +} + +#[test] +fn lazy_sum_cum_produces_two_leaves() { + let value = MockLazySumCum { + sum: MockLazySum(MockVec::new("metric", Index::WeekIndex)), + cumulative: MockLazyCumulative(MockVec::new("metric", Index::WeekIndex)), + }; + + let tree = value.to_tree_node(); + + assert_is_branch_with_keys(&tree, &["sum", "cumulative"]); +} + +// ============================================================================ +// LazyDistribution - { average, min, max } +// ============================================================================ + +#[derive(Traversable)] +pub struct MockLazyDistribution { + #[traversable(flatten)] + pub average: MockLazyAverage, + #[traversable(flatten)] + pub min: MockLazyMin, + #[traversable(flatten)] + pub max: MockLazyMax, +} + +#[test] +fn lazy_distribution_produces_three_leaves() { + let value = MockLazyDistribution { + average: MockLazyAverage(MockVec::new("m", Index::WeekIndex)), + min: MockLazyMin(MockVec::new("m", Index::WeekIndex)), + max: MockLazyMax(MockVec::new("m", Index::WeekIndex)), + }; + + let tree = value.to_tree_node(); + + assert_is_branch_with_keys(&tree, &["average", "min", "max"]); +} + +// ============================================================================ +// LazyFull - { average, min, max, sum, cumulative } +// ============================================================================ + +#[derive(Traversable)] +pub struct MockLazyFull { + 
#[traversable(flatten)] + pub average: MockLazyAverage, + #[traversable(flatten)] + pub min: MockLazyMin, + #[traversable(flatten)] + pub max: MockLazyMax, + #[traversable(flatten)] + pub sum: MockLazySum, + #[traversable(flatten)] + pub cumulative: MockLazyCumulative, +} + +#[test] +fn lazy_full_produces_five_leaves() { + let value = MockLazyFull { + average: MockLazyAverage(MockVec::new("m", Index::DifficultyEpoch)), + min: MockLazyMin(MockVec::new("m", Index::DifficultyEpoch)), + max: MockLazyMax(MockVec::new("m", Index::DifficultyEpoch)), + sum: MockLazySum(MockVec::new("m", Index::DifficultyEpoch)), + cumulative: MockLazyCumulative(MockVec::new("m", Index::DifficultyEpoch)), + }; + + let tree = value.to_tree_node(); + + assert_is_branch_with_keys(&tree, &["average", "min", "max", "sum", "cumulative"]); +} + +// ============================================================================ +// Merge behavior: Multiple time periods collapse to single leaves +// ============================================================================ + +#[derive(Traversable)] +#[traversable(merge)] +pub struct MockDerivedDateSumCum { + pub weekindex: MockLazySumCum, + pub monthindex: MockLazySumCum, +} + +#[test] +fn derived_date_sum_cum_merges_time_periods() { + let value = MockDerivedDateSumCum { + weekindex: MockLazySumCum { + sum: MockLazySum(MockVec::new("metric_sum", Index::WeekIndex)), + cumulative: MockLazyCumulative(MockVec::new("metric_cumulative", Index::WeekIndex)), + }, + monthindex: MockLazySumCum { + sum: MockLazySum(MockVec::new("metric_sum", Index::MonthIndex)), + cumulative: MockLazyCumulative(MockVec::new("metric_cumulative", Index::MonthIndex)), + }, + }; + + let tree = value.to_tree_node(); + + // Merge lifts children from weekindex and monthindex + // Same keys merge → { sum: Leaf(Week+Month), cumulative: Leaf(Week+Month) } + assert_is_branch_with_keys(&tree, &["sum", "cumulative"]); + + if let TreeNode::Branch(map) = &tree { + // sum should have both 
indexes merged + let sum_indexes = get_leaf_indexes(map.get("sum").unwrap()).unwrap(); + assert!(sum_indexes.contains(&Index::WeekIndex)); + assert!(sum_indexes.contains(&Index::MonthIndex)); + + // cumulative should have both indexes merged + let cum_indexes = get_leaf_indexes(map.get("cumulative").unwrap()).unwrap(); + assert!(cum_indexes.contains(&Index::WeekIndex)); + assert!(cum_indexes.contains(&Index::MonthIndex)); + } +} + +// ============================================================================ +// Full merge: All same metric name → collapses to single Leaf +// ============================================================================ + +#[derive(Traversable)] +#[traversable(merge)] +pub struct MockDerivedDateLast { + pub weekindex: MockLazyLast, + pub monthindex: MockLazyLast, + pub yearindex: MockLazyLast, +} + +#[test] +fn derived_date_last_collapses_to_single_leaf() { + let value = MockDerivedDateLast { + weekindex: MockLazyLast(MockVec::new("metric", Index::WeekIndex)), + monthindex: MockLazyLast(MockVec::new("metric", Index::MonthIndex)), + yearindex: MockLazyLast(MockVec::new("metric", Index::YearIndex)), + }; + + let tree = value.to_tree_node(); + + // All same metric name → collapses to single Leaf with all indexes + match &tree { + TreeNode::Leaf(l) => { + assert_eq!(l.name(), "metric"); + let indexes = l.indexes(); + assert!(indexes.contains(&Index::WeekIndex)); + assert!(indexes.contains(&Index::MonthIndex)); + assert!(indexes.contains(&Index::YearIndex)); + } + TreeNode::Branch(map) => { + panic!("Expected Leaf, got Branch: {:?}", map.keys().collect::>()); + } + } +} diff --git a/crates/brk_traversable/tests/traversable/main.rs b/crates/brk_traversable/tests/traversable/main.rs new file mode 100644 index 000000000..e2da782b0 --- /dev/null +++ b/crates/brk_traversable/tests/traversable/main.rs @@ -0,0 +1,12 @@ +//! Structured integration tests for the Traversable derive macro. +//! +//! 
These tests verify the tree structure output for each category of types +//! in brk_computer/src/internal, ensuring the traversable attributes produce +//! the expected flat/merged structures. + +mod common; + +mod group_types; +mod lazy_aggregation; +mod derived_date; +mod computed_types; diff --git a/crates/brk_traversable_derive/Cargo.toml b/crates/brk_traversable_derive/Cargo.toml index 44799100f..0007c8c8c 100644 --- a/crates/brk_traversable_derive/Cargo.toml +++ b/crates/brk_traversable_derive/Cargo.toml @@ -14,4 +14,4 @@ proc-macro = true [dependencies] syn = "2.0" quote = "1.0" -proc-macro2 = "1.0.104" +proc-macro2 = "1.0.105" diff --git a/crates/brk_traversable_derive/src/lib.rs b/crates/brk_traversable_derive/src/lib.rs index ff2ec8f1c..23ed8bd48 100644 --- a/crates/brk_traversable_derive/src/lib.rs +++ b/crates/brk_traversable_derive/src/lib.rs @@ -223,17 +223,22 @@ fn get_field_attr(field: &syn::Field) -> Option<(FieldAttr, Option, Opti continue; } - // Try parsing as name-value pairs (rename = "...", wrap = "...") - if let Ok(meta) = attr.parse_args::() - && let syn::Expr::Lit(syn::ExprLit { - lit: syn::Lit::Str(lit_str), - .. - }) = &meta.value - { - if meta.path.is_ident("rename") { - rename = Some(lit_str.value()); - } else if meta.path.is_ident("wrap") { - wrap = Some(lit_str.value()); + // Try parsing as comma-separated name-value pairs (rename = "...", wrap = "...") + if let Ok(metas) = attr.parse_args_with( + syn::punctuated::Punctuated::::parse_terminated, + ) { + for meta in metas { + if let syn::Expr::Lit(syn::ExprLit { + lit: syn::Lit::Str(lit_str), + .. 
+ }) = &meta.value + { + if meta.path.is_ident("rename") { + rename = Some(lit_str.value()); + } else if meta.path.is_ident("wrap") { + wrap = Some(lit_str.value()); + } + } } } } @@ -259,30 +264,39 @@ fn generate_field_traversals(infos: &[FieldInfo], merge: bool) -> proc_macro2::T .filter(|i| matches!(i.attr, FieldAttr::Normal)) .map(|info| { let field_name = info.name; - // Use rename if specified, otherwise use field name - let key_str = info - .rename.as_deref() - .unwrap_or_else(|| field_name.to_string().leak()); + let field_name_str = field_name.to_string(); + + // Determine outer key and inner wrap key based on which attrs are present + // When both wrap and rename are present: wrap is outer container, rename is inner key + // When only wrap: wrap is outer container, field_name is inner key + // When only rename: rename is outer, no inner wrapping + let (outer_key, inner_wrap): (&str, Option<&str>) = + match (info.wrap.as_deref(), info.rename.as_deref()) { + (Some(wrap), Some(rename)) => (wrap, Some(rename)), + (Some(wrap), None) => (wrap, Some(&field_name_str)), + (None, Some(rename)) => (rename, None), + (None, None) => (&field_name_str, None), + }; // Generate tree node expression, optionally wrapped - let node_expr = if let Some(wrap_key) = &info.wrap { - quote! { brk_traversable::TreeNode::wrap(#wrap_key, nested.to_tree_node()) } + let node_expr = if let Some(inner_key) = inner_wrap { + quote! { brk_traversable::TreeNode::wrap(#inner_key, nested.to_tree_node()) } } else { quote! { nested.to_tree_node() } }; if info.is_option { quote! { - self.#field_name.as_ref().map(|nested| (String::from(#key_str), #node_expr)) + self.#field_name.as_ref().map(|nested| (String::from(#outer_key), #node_expr)) } } else { - let node_expr_self = if let Some(wrap_key) = &info.wrap { - quote! { brk_traversable::TreeNode::wrap(#wrap_key, self.#field_name.to_tree_node()) } + let node_expr_self = if let Some(inner_key) = inner_wrap { + quote! 
{ brk_traversable::TreeNode::wrap(#inner_key, self.#field_name.to_tree_node()) } } else { quote! { self.#field_name.to_tree_node() } }; quote! { - Some((String::from(#key_str), #node_expr_self)) + Some((String::from(#outer_key), #node_expr_self)) } } }) @@ -328,14 +342,17 @@ fn generate_field_traversals(infos: &[FieldInfo], merge: bool) -> proc_macro2::T }; // Build collected map initialization based on what we have + // Use merge_entry to handle duplicate keys (e.g., multiple fields renamed to same key) let (init_collected, extend_flatten) = if !has_flatten { - // No flatten fields - simple collection, no need to extend + // No flatten fields - use merge_entry for each to handle duplicates ( quote! { - let collected: std::collections::BTreeMap<_, _> = [#(#normal_entries,)*] - .into_iter() - .flatten() - .collect(); + let mut collected: std::collections::BTreeMap = + std::collections::BTreeMap::new(); + for entry in [#(#normal_entries,)*].into_iter().flatten() { + brk_traversable::TreeNode::merge_node(&mut collected, entry.0, entry.1) + .expect("Conflicting values for same key"); + } }, quote! {}, ) @@ -349,13 +366,15 @@ fn generate_field_traversals(infos: &[FieldInfo], merge: bool) -> proc_macro2::T quote! { #(#flatten_entries)* }, ) } else { - // Both normal and flatten fields + // Both normal and flatten fields - use merge_entry for normal fields ( quote! { - let mut collected: std::collections::BTreeMap<_, _> = [#(#normal_entries,)*] - .into_iter() - .flatten() - .collect(); + let mut collected: std::collections::BTreeMap = + std::collections::BTreeMap::new(); + for entry in [#(#normal_entries,)*].into_iter().flatten() { + brk_traversable::TreeNode::merge_node(&mut collected, entry.0, entry.1) + .expect("Conflicting values for same key"); + } }, quote! 
{ #(#flatten_entries)* }, ) diff --git a/crates/brk_types/src/ohlc.rs b/crates/brk_types/src/ohlc.rs index 226d49d57..a7241b967 100644 --- a/crates/brk_types/src/ohlc.rs +++ b/crates/brk_types/src/ohlc.rs @@ -156,6 +156,18 @@ impl Bytes for OHLCCents { } } +impl Add for OHLCCents { + type Output = Self; + fn add(self, rhs: Self) -> Self::Output { + Self { + open: self.open + rhs.open, + high: self.high + rhs.high, + low: self.low + rhs.low, + close: self.close + rhs.close, + } + } +} + /// OHLC (Open, High, Low, Close) data in dollars #[derive(Debug, Default, Clone, Copy, JsonSchema)] #[repr(C)] diff --git a/crates/brk_types/src/stored_u32.rs b/crates/brk_types/src/stored_u32.rs index 69eb2e0d5..0ef196256 100644 --- a/crates/brk_types/src/stored_u32.rs +++ b/crates/brk_types/src/stored_u32.rs @@ -1,4 +1,4 @@ -use std::ops::{Add, AddAssign, Div, Mul}; +use std::ops::{Add, AddAssign, Div, Mul, Sub, SubAssign}; use derive_more::Deref; use schemars::JsonSchema; @@ -101,6 +101,19 @@ impl AddAssign for StoredU32 { } } +impl Sub for StoredU32 { + type Output = Self; + fn sub(self, rhs: Self) -> Self::Output { + Self(self.0 - rhs.0) + } +} + +impl SubAssign for StoredU32 { + fn sub_assign(&mut self, rhs: Self) { + *self = *self - rhs + } +} + impl Mul for StoredU32 { type Output = Self; fn mul(self, rhs: usize) -> Self::Output { diff --git a/crates/brk_types/src/treenode.rs b/crates/brk_types/src/treenode.rs index e302c3b49..81365881f 100644 --- a/crates/brk_types/src/treenode.rs +++ b/crates/brk_types/src/treenode.rs @@ -13,17 +13,17 @@ use super::Index; pub struct MetricLeaf { /// The metric name/identifier pub name: String, - /// The value type (e.g., "Sats", "StoredF64") - pub value_type: String, + /// The Rust type (e.g., "Sats", "StoredF64") + pub kind: String, /// Available indexes for this metric pub indexes: BTreeSet, } impl MetricLeaf { - pub fn new(name: String, value_type: String, indexes: BTreeSet) -> Self { + pub fn new(name: String, kind: String, indexes: 
BTreeSet) -> Self { Self { name, - value_type, + kind, indexes, } } @@ -40,6 +40,9 @@ pub struct MetricLeafWithSchema { /// The core metric metadata #[serde(flatten)] pub leaf: MetricLeaf, + /// JSON Schema type (e.g., "integer", "number", "string", "boolean", "array", "object") + #[serde(rename = "type")] + pub openapi_type: String, /// JSON Schema for the value type #[serde(skip)] pub schema: serde_json::Value, @@ -47,7 +50,21 @@ pub struct MetricLeafWithSchema { impl MetricLeafWithSchema { pub fn new(leaf: MetricLeaf, schema: serde_json::Value) -> Self { - Self { leaf, schema } + let openapi_type = schema + .get("type") + .and_then(|v| v.as_str()) + .unwrap_or("object") + .to_string(); + Self { + leaf, + openapi_type, + schema, + } + } + + /// The OpenAPI/JSON Schema type + pub fn openapi_type(&self) -> &str { + &self.openapi_type } /// The metric name/identifier @@ -55,9 +72,9 @@ impl MetricLeafWithSchema { &self.leaf.name } - /// The value type (e.g., "Sats", "StoredF64") - pub fn value_type(&self) -> &str { - &self.leaf.value_type + /// The Rust type (e.g., "Sats", "StoredF64") + pub fn kind(&self) -> &str { + &self.leaf.kind } /// Available indexes for this metric @@ -98,7 +115,7 @@ const BASE: &str = "base"; /// List of prefixes to remove during simplification static PREFIXES: LazyLock> = LazyLock::new(|| { - ["indexes", "timeindexes", "chainindexes"] + ["indexes", "timeindexes", "chainindexes", "addresstype"] .into_iter() .chain(Index::all().iter().map(|i| i.serialize_long())) .map(|s| format!("{s}_to_")) @@ -196,7 +213,7 @@ impl TreeNode { Self::Leaf(MetricLeafWithSchema::new( MetricLeaf::new( first.name().to_string(), - first.value_type().to_string(), + first.kind().to_string(), merged_indexes, ), first.schema.clone(), @@ -205,7 +222,7 @@ impl TreeNode { /// Merges a node into the target map at the given key (consuming version). /// Returns None if there's a conflict. 
- fn merge_node( + pub fn merge_node( target: &mut BTreeMap, key: String, node: TreeNode, @@ -297,9 +314,10 @@ mod tests { TreeNode::Leaf(MetricLeafWithSchema { leaf: MetricLeaf { name: name.to_string(), - value_type: "TestType".to_string(), + kind: "TestType".to_string(), indexes: BTreeSet::from([index]), }, + openapi_type: "object".to_string(), schema: serde_json::Value::Null, }) } @@ -960,7 +978,10 @@ mod tests { // sats wrapped, rest flattened with bitcoin/dollars as plain leaves let tree = branch(vec![ // sats with wrap="sats" produces Branch { sats: Leaf } - ("sats", branch(vec![("sats", leaf("metric", Index::Height))])), + ( + "sats", + branch(vec![("sats", leaf("metric", Index::Height))]), + ), // rest with flatten: LazyDerivedBlockValue fields lifted ( "rest", @@ -1146,7 +1167,10 @@ mod tests { assert!(indexes.contains(&Index::YearIndex)); } TreeNode::Branch(map) => { - panic!("Expected Leaf, got Branch: {:?}", map.keys().collect::>()); + panic!( + "Expected Leaf, got Branch: {:?}", + map.keys().collect::>() + ); } } } @@ -1180,7 +1204,10 @@ mod tests { assert!(indexes.contains(&Index::WeekIndex)); } TreeNode::Branch(map) => { - panic!("Expected Leaf, got Branch: {:?}", map.keys().collect::>()); + panic!( + "Expected Leaf, got Branch: {:?}", + map.keys().collect::>() + ); } } } diff --git a/docs/README.md b/docs/README.md index 694dd6476..e382223b1 100644 --- a/docs/README.md +++ b/docs/README.md @@ -71,7 +71,6 @@ Built on [`rust-bitcoin`], [`vecdb`], and [`fjall`]. 
| [`brk_mcp`](./crates/brk_mcp) | Model Context Protocol server for LLM integration | | [`brk_binder`](./crates/brk_binder) | Generate typed clients (Rust, JavaScript, Python) | | [`brk_client`](./crates/brk_client) | Generated Rust API client | -| [`brk_bundler`](./crates/brk_bundler) | JavaScript bundling for web interface | **Internal** diff --git a/modules/brk-client/index.js b/modules/brk-client/index.js index bf4163652..8d0a7ab2e 100644 --- a/modules/brk-client/index.js +++ b/modules/brk-client/index.js @@ -38,13 +38,13 @@ */ /** * @typedef {Object} AddressValidation - * @property {boolean} isvalid * @property {?string=} address - * @property {?string=} scriptPubKey * @property {?boolean=} isscript + * @property {boolean} isvalid * @property {?boolean=} iswitness - * @property {?number=} witnessVersion + * @property {?string=} scriptPubKey * @property {?string=} witnessProgram + * @property {?number=} witnessVersion */ /** @typedef {TypeIndex} AnyAddressIndex */ /** @typedef {number} Bitcoin */ @@ -55,9 +55,9 @@ */ /** * @typedef {Object} BlockFeesEntry + * @property {Sats} avgFees * @property {Height} avgHeight * @property {Timestamp} timestamp - * @property {Sats} avgFees */ /** @typedef {string} BlockHash */ /** @@ -76,25 +76,25 @@ */ /** * @typedef {Object} BlockInfo - * @property {BlockHash} id - * @property {Height} height - * @property {number} txCount - * @property {number} size - * @property {Weight} weight - * @property {Timestamp} timestamp * @property {number} difficulty + * @property {Height} height + * @property {BlockHash} id + * @property {number} size + * @property {Timestamp} timestamp + * @property {number} txCount + * @property {Weight} weight */ /** * @typedef {Object} BlockRewardsEntry * @property {number} avgHeight - * @property {number} timestamp * @property {number} avgRewards + * @property {number} timestamp */ /** * @typedef {Object} BlockSizeEntry * @property {number} avgHeight - * @property {number} timestamp * @property {number} 
avgSize + * @property {number} timestamp */ /** * @typedef {Object} BlockSizesWeights @@ -103,67 +103,67 @@ */ /** * @typedef {Object} BlockStatus - * @property {boolean} inBestChain * @property {(Height|null)=} height + * @property {boolean} inBestChain * @property {(BlockHash|null)=} nextBest */ /** * @typedef {Object} BlockTimestamp - * @property {Height} height * @property {BlockHash} hash + * @property {Height} height * @property {string} timestamp */ /** * @typedef {Object} BlockWeightEntry * @property {number} avgHeight - * @property {number} timestamp * @property {number} avgWeight + * @property {number} timestamp */ /** @typedef {number} Cents */ /** @typedef {Cents} Close */ /** * @typedef {Object} DataRangeFormat - * @property {?number=} from - * @property {?number=} to * @property {?number=} count * @property {Format=} format + * @property {?number=} from + * @property {?number=} to */ /** @typedef {number} Date */ /** @typedef {number} DateIndex */ /** @typedef {number} DecadeIndex */ /** * @typedef {Object} DifficultyAdjustment - * @property {number} progressPercent + * @property {number} adjustedTimeAvg * @property {number} difficultyChange * @property {number} estimatedRetargetDate + * @property {Height} nextRetargetHeight + * @property {number} previousRetarget + * @property {number} progressPercent * @property {number} remainingBlocks * @property {number} remainingTime - * @property {number} previousRetarget - * @property {Height} nextRetargetHeight * @property {number} timeAvg - * @property {number} adjustedTimeAvg * @property {number} timeOffset */ /** * @typedef {Object} DifficultyAdjustmentEntry - * @property {Timestamp} timestamp - * @property {Height} height - * @property {number} difficulty * @property {number} changePercent + * @property {number} difficulty + * @property {Height} height + * @property {Timestamp} timestamp */ /** * @typedef {Object} DifficultyEntry - * @property {Timestamp} timestamp * @property {number} difficulty * 
@property {Height} height + * @property {Timestamp} timestamp */ /** @typedef {number} DifficultyEpoch */ /** @typedef {number} Dollars */ /** * @typedef {Object} EmptyAddressData - * @property {number} txCount * @property {number} fundedTxoCount * @property {Sats} transfered + * @property {number} txCount */ /** @typedef {TypeIndex} EmptyAddressIndex */ /** @typedef {TypeIndex} EmptyOutputIndex */ @@ -172,20 +172,20 @@ /** @typedef {number} HalvingEpoch */ /** * @typedef {Object} HashrateEntry - * @property {Timestamp} timestamp * @property {number} avgHashrate + * @property {Timestamp} timestamp */ /** * @typedef {Object} HashrateSummary - * @property {HashrateEntry[]} hashrates - * @property {DifficultyEntry[]} difficulty - * @property {number} currentHashrate * @property {number} currentDifficulty + * @property {number} currentHashrate + * @property {DifficultyEntry[]} difficulty + * @property {HashrateEntry[]} hashrates */ /** * @typedef {Object} Health - * @property {string} status * @property {string} service + * @property {string} status * @property {string} timestamp */ /** @typedef {number} Height */ @@ -198,8 +198,8 @@ /** @typedef {("dateindex"|"decadeindex"|"difficultyepoch"|"emptyoutputindex"|"halvingepoch"|"height"|"txinindex"|"monthindex"|"opreturnindex"|"txoutindex"|"p2aaddressindex"|"p2msoutputindex"|"p2pk33addressindex"|"p2pk65addressindex"|"p2pkhaddressindex"|"p2shaddressindex"|"p2traddressindex"|"p2wpkhaddressindex"|"p2wshaddressindex"|"quarterindex"|"semesterindex"|"txindex"|"unknownoutputindex"|"weekindex"|"yearindex"|"loadedaddressindex"|"emptyaddressindex")} Index */ /** * @typedef {Object} IndexInfo - * @property {Index} index * @property {string[]} aliases + * @property {Index} index */ /** @typedef {number} Limit */ /** @@ -208,12 +208,12 @@ */ /** * @typedef {Object} LoadedAddressData - * @property {number} txCount * @property {number} fundedTxoCount - * @property {number} spentTxoCount + * @property {Dollars} realizedCap * @property 
{Sats} received * @property {Sats} sent - * @property {Dollars} realizedCap + * @property {number} spentTxoCount + * @property {number} txCount */ /** @typedef {TypeIndex} LoadedAddressIndex */ /** @typedef {Cents} Low */ @@ -221,30 +221,31 @@ * @typedef {Object} MempoolBlock * @property {number} blockSize * @property {number} blockVSize + * @property {FeeRate[]} feeRange + * @property {FeeRate} medianFee * @property {number} nTx * @property {Sats} totalFees - * @property {FeeRate} medianFee - * @property {FeeRate[]} feeRange */ /** * @typedef {Object} MempoolInfo * @property {number} count - * @property {VSize} vsize * @property {Sats} totalFee + * @property {VSize} vsize */ /** @typedef {string} Metric */ /** * @typedef {Object} MetricCount * @property {number} distinctMetrics - * @property {number} totalEndpoints * @property {number} lazyEndpoints * @property {number} storedEndpoints + * @property {number} totalEndpoints */ /** * @typedef {Object} MetricLeafWithSchema - * @property {string} name - * @property {string} valueType * @property {Index[]} indexes + * @property {string} kind + * @property {string} name + * @property {string} type */ /** * @typedef {Object} MetricParam @@ -252,49 +253,49 @@ */ /** * @typedef {Object} MetricSelection - * @property {Metrics} metrics - * @property {Index} index - * @property {?number=} from - * @property {?number=} to * @property {?number=} count * @property {Format=} format + * @property {?number=} from + * @property {Index} index + * @property {Metrics} metrics + * @property {?number=} to */ /** * @typedef {Object} MetricSelectionLegacy - * @property {Index} index - * @property {Metrics} ids - * @property {?number=} from - * @property {?number=} to * @property {?number=} count * @property {Format=} format + * @property {?number=} from + * @property {Metrics} ids + * @property {Index} index + * @property {?number=} to */ /** * @typedef {Object} MetricWithIndex - * @property {Metric} metric * @property {Index} index + * 
@property {Metric} metric */ /** @typedef {string} Metrics */ /** @typedef {number} MonthIndex */ /** * @typedef {Object} OHLCCents - * @property {Open} open + * @property {Close} close * @property {High} high * @property {Low} low - * @property {Close} close + * @property {Open} open */ /** * @typedef {Object} OHLCDollars - * @property {Open} open + * @property {Close} close * @property {High} high * @property {Low} low - * @property {Close} close + * @property {Open} open */ /** * @typedef {Object} OHLCSats - * @property {Open} open + * @property {Close} close * @property {High} high * @property {Low} low - * @property {Close} close + * @property {Open} open */ /** @typedef {TypeIndex} OpReturnIndex */ /** @typedef {Cents} Open */ @@ -329,30 +330,30 @@ */ /** * @typedef {Object} PoolBlockCounts - * @property {number} all - * @property {number} _24h * @property {number} _1w + * @property {number} _24h + * @property {number} all */ /** * @typedef {Object} PoolBlockShares - * @property {number} all - * @property {number} _24h * @property {number} _1w + * @property {number} _24h + * @property {number} all */ /** * @typedef {Object} PoolDetail - * @property {PoolDetailInfo} pool * @property {PoolBlockCounts} blockCount * @property {PoolBlockShares} blockShare * @property {number} estimatedHashrate + * @property {PoolDetailInfo} pool * @property {?number=} reportedHashrate */ /** * @typedef {Object} PoolDetailInfo - * @property {number} id - * @property {string} name - * @property {string} link * @property {string[]} addresses + * @property {number} id + * @property {string} link + * @property {string} name * @property {string[]} regexes * @property {PoolSlug} slug */ @@ -369,37 +370,37 @@ */ /** * @typedef {Object} PoolStats - * @property {number} poolId - * @property {string} name - * @property {string} link * @property {number} blockCount - * @property {number} rank * @property {number} emptyBlocks - * @property {PoolSlug} slug + * @property {string} link + * 
@property {string} name + * @property {number} poolId + * @property {number} rank * @property {number} share + * @property {PoolSlug} slug */ /** * @typedef {Object} PoolsSummary - * @property {PoolStats[]} pools * @property {number} blockCount * @property {number} lastEstimatedHashrate + * @property {PoolStats[]} pools */ /** @typedef {number} QuarterIndex */ /** @typedef {number} RawLockTime */ /** * @typedef {Object} RecommendedFees + * @property {FeeRate} economyFee * @property {FeeRate} fastestFee * @property {FeeRate} halfHourFee * @property {FeeRate} hourFee - * @property {FeeRate} economyFee * @property {FeeRate} minimumFee */ /** * @typedef {Object} RewardStats - * @property {Height} startBlock * @property {Height} endBlock - * @property {Sats} totalReward + * @property {Height} startBlock * @property {Sats} totalFee + * @property {Sats} totalReward * @property {number} totalTx */ /** @typedef {number} Sats */ @@ -428,29 +429,29 @@ */ /** * @typedef {Object} Transaction + * @property {Sats} fee * @property {(TxIndex|null)=} index + * @property {RawLockTime} locktime + * @property {number} sigops + * @property {number} size + * @property {TxStatus} status * @property {Txid} txid * @property {TxVersion} version - * @property {RawLockTime} locktime - * @property {number} size - * @property {Weight} weight - * @property {number} sigops - * @property {Sats} fee * @property {TxIn[]} vin * @property {TxOut[]} vout - * @property {TxStatus} status + * @property {Weight} weight */ /** @typedef {({ [key: string]: TreeNode }|MetricLeafWithSchema)} TreeNode */ /** * @typedef {Object} TxIn - * @property {Txid} txid - * @property {Vout} vout + * @property {?string=} innerRedeemscriptAsm + * @property {boolean} isCoinbase * @property {(TxOut|null)=} prevout * @property {string} scriptsig * @property {string} scriptsigAsm - * @property {boolean} isCoinbase * @property {number} sequence - * @property {?string=} innerRedeemscriptAsm + * @property {Txid} txid + * @property 
{Vout} vout */ /** @typedef {number} TxInIndex */ /** @typedef {number} TxIndex */ @@ -463,16 +464,16 @@ /** * @typedef {Object} TxOutspend * @property {boolean} spent + * @property {(TxStatus|null)=} status * @property {(Txid|null)=} txid * @property {(Vin|null)=} vin - * @property {(TxStatus|null)=} status */ /** * @typedef {Object} TxStatus - * @property {boolean} confirmed - * @property {(Height|null)=} blockHeight * @property {(BlockHash|null)=} blockHash + * @property {(Height|null)=} blockHeight * @property {(Timestamp|null)=} blockTime + * @property {boolean} confirmed */ /** @typedef {number} TxVersion */ /** @typedef {string} Txid */ @@ -494,10 +495,10 @@ /** @typedef {TypeIndex} UnknownOutputIndex */ /** * @typedef {Object} Utxo - * @property {Txid} txid - * @property {Vout} vout * @property {TxStatus} status + * @property {Txid} txid * @property {Sats} value + * @property {Vout} vout */ /** @typedef {number} VSize */ /** @@ -764,7 +765,7 @@ function createMetricPattern3(client, name) { /** * @template T - * @typedef {{ name: string, by: { decadeindex: MetricEndpoint, difficultyepoch: MetricEndpoint, height: MetricEndpoint, monthindex: MetricEndpoint, quarterindex: MetricEndpoint, semesterindex: MetricEndpoint, weekindex: MetricEndpoint, yearindex: MetricEndpoint }, indexes: () => Index[], get: (index: Index) => MetricEndpoint|undefined }} MetricPattern4 + * @typedef {{ name: string, by: { dateindex: MetricEndpoint, decadeindex: MetricEndpoint, monthindex: MetricEndpoint, quarterindex: MetricEndpoint, semesterindex: MetricEndpoint, weekindex: MetricEndpoint, yearindex: MetricEndpoint }, indexes: () => Index[], get: (index: Index) => MetricEndpoint|undefined }} MetricPattern4 */ /** @@ -775,42 +776,6 @@ function createMetricPattern3(client, name) { * @returns {MetricPattern4} */ function createMetricPattern4(client, name) { - return { - name, - by: { - get decadeindex() { return _endpoint(client, name, 'decadeindex'); }, - get difficultyepoch() { return 
_endpoint(client, name, 'difficultyepoch'); }, - get height() { return _endpoint(client, name, 'height'); }, - get monthindex() { return _endpoint(client, name, 'monthindex'); }, - get quarterindex() { return _endpoint(client, name, 'quarterindex'); }, - get semesterindex() { return _endpoint(client, name, 'semesterindex'); }, - get weekindex() { return _endpoint(client, name, 'weekindex'); }, - get yearindex() { return _endpoint(client, name, 'yearindex'); } - }, - indexes() { - return ['decadeindex', 'difficultyepoch', 'height', 'monthindex', 'quarterindex', 'semesterindex', 'weekindex', 'yearindex']; - }, - get(index) { - if (this.indexes().includes(index)) { - return _endpoint(client, name, index); - } - } - }; -} - -/** - * @template T - * @typedef {{ name: string, by: { dateindex: MetricEndpoint, decadeindex: MetricEndpoint, monthindex: MetricEndpoint, quarterindex: MetricEndpoint, semesterindex: MetricEndpoint, weekindex: MetricEndpoint, yearindex: MetricEndpoint }, indexes: () => Index[], get: (index: Index) => MetricEndpoint|undefined }} MetricPattern5 - */ - -/** - * Create a MetricPattern5 accessor - * @template T - * @param {BrkClientBase} client - * @param {string} name - The metric vec name - * @returns {MetricPattern5} - */ -function createMetricPattern5(client, name) { return { name, by: { @@ -835,52 +800,17 @@ function createMetricPattern5(client, name) { /** * @template T - * @typedef {{ name: string, by: { decadeindex: MetricEndpoint, difficultyepoch: MetricEndpoint, monthindex: MetricEndpoint, quarterindex: MetricEndpoint, semesterindex: MetricEndpoint, weekindex: MetricEndpoint, yearindex: MetricEndpoint }, indexes: () => Index[], get: (index: Index) => MetricEndpoint|undefined }} MetricPattern6 + * @typedef {{ name: string, by: { decadeindex: MetricEndpoint, monthindex: MetricEndpoint, quarterindex: MetricEndpoint, semesterindex: MetricEndpoint, weekindex: MetricEndpoint, yearindex: MetricEndpoint }, indexes: () => Index[], get: (index: Index) 
=> MetricEndpoint|undefined }} MetricPattern5 */ /** - * Create a MetricPattern6 accessor + * Create a MetricPattern5 accessor * @template T * @param {BrkClientBase} client * @param {string} name - The metric vec name - * @returns {MetricPattern6} + * @returns {MetricPattern5} */ -function createMetricPattern6(client, name) { - return { - name, - by: { - get decadeindex() { return _endpoint(client, name, 'decadeindex'); }, - get difficultyepoch() { return _endpoint(client, name, 'difficultyepoch'); }, - get monthindex() { return _endpoint(client, name, 'monthindex'); }, - get quarterindex() { return _endpoint(client, name, 'quarterindex'); }, - get semesterindex() { return _endpoint(client, name, 'semesterindex'); }, - get weekindex() { return _endpoint(client, name, 'weekindex'); }, - get yearindex() { return _endpoint(client, name, 'yearindex'); } - }, - indexes() { - return ['decadeindex', 'difficultyepoch', 'monthindex', 'quarterindex', 'semesterindex', 'weekindex', 'yearindex']; - }, - get(index) { - if (this.indexes().includes(index)) { - return _endpoint(client, name, index); - } - } - }; -} - -/** - * @template T - * @typedef {{ name: string, by: { decadeindex: MetricEndpoint, monthindex: MetricEndpoint, quarterindex: MetricEndpoint, semesterindex: MetricEndpoint, weekindex: MetricEndpoint, yearindex: MetricEndpoint }, indexes: () => Index[], get: (index: Index) => MetricEndpoint|undefined }} MetricPattern7 - */ - -/** - * Create a MetricPattern7 accessor - * @template T - * @param {BrkClientBase} client - * @param {string} name - The metric vec name - * @returns {MetricPattern7} - */ -function createMetricPattern7(client, name) { +function createMetricPattern5(client, name) { return { name, by: { @@ -904,49 +834,17 @@ function createMetricPattern7(client, name) { /** * @template T - * @typedef {{ name: string, by: { emptyoutputindex: MetricEndpoint, opreturnindex: MetricEndpoint, p2msoutputindex: MetricEndpoint, unknownoutputindex: MetricEndpoint }, 
indexes: () => Index[], get: (index: Index) => MetricEndpoint|undefined }} MetricPattern8 + * @typedef {{ name: string, by: { quarterindex: MetricEndpoint, semesterindex: MetricEndpoint, yearindex: MetricEndpoint }, indexes: () => Index[], get: (index: Index) => MetricEndpoint|undefined }} MetricPattern6 */ /** - * Create a MetricPattern8 accessor + * Create a MetricPattern6 accessor * @template T * @param {BrkClientBase} client * @param {string} name - The metric vec name - * @returns {MetricPattern8} + * @returns {MetricPattern6} */ -function createMetricPattern8(client, name) { - return { - name, - by: { - get emptyoutputindex() { return _endpoint(client, name, 'emptyoutputindex'); }, - get opreturnindex() { return _endpoint(client, name, 'opreturnindex'); }, - get p2msoutputindex() { return _endpoint(client, name, 'p2msoutputindex'); }, - get unknownoutputindex() { return _endpoint(client, name, 'unknownoutputindex'); } - }, - indexes() { - return ['emptyoutputindex', 'opreturnindex', 'p2msoutputindex', 'unknownoutputindex']; - }, - get(index) { - if (this.indexes().includes(index)) { - return _endpoint(client, name, index); - } - } - }; -} - -/** - * @template T - * @typedef {{ name: string, by: { quarterindex: MetricEndpoint, semesterindex: MetricEndpoint, yearindex: MetricEndpoint }, indexes: () => Index[], get: (index: Index) => MetricEndpoint|undefined }} MetricPattern9 - */ - -/** - * Create a MetricPattern9 accessor - * @template T - * @param {BrkClientBase} client - * @param {string} name - The metric vec name - * @returns {MetricPattern9} - */ -function createMetricPattern9(client, name) { +function createMetricPattern6(client, name) { return { name, by: { @@ -967,17 +865,17 @@ function createMetricPattern9(client, name) { /** * @template T - * @typedef {{ name: string, by: { dateindex: MetricEndpoint, height: MetricEndpoint }, indexes: () => Index[], get: (index: Index) => MetricEndpoint|undefined }} MetricPattern10 + * @typedef {{ name: string, by: { 
dateindex: MetricEndpoint, height: MetricEndpoint }, indexes: () => Index[], get: (index: Index) => MetricEndpoint|undefined }} MetricPattern7 */ /** - * Create a MetricPattern10 accessor + * Create a MetricPattern7 accessor * @template T * @param {BrkClientBase} client * @param {string} name - The metric vec name - * @returns {MetricPattern10} + * @returns {MetricPattern7} */ -function createMetricPattern10(client, name) { +function createMetricPattern7(client, name) { return { name, by: { @@ -997,17 +895,17 @@ function createMetricPattern10(client, name) { /** * @template T - * @typedef {{ name: string, by: { dateindex: MetricEndpoint, monthindex: MetricEndpoint }, indexes: () => Index[], get: (index: Index) => MetricEndpoint|undefined }} MetricPattern11 + * @typedef {{ name: string, by: { dateindex: MetricEndpoint, monthindex: MetricEndpoint }, indexes: () => Index[], get: (index: Index) => MetricEndpoint|undefined }} MetricPattern8 */ /** - * Create a MetricPattern11 accessor + * Create a MetricPattern8 accessor * @template T * @param {BrkClientBase} client * @param {string} name - The metric vec name - * @returns {MetricPattern11} + * @returns {MetricPattern8} */ -function createMetricPattern11(client, name) { +function createMetricPattern8(client, name) { return { name, by: { @@ -1027,17 +925,17 @@ function createMetricPattern11(client, name) { /** * @template T - * @typedef {{ name: string, by: { dateindex: MetricEndpoint, weekindex: MetricEndpoint }, indexes: () => Index[], get: (index: Index) => MetricEndpoint|undefined }} MetricPattern12 + * @typedef {{ name: string, by: { dateindex: MetricEndpoint, weekindex: MetricEndpoint }, indexes: () => Index[], get: (index: Index) => MetricEndpoint|undefined }} MetricPattern9 */ /** - * Create a MetricPattern12 accessor + * Create a MetricPattern9 accessor * @template T * @param {BrkClientBase} client * @param {string} name - The metric vec name - * @returns {MetricPattern12} + * @returns {MetricPattern9} */ 
-function createMetricPattern12(client, name) { +function createMetricPattern9(client, name) { return { name, by: { @@ -1057,17 +955,17 @@ function createMetricPattern12(client, name) { /** * @template T - * @typedef {{ name: string, by: { decadeindex: MetricEndpoint, yearindex: MetricEndpoint }, indexes: () => Index[], get: (index: Index) => MetricEndpoint|undefined }} MetricPattern13 + * @typedef {{ name: string, by: { decadeindex: MetricEndpoint, yearindex: MetricEndpoint }, indexes: () => Index[], get: (index: Index) => MetricEndpoint|undefined }} MetricPattern10 */ /** - * Create a MetricPattern13 accessor + * Create a MetricPattern10 accessor * @template T * @param {BrkClientBase} client * @param {string} name - The metric vec name - * @returns {MetricPattern13} + * @returns {MetricPattern10} */ -function createMetricPattern13(client, name) { +function createMetricPattern10(client, name) { return { name, by: { @@ -1087,17 +985,17 @@ function createMetricPattern13(client, name) { /** * @template T - * @typedef {{ name: string, by: { difficultyepoch: MetricEndpoint, halvingepoch: MetricEndpoint }, indexes: () => Index[], get: (index: Index) => MetricEndpoint|undefined }} MetricPattern14 + * @typedef {{ name: string, by: { difficultyepoch: MetricEndpoint, halvingepoch: MetricEndpoint }, indexes: () => Index[], get: (index: Index) => MetricEndpoint|undefined }} MetricPattern11 */ /** - * Create a MetricPattern14 accessor + * Create a MetricPattern11 accessor * @template T * @param {BrkClientBase} client * @param {string} name - The metric vec name - * @returns {MetricPattern14} + * @returns {MetricPattern11} */ -function createMetricPattern14(client, name) { +function createMetricPattern11(client, name) { return { name, by: { @@ -1117,17 +1015,17 @@ function createMetricPattern14(client, name) { /** * @template T - * @typedef {{ name: string, by: { difficultyepoch: MetricEndpoint, height: MetricEndpoint }, indexes: () => Index[], get: (index: Index) => 
MetricEndpoint|undefined }} MetricPattern15 + * @typedef {{ name: string, by: { difficultyepoch: MetricEndpoint, height: MetricEndpoint }, indexes: () => Index[], get: (index: Index) => MetricEndpoint|undefined }} MetricPattern12 */ /** - * Create a MetricPattern15 accessor + * Create a MetricPattern12 accessor * @template T * @param {BrkClientBase} client * @param {string} name - The metric vec name - * @returns {MetricPattern15} + * @returns {MetricPattern12} */ -function createMetricPattern15(client, name) { +function createMetricPattern12(client, name) { return { name, by: { @@ -1147,17 +1045,17 @@ function createMetricPattern15(client, name) { /** * @template T - * @typedef {{ name: string, by: { halvingepoch: MetricEndpoint, height: MetricEndpoint }, indexes: () => Index[], get: (index: Index) => MetricEndpoint|undefined }} MetricPattern16 + * @typedef {{ name: string, by: { halvingepoch: MetricEndpoint, height: MetricEndpoint }, indexes: () => Index[], get: (index: Index) => MetricEndpoint|undefined }} MetricPattern13 */ /** - * Create a MetricPattern16 accessor + * Create a MetricPattern13 accessor * @template T * @param {BrkClientBase} client * @param {string} name - The metric vec name - * @returns {MetricPattern16} + * @returns {MetricPattern13} */ -function createMetricPattern16(client, name) { +function createMetricPattern13(client, name) { return { name, by: { @@ -1177,17 +1075,17 @@ function createMetricPattern16(client, name) { /** * @template T - * @typedef {{ name: string, by: { height: MetricEndpoint, txindex: MetricEndpoint }, indexes: () => Index[], get: (index: Index) => MetricEndpoint|undefined }} MetricPattern17 + * @typedef {{ name: string, by: { height: MetricEndpoint, txindex: MetricEndpoint }, indexes: () => Index[], get: (index: Index) => MetricEndpoint|undefined }} MetricPattern14 */ /** - * Create a MetricPattern17 accessor + * Create a MetricPattern14 accessor * @template T * @param {BrkClientBase} client * @param {string} name - The 
metric vec name - * @returns {MetricPattern17} + * @returns {MetricPattern14} */ -function createMetricPattern17(client, name) { +function createMetricPattern14(client, name) { return { name, by: { @@ -1207,17 +1105,17 @@ function createMetricPattern17(client, name) { /** * @template T - * @typedef {{ name: string, by: { monthindex: MetricEndpoint, quarterindex: MetricEndpoint }, indexes: () => Index[], get: (index: Index) => MetricEndpoint|undefined }} MetricPattern18 + * @typedef {{ name: string, by: { monthindex: MetricEndpoint, quarterindex: MetricEndpoint }, indexes: () => Index[], get: (index: Index) => MetricEndpoint|undefined }} MetricPattern15 */ /** - * Create a MetricPattern18 accessor + * Create a MetricPattern15 accessor * @template T * @param {BrkClientBase} client * @param {string} name - The metric vec name - * @returns {MetricPattern18} + * @returns {MetricPattern15} */ -function createMetricPattern18(client, name) { +function createMetricPattern15(client, name) { return { name, by: { @@ -1237,17 +1135,17 @@ function createMetricPattern18(client, name) { /** * @template T - * @typedef {{ name: string, by: { monthindex: MetricEndpoint, semesterindex: MetricEndpoint }, indexes: () => Index[], get: (index: Index) => MetricEndpoint|undefined }} MetricPattern19 + * @typedef {{ name: string, by: { monthindex: MetricEndpoint, semesterindex: MetricEndpoint }, indexes: () => Index[], get: (index: Index) => MetricEndpoint|undefined }} MetricPattern16 */ /** - * Create a MetricPattern19 accessor + * Create a MetricPattern16 accessor * @template T * @param {BrkClientBase} client * @param {string} name - The metric vec name - * @returns {MetricPattern19} + * @returns {MetricPattern16} */ -function createMetricPattern19(client, name) { +function createMetricPattern16(client, name) { return { name, by: { @@ -1267,17 +1165,17 @@ function createMetricPattern19(client, name) { /** * @template T - * @typedef {{ name: string, by: { monthindex: MetricEndpoint, 
weekindex: MetricEndpoint }, indexes: () => Index[], get: (index: Index) => MetricEndpoint|undefined }} MetricPattern20 + * @typedef {{ name: string, by: { monthindex: MetricEndpoint, weekindex: MetricEndpoint }, indexes: () => Index[], get: (index: Index) => MetricEndpoint|undefined }} MetricPattern17 */ /** - * Create a MetricPattern20 accessor + * Create a MetricPattern17 accessor * @template T * @param {BrkClientBase} client * @param {string} name - The metric vec name - * @returns {MetricPattern20} + * @returns {MetricPattern17} */ -function createMetricPattern20(client, name) { +function createMetricPattern17(client, name) { return { name, by: { @@ -1297,17 +1195,17 @@ function createMetricPattern20(client, name) { /** * @template T - * @typedef {{ name: string, by: { monthindex: MetricEndpoint, yearindex: MetricEndpoint }, indexes: () => Index[], get: (index: Index) => MetricEndpoint|undefined }} MetricPattern21 + * @typedef {{ name: string, by: { monthindex: MetricEndpoint, yearindex: MetricEndpoint }, indexes: () => Index[], get: (index: Index) => MetricEndpoint|undefined }} MetricPattern18 */ /** - * Create a MetricPattern21 accessor + * Create a MetricPattern18 accessor * @template T * @param {BrkClientBase} client * @param {string} name - The metric vec name - * @returns {MetricPattern21} + * @returns {MetricPattern18} */ -function createMetricPattern21(client, name) { +function createMetricPattern18(client, name) { return { name, by: { @@ -1327,17 +1225,17 @@ function createMetricPattern21(client, name) { /** * @template T - * @typedef {{ name: string, by: { dateindex: MetricEndpoint }, indexes: () => Index[], get: (index: Index) => MetricEndpoint|undefined }} MetricPattern22 + * @typedef {{ name: string, by: { dateindex: MetricEndpoint }, indexes: () => Index[], get: (index: Index) => MetricEndpoint|undefined }} MetricPattern19 */ /** - * Create a MetricPattern22 accessor + * Create a MetricPattern19 accessor * @template T * @param {BrkClientBase} 
client * @param {string} name - The metric vec name - * @returns {MetricPattern22} + * @returns {MetricPattern19} */ -function createMetricPattern22(client, name) { +function createMetricPattern19(client, name) { return { name, by: { @@ -1356,17 +1254,17 @@ function createMetricPattern22(client, name) { /** * @template T - * @typedef {{ name: string, by: { decadeindex: MetricEndpoint }, indexes: () => Index[], get: (index: Index) => MetricEndpoint|undefined }} MetricPattern23 + * @typedef {{ name: string, by: { decadeindex: MetricEndpoint }, indexes: () => Index[], get: (index: Index) => MetricEndpoint|undefined }} MetricPattern20 */ /** - * Create a MetricPattern23 accessor + * Create a MetricPattern20 accessor * @template T * @param {BrkClientBase} client * @param {string} name - The metric vec name - * @returns {MetricPattern23} + * @returns {MetricPattern20} */ -function createMetricPattern23(client, name) { +function createMetricPattern20(client, name) { return { name, by: { @@ -1385,17 +1283,17 @@ function createMetricPattern23(client, name) { /** * @template T - * @typedef {{ name: string, by: { difficultyepoch: MetricEndpoint }, indexes: () => Index[], get: (index: Index) => MetricEndpoint|undefined }} MetricPattern24 + * @typedef {{ name: string, by: { difficultyepoch: MetricEndpoint }, indexes: () => Index[], get: (index: Index) => MetricEndpoint|undefined }} MetricPattern21 */ /** - * Create a MetricPattern24 accessor + * Create a MetricPattern21 accessor * @template T * @param {BrkClientBase} client * @param {string} name - The metric vec name - * @returns {MetricPattern24} + * @returns {MetricPattern21} */ -function createMetricPattern24(client, name) { +function createMetricPattern21(client, name) { return { name, by: { @@ -1414,17 +1312,17 @@ function createMetricPattern24(client, name) { /** * @template T - * @typedef {{ name: string, by: { emptyoutputindex: MetricEndpoint }, indexes: () => Index[], get: (index: Index) => MetricEndpoint|undefined }} 
MetricPattern25 + * @typedef {{ name: string, by: { emptyoutputindex: MetricEndpoint }, indexes: () => Index[], get: (index: Index) => MetricEndpoint|undefined }} MetricPattern22 */ /** - * Create a MetricPattern25 accessor + * Create a MetricPattern22 accessor * @template T * @param {BrkClientBase} client * @param {string} name - The metric vec name - * @returns {MetricPattern25} + * @returns {MetricPattern22} */ -function createMetricPattern25(client, name) { +function createMetricPattern22(client, name) { return { name, by: { @@ -1443,17 +1341,17 @@ function createMetricPattern25(client, name) { /** * @template T - * @typedef {{ name: string, by: { height: MetricEndpoint }, indexes: () => Index[], get: (index: Index) => MetricEndpoint|undefined }} MetricPattern26 + * @typedef {{ name: string, by: { height: MetricEndpoint }, indexes: () => Index[], get: (index: Index) => MetricEndpoint|undefined }} MetricPattern23 */ /** - * Create a MetricPattern26 accessor + * Create a MetricPattern23 accessor * @template T * @param {BrkClientBase} client * @param {string} name - The metric vec name - * @returns {MetricPattern26} + * @returns {MetricPattern23} */ -function createMetricPattern26(client, name) { +function createMetricPattern23(client, name) { return { name, by: { @@ -1472,17 +1370,17 @@ function createMetricPattern26(client, name) { /** * @template T - * @typedef {{ name: string, by: { txinindex: MetricEndpoint }, indexes: () => Index[], get: (index: Index) => MetricEndpoint|undefined }} MetricPattern27 + * @typedef {{ name: string, by: { txinindex: MetricEndpoint }, indexes: () => Index[], get: (index: Index) => MetricEndpoint|undefined }} MetricPattern24 */ /** - * Create a MetricPattern27 accessor + * Create a MetricPattern24 accessor * @template T * @param {BrkClientBase} client * @param {string} name - The metric vec name - * @returns {MetricPattern27} + * @returns {MetricPattern24} */ -function createMetricPattern27(client, name) { +function 
createMetricPattern24(client, name) { return { name, by: { @@ -1501,17 +1399,17 @@ function createMetricPattern27(client, name) { /** * @template T - * @typedef {{ name: string, by: { opreturnindex: MetricEndpoint }, indexes: () => Index[], get: (index: Index) => MetricEndpoint|undefined }} MetricPattern28 + * @typedef {{ name: string, by: { opreturnindex: MetricEndpoint }, indexes: () => Index[], get: (index: Index) => MetricEndpoint|undefined }} MetricPattern25 */ /** - * Create a MetricPattern28 accessor + * Create a MetricPattern25 accessor * @template T * @param {BrkClientBase} client * @param {string} name - The metric vec name - * @returns {MetricPattern28} + * @returns {MetricPattern25} */ -function createMetricPattern28(client, name) { +function createMetricPattern25(client, name) { return { name, by: { @@ -1530,17 +1428,17 @@ function createMetricPattern28(client, name) { /** * @template T - * @typedef {{ name: string, by: { txoutindex: MetricEndpoint }, indexes: () => Index[], get: (index: Index) => MetricEndpoint|undefined }} MetricPattern29 + * @typedef {{ name: string, by: { txoutindex: MetricEndpoint }, indexes: () => Index[], get: (index: Index) => MetricEndpoint|undefined }} MetricPattern26 */ /** - * Create a MetricPattern29 accessor + * Create a MetricPattern26 accessor * @template T * @param {BrkClientBase} client * @param {string} name - The metric vec name - * @returns {MetricPattern29} + * @returns {MetricPattern26} */ -function createMetricPattern29(client, name) { +function createMetricPattern26(client, name) { return { name, by: { @@ -1559,17 +1457,17 @@ function createMetricPattern29(client, name) { /** * @template T - * @typedef {{ name: string, by: { p2aaddressindex: MetricEndpoint }, indexes: () => Index[], get: (index: Index) => MetricEndpoint|undefined }} MetricPattern30 + * @typedef {{ name: string, by: { p2aaddressindex: MetricEndpoint }, indexes: () => Index[], get: (index: Index) => MetricEndpoint|undefined }} MetricPattern27 */ 
/** - * Create a MetricPattern30 accessor + * Create a MetricPattern27 accessor * @template T * @param {BrkClientBase} client * @param {string} name - The metric vec name - * @returns {MetricPattern30} + * @returns {MetricPattern27} */ -function createMetricPattern30(client, name) { +function createMetricPattern27(client, name) { return { name, by: { @@ -1588,17 +1486,17 @@ function createMetricPattern30(client, name) { /** * @template T - * @typedef {{ name: string, by: { p2msoutputindex: MetricEndpoint }, indexes: () => Index[], get: (index: Index) => MetricEndpoint|undefined }} MetricPattern31 + * @typedef {{ name: string, by: { p2msoutputindex: MetricEndpoint }, indexes: () => Index[], get: (index: Index) => MetricEndpoint|undefined }} MetricPattern28 */ /** - * Create a MetricPattern31 accessor + * Create a MetricPattern28 accessor * @template T * @param {BrkClientBase} client * @param {string} name - The metric vec name - * @returns {MetricPattern31} + * @returns {MetricPattern28} */ -function createMetricPattern31(client, name) { +function createMetricPattern28(client, name) { return { name, by: { @@ -1617,17 +1515,17 @@ function createMetricPattern31(client, name) { /** * @template T - * @typedef {{ name: string, by: { p2pk33addressindex: MetricEndpoint }, indexes: () => Index[], get: (index: Index) => MetricEndpoint|undefined }} MetricPattern32 + * @typedef {{ name: string, by: { p2pk33addressindex: MetricEndpoint }, indexes: () => Index[], get: (index: Index) => MetricEndpoint|undefined }} MetricPattern29 */ /** - * Create a MetricPattern32 accessor + * Create a MetricPattern29 accessor * @template T * @param {BrkClientBase} client * @param {string} name - The metric vec name - * @returns {MetricPattern32} + * @returns {MetricPattern29} */ -function createMetricPattern32(client, name) { +function createMetricPattern29(client, name) { return { name, by: { @@ -1646,17 +1544,17 @@ function createMetricPattern32(client, name) { /** * @template T - * @typedef 
{{ name: string, by: { p2pk65addressindex: MetricEndpoint }, indexes: () => Index[], get: (index: Index) => MetricEndpoint|undefined }} MetricPattern33 + * @typedef {{ name: string, by: { p2pk65addressindex: MetricEndpoint }, indexes: () => Index[], get: (index: Index) => MetricEndpoint|undefined }} MetricPattern30 */ /** - * Create a MetricPattern33 accessor + * Create a MetricPattern30 accessor * @template T * @param {BrkClientBase} client * @param {string} name - The metric vec name - * @returns {MetricPattern33} + * @returns {MetricPattern30} */ -function createMetricPattern33(client, name) { +function createMetricPattern30(client, name) { return { name, by: { @@ -1675,17 +1573,17 @@ function createMetricPattern33(client, name) { /** * @template T - * @typedef {{ name: string, by: { p2pkhaddressindex: MetricEndpoint }, indexes: () => Index[], get: (index: Index) => MetricEndpoint|undefined }} MetricPattern34 + * @typedef {{ name: string, by: { p2pkhaddressindex: MetricEndpoint }, indexes: () => Index[], get: (index: Index) => MetricEndpoint|undefined }} MetricPattern31 */ /** - * Create a MetricPattern34 accessor + * Create a MetricPattern31 accessor * @template T * @param {BrkClientBase} client * @param {string} name - The metric vec name - * @returns {MetricPattern34} + * @returns {MetricPattern31} */ -function createMetricPattern34(client, name) { +function createMetricPattern31(client, name) { return { name, by: { @@ -1704,17 +1602,17 @@ function createMetricPattern34(client, name) { /** * @template T - * @typedef {{ name: string, by: { p2shaddressindex: MetricEndpoint }, indexes: () => Index[], get: (index: Index) => MetricEndpoint|undefined }} MetricPattern35 + * @typedef {{ name: string, by: { p2shaddressindex: MetricEndpoint }, indexes: () => Index[], get: (index: Index) => MetricEndpoint|undefined }} MetricPattern32 */ /** - * Create a MetricPattern35 accessor + * Create a MetricPattern32 accessor * @template T * @param {BrkClientBase} client * @param 
{string} name - The metric vec name - * @returns {MetricPattern35} + * @returns {MetricPattern32} */ -function createMetricPattern35(client, name) { +function createMetricPattern32(client, name) { return { name, by: { @@ -1733,17 +1631,17 @@ function createMetricPattern35(client, name) { /** * @template T - * @typedef {{ name: string, by: { p2traddressindex: MetricEndpoint }, indexes: () => Index[], get: (index: Index) => MetricEndpoint|undefined }} MetricPattern36 + * @typedef {{ name: string, by: { p2traddressindex: MetricEndpoint }, indexes: () => Index[], get: (index: Index) => MetricEndpoint|undefined }} MetricPattern33 */ /** - * Create a MetricPattern36 accessor + * Create a MetricPattern33 accessor * @template T * @param {BrkClientBase} client * @param {string} name - The metric vec name - * @returns {MetricPattern36} + * @returns {MetricPattern33} */ -function createMetricPattern36(client, name) { +function createMetricPattern33(client, name) { return { name, by: { @@ -1762,17 +1660,17 @@ function createMetricPattern36(client, name) { /** * @template T - * @typedef {{ name: string, by: { p2wpkhaddressindex: MetricEndpoint }, indexes: () => Index[], get: (index: Index) => MetricEndpoint|undefined }} MetricPattern37 + * @typedef {{ name: string, by: { p2wpkhaddressindex: MetricEndpoint }, indexes: () => Index[], get: (index: Index) => MetricEndpoint|undefined }} MetricPattern34 */ /** - * Create a MetricPattern37 accessor + * Create a MetricPattern34 accessor * @template T * @param {BrkClientBase} client * @param {string} name - The metric vec name - * @returns {MetricPattern37} + * @returns {MetricPattern34} */ -function createMetricPattern37(client, name) { +function createMetricPattern34(client, name) { return { name, by: { @@ -1791,17 +1689,17 @@ function createMetricPattern37(client, name) { /** * @template T - * @typedef {{ name: string, by: { p2wshaddressindex: MetricEndpoint }, indexes: () => Index[], get: (index: Index) => MetricEndpoint|undefined 
}} MetricPattern38 + * @typedef {{ name: string, by: { p2wshaddressindex: MetricEndpoint }, indexes: () => Index[], get: (index: Index) => MetricEndpoint|undefined }} MetricPattern35 */ /** - * Create a MetricPattern38 accessor + * Create a MetricPattern35 accessor * @template T * @param {BrkClientBase} client * @param {string} name - The metric vec name - * @returns {MetricPattern38} + * @returns {MetricPattern35} */ -function createMetricPattern38(client, name) { +function createMetricPattern35(client, name) { return { name, by: { @@ -1820,17 +1718,17 @@ function createMetricPattern38(client, name) { /** * @template T - * @typedef {{ name: string, by: { txindex: MetricEndpoint }, indexes: () => Index[], get: (index: Index) => MetricEndpoint|undefined }} MetricPattern39 + * @typedef {{ name: string, by: { txindex: MetricEndpoint }, indexes: () => Index[], get: (index: Index) => MetricEndpoint|undefined }} MetricPattern36 */ /** - * Create a MetricPattern39 accessor + * Create a MetricPattern36 accessor * @template T * @param {BrkClientBase} client * @param {string} name - The metric vec name - * @returns {MetricPattern39} + * @returns {MetricPattern36} */ -function createMetricPattern39(client, name) { +function createMetricPattern36(client, name) { return { name, by: { @@ -1849,17 +1747,17 @@ function createMetricPattern39(client, name) { /** * @template T - * @typedef {{ name: string, by: { unknownoutputindex: MetricEndpoint }, indexes: () => Index[], get: (index: Index) => MetricEndpoint|undefined }} MetricPattern40 + * @typedef {{ name: string, by: { unknownoutputindex: MetricEndpoint }, indexes: () => Index[], get: (index: Index) => MetricEndpoint|undefined }} MetricPattern37 */ /** - * Create a MetricPattern40 accessor + * Create a MetricPattern37 accessor * @template T * @param {BrkClientBase} client * @param {string} name - The metric vec name - * @returns {MetricPattern40} + * @returns {MetricPattern37} */ -function createMetricPattern40(client, name) { 
+function createMetricPattern37(client, name) { return { name, by: { @@ -1878,17 +1776,17 @@ function createMetricPattern40(client, name) { /** * @template T - * @typedef {{ name: string, by: { loadedaddressindex: MetricEndpoint }, indexes: () => Index[], get: (index: Index) => MetricEndpoint|undefined }} MetricPattern41 + * @typedef {{ name: string, by: { loadedaddressindex: MetricEndpoint }, indexes: () => Index[], get: (index: Index) => MetricEndpoint|undefined }} MetricPattern38 */ /** - * Create a MetricPattern41 accessor + * Create a MetricPattern38 accessor * @template T * @param {BrkClientBase} client * @param {string} name - The metric vec name - * @returns {MetricPattern41} + * @returns {MetricPattern38} */ -function createMetricPattern41(client, name) { +function createMetricPattern38(client, name) { return { name, by: { @@ -1907,17 +1805,17 @@ function createMetricPattern41(client, name) { /** * @template T - * @typedef {{ name: string, by: { emptyaddressindex: MetricEndpoint }, indexes: () => Index[], get: (index: Index) => MetricEndpoint|undefined }} MetricPattern42 + * @typedef {{ name: string, by: { emptyaddressindex: MetricEndpoint }, indexes: () => Index[], get: (index: Index) => MetricEndpoint|undefined }} MetricPattern39 */ /** - * Create a MetricPattern42 accessor + * Create a MetricPattern39 accessor * @template T * @param {BrkClientBase} client * @param {string} name - The metric vec name - * @returns {MetricPattern42} + * @returns {MetricPattern39} */ -function createMetricPattern42(client, name) { +function createMetricPattern39(client, name) { return { name, by: { @@ -1938,20 +1836,20 @@ function createMetricPattern42(client, name) { /** * @typedef {Object} RealizedPattern3 - * @property {MetricPattern22} adjustedSopr - * @property {MetricPattern22} adjustedSopr30dEma - * @property {MetricPattern22} adjustedSopr7dEma + * @property {MetricPattern19} adjustedSopr + * @property {MetricPattern19} adjustedSopr30dEma + * @property 
{MetricPattern19} adjustedSopr7dEma * @property {MetricPattern1} adjustedValueCreated * @property {MetricPattern1} adjustedValueDestroyed - * @property {MetricPattern5} mvrv + * @property {MetricPattern4} mvrv * @property {BlockCountPattern} negRealizedLoss * @property {BlockCountPattern} netRealizedPnl - * @property {MetricPattern5} netRealizedPnlCumulative30dDelta - * @property {MetricPattern5} netRealizedPnlCumulative30dDeltaRelToMarketCap - * @property {MetricPattern5} netRealizedPnlCumulative30dDeltaRelToRealizedCap + * @property {MetricPattern4} netRealizedPnlCumulative30dDelta + * @property {MetricPattern4} netRealizedPnlCumulative30dDeltaRelToMarketCap + * @property {MetricPattern4} netRealizedPnlCumulative30dDeltaRelToRealizedCap * @property {BlockCountPattern} netRealizedPnlRelToRealizedCap * @property {MetricPattern1} realizedCap - * @property {MetricPattern5} realizedCap30dDelta + * @property {MetricPattern4} realizedCap30dDelta * @property {MetricPattern1} realizedCapRelToOwnMarketCap * @property {BlockCountPattern} realizedLoss * @property {BlockCountPattern} realizedLossRelToRealizedCap @@ -1959,19 +1857,17 @@ function createMetricPattern42(client, name) { * @property {ActivePriceRatioPattern} realizedPriceExtra * @property {BlockCountPattern} realizedProfit * @property {BlockCountPattern} realizedProfitRelToRealizedCap - * @property {MetricPattern22} realizedProfitToLossRatio - * @property {DifficultyAdjustmentPattern} realizedValue - * @property {MetricPattern22} sellSideRiskRatio - * @property {MetricPattern22} sellSideRiskRatio30dEma - * @property {MetricPattern22} sellSideRiskRatio7dEma - * @property {MetricPattern22} sopr - * @property {MetricPattern22} sopr30dEma - * @property {MetricPattern22} sopr7dEma + * @property {MetricPattern19} realizedProfitToLossRatio + * @property {MetricPattern1} realizedValue + * @property {MetricPattern19} sellSideRiskRatio + * @property {MetricPattern19} sellSideRiskRatio30dEma + * @property {MetricPattern19} 
sellSideRiskRatio7dEma + * @property {MetricPattern19} sopr + * @property {MetricPattern19} sopr30dEma + * @property {MetricPattern19} sopr7dEma * @property {MetricPattern1} totalRealizedPnl - * @property {MetricPattern26} valueCreated - * @property {MetricPattern2} valueCreatedSum - * @property {MetricPattern26} valueDestroyed - * @property {MetricPattern2} valueDestroyedSum + * @property {MetricPattern1} valueCreated + * @property {MetricPattern1} valueDestroyed */ /** @@ -1982,20 +1878,20 @@ function createMetricPattern42(client, name) { */ function createRealizedPattern3(client, acc) { return { - adjustedSopr: createMetricPattern22(client, _m(acc, 'adjusted_sopr')), - adjustedSopr30dEma: createMetricPattern22(client, _m(acc, 'adjusted_sopr_30d_ema')), - adjustedSopr7dEma: createMetricPattern22(client, _m(acc, 'adjusted_sopr_7d_ema')), + adjustedSopr: createMetricPattern19(client, _m(acc, 'adjusted_sopr')), + adjustedSopr30dEma: createMetricPattern19(client, _m(acc, 'adjusted_sopr_30d_ema')), + adjustedSopr7dEma: createMetricPattern19(client, _m(acc, 'adjusted_sopr_7d_ema')), adjustedValueCreated: createMetricPattern1(client, _m(acc, 'adjusted_value_created')), adjustedValueDestroyed: createMetricPattern1(client, _m(acc, 'adjusted_value_destroyed')), - mvrv: createMetricPattern5(client, _m(acc, 'mvrv')), + mvrv: createMetricPattern4(client, _m(acc, 'mvrv')), negRealizedLoss: createBlockCountPattern(client, _m(acc, 'neg_realized_loss')), netRealizedPnl: createBlockCountPattern(client, _m(acc, 'net_realized_pnl')), - netRealizedPnlCumulative30dDelta: createMetricPattern5(client, _m(acc, 'net_realized_pnl_cumulative_30d_delta')), - netRealizedPnlCumulative30dDeltaRelToMarketCap: createMetricPattern5(client, _m(acc, 'net_realized_pnl_cumulative_30d_delta_rel_to_market_cap')), - netRealizedPnlCumulative30dDeltaRelToRealizedCap: createMetricPattern5(client, _m(acc, 'net_realized_pnl_cumulative_30d_delta_rel_to_realized_cap')), + netRealizedPnlCumulative30dDelta: 
createMetricPattern4(client, _m(acc, 'net_realized_pnl_cumulative_30d_delta')), + netRealizedPnlCumulative30dDeltaRelToMarketCap: createMetricPattern4(client, _m(acc, 'net_realized_pnl_cumulative_30d_delta_rel_to_market_cap')), + netRealizedPnlCumulative30dDeltaRelToRealizedCap: createMetricPattern4(client, _m(acc, 'net_realized_pnl_cumulative_30d_delta_rel_to_realized_cap')), netRealizedPnlRelToRealizedCap: createBlockCountPattern(client, _m(acc, 'net_realized_pnl_rel_to_realized_cap')), realizedCap: createMetricPattern1(client, _m(acc, 'realized_cap')), - realizedCap30dDelta: createMetricPattern5(client, _m(acc, 'realized_cap_30d_delta')), + realizedCap30dDelta: createMetricPattern4(client, _m(acc, 'realized_cap_30d_delta')), realizedCapRelToOwnMarketCap: createMetricPattern1(client, _m(acc, 'realized_cap_rel_to_own_market_cap')), realizedLoss: createBlockCountPattern(client, _m(acc, 'realized_loss')), realizedLossRelToRealizedCap: createBlockCountPattern(client, _m(acc, 'realized_loss_rel_to_realized_cap')), @@ -2003,56 +1899,52 @@ function createRealizedPattern3(client, acc) { realizedPriceExtra: createActivePriceRatioPattern(client, _m(acc, 'realized_price_ratio')), realizedProfit: createBlockCountPattern(client, _m(acc, 'realized_profit')), realizedProfitRelToRealizedCap: createBlockCountPattern(client, _m(acc, 'realized_profit_rel_to_realized_cap')), - realizedProfitToLossRatio: createMetricPattern22(client, _m(acc, 'realized_profit_to_loss_ratio')), - realizedValue: createDifficultyAdjustmentPattern(client, _m(acc, 'realized_value')), - sellSideRiskRatio: createMetricPattern22(client, _m(acc, 'sell_side_risk_ratio')), - sellSideRiskRatio30dEma: createMetricPattern22(client, _m(acc, 'sell_side_risk_ratio_30d_ema')), - sellSideRiskRatio7dEma: createMetricPattern22(client, _m(acc, 'sell_side_risk_ratio_7d_ema')), - sopr: createMetricPattern22(client, _m(acc, 'sopr')), - sopr30dEma: createMetricPattern22(client, _m(acc, 'sopr_30d_ema')), - sopr7dEma: 
createMetricPattern22(client, _m(acc, 'sopr_7d_ema')), + realizedProfitToLossRatio: createMetricPattern19(client, _m(acc, 'realized_profit_to_loss_ratio')), + realizedValue: createMetricPattern1(client, _m(acc, 'realized_value')), + sellSideRiskRatio: createMetricPattern19(client, _m(acc, 'sell_side_risk_ratio')), + sellSideRiskRatio30dEma: createMetricPattern19(client, _m(acc, 'sell_side_risk_ratio_30d_ema')), + sellSideRiskRatio7dEma: createMetricPattern19(client, _m(acc, 'sell_side_risk_ratio_7d_ema')), + sopr: createMetricPattern19(client, _m(acc, 'sopr')), + sopr30dEma: createMetricPattern19(client, _m(acc, 'sopr_30d_ema')), + sopr7dEma: createMetricPattern19(client, _m(acc, 'sopr_7d_ema')), totalRealizedPnl: createMetricPattern1(client, _m(acc, 'total_realized_pnl')), - valueCreated: createMetricPattern26(client, _m(acc, 'value_created')), - valueCreatedSum: createMetricPattern2(client, _m(acc, 'value_created_sum')), - valueDestroyed: createMetricPattern26(client, _m(acc, 'value_destroyed')), - valueDestroyedSum: createMetricPattern2(client, _m(acc, 'value_destroyed_sum')), + valueCreated: createMetricPattern1(client, _m(acc, 'value_created')), + valueDestroyed: createMetricPattern1(client, _m(acc, 'value_destroyed')), }; } /** * @typedef {Object} RealizedPattern4 - * @property {MetricPattern22} adjustedSopr - * @property {MetricPattern22} adjustedSopr30dEma - * @property {MetricPattern22} adjustedSopr7dEma + * @property {MetricPattern19} adjustedSopr + * @property {MetricPattern19} adjustedSopr30dEma + * @property {MetricPattern19} adjustedSopr7dEma * @property {MetricPattern1} adjustedValueCreated * @property {MetricPattern1} adjustedValueDestroyed - * @property {MetricPattern5} mvrv + * @property {MetricPattern4} mvrv * @property {BlockCountPattern} negRealizedLoss * @property {BlockCountPattern} netRealizedPnl - * @property {MetricPattern5} netRealizedPnlCumulative30dDelta - * @property {MetricPattern5} netRealizedPnlCumulative30dDeltaRelToMarketCap - * 
@property {MetricPattern5} netRealizedPnlCumulative30dDeltaRelToRealizedCap + * @property {MetricPattern4} netRealizedPnlCumulative30dDelta + * @property {MetricPattern4} netRealizedPnlCumulative30dDeltaRelToMarketCap + * @property {MetricPattern4} netRealizedPnlCumulative30dDeltaRelToRealizedCap * @property {BlockCountPattern} netRealizedPnlRelToRealizedCap * @property {MetricPattern1} realizedCap - * @property {MetricPattern5} realizedCap30dDelta + * @property {MetricPattern4} realizedCap30dDelta * @property {BlockCountPattern} realizedLoss * @property {BlockCountPattern} realizedLossRelToRealizedCap * @property {MetricPattern1} realizedPrice * @property {RealizedPriceExtraPattern} realizedPriceExtra * @property {BlockCountPattern} realizedProfit * @property {BlockCountPattern} realizedProfitRelToRealizedCap - * @property {DifficultyAdjustmentPattern} realizedValue - * @property {MetricPattern22} sellSideRiskRatio - * @property {MetricPattern22} sellSideRiskRatio30dEma - * @property {MetricPattern22} sellSideRiskRatio7dEma - * @property {MetricPattern22} sopr - * @property {MetricPattern22} sopr30dEma - * @property {MetricPattern22} sopr7dEma + * @property {MetricPattern1} realizedValue + * @property {MetricPattern19} sellSideRiskRatio + * @property {MetricPattern19} sellSideRiskRatio30dEma + * @property {MetricPattern19} sellSideRiskRatio7dEma + * @property {MetricPattern19} sopr + * @property {MetricPattern19} sopr30dEma + * @property {MetricPattern19} sopr7dEma * @property {MetricPattern1} totalRealizedPnl - * @property {MetricPattern26} valueCreated - * @property {MetricPattern2} valueCreatedSum - * @property {MetricPattern26} valueDestroyed - * @property {MetricPattern2} valueDestroyedSum + * @property {MetricPattern1} valueCreated + * @property {MetricPattern1} valueDestroyed */ /** @@ -2063,144 +1955,69 @@ function createRealizedPattern3(client, acc) { */ function createRealizedPattern4(client, acc) { return { - adjustedSopr: createMetricPattern22(client, 
_m(acc, 'adjusted_sopr')), - adjustedSopr30dEma: createMetricPattern22(client, _m(acc, 'adjusted_sopr_30d_ema')), - adjustedSopr7dEma: createMetricPattern22(client, _m(acc, 'adjusted_sopr_7d_ema')), + adjustedSopr: createMetricPattern19(client, _m(acc, 'adjusted_sopr')), + adjustedSopr30dEma: createMetricPattern19(client, _m(acc, 'adjusted_sopr_30d_ema')), + adjustedSopr7dEma: createMetricPattern19(client, _m(acc, 'adjusted_sopr_7d_ema')), adjustedValueCreated: createMetricPattern1(client, _m(acc, 'adjusted_value_created')), adjustedValueDestroyed: createMetricPattern1(client, _m(acc, 'adjusted_value_destroyed')), - mvrv: createMetricPattern5(client, _m(acc, 'mvrv')), + mvrv: createMetricPattern4(client, _m(acc, 'mvrv')), negRealizedLoss: createBlockCountPattern(client, _m(acc, 'neg_realized_loss')), netRealizedPnl: createBlockCountPattern(client, _m(acc, 'net_realized_pnl')), - netRealizedPnlCumulative30dDelta: createMetricPattern5(client, _m(acc, 'net_realized_pnl_cumulative_30d_delta')), - netRealizedPnlCumulative30dDeltaRelToMarketCap: createMetricPattern5(client, _m(acc, 'net_realized_pnl_cumulative_30d_delta_rel_to_market_cap')), - netRealizedPnlCumulative30dDeltaRelToRealizedCap: createMetricPattern5(client, _m(acc, 'net_realized_pnl_cumulative_30d_delta_rel_to_realized_cap')), + netRealizedPnlCumulative30dDelta: createMetricPattern4(client, _m(acc, 'net_realized_pnl_cumulative_30d_delta')), + netRealizedPnlCumulative30dDeltaRelToMarketCap: createMetricPattern4(client, _m(acc, 'net_realized_pnl_cumulative_30d_delta_rel_to_market_cap')), + netRealizedPnlCumulative30dDeltaRelToRealizedCap: createMetricPattern4(client, _m(acc, 'net_realized_pnl_cumulative_30d_delta_rel_to_realized_cap')), netRealizedPnlRelToRealizedCap: createBlockCountPattern(client, _m(acc, 'net_realized_pnl_rel_to_realized_cap')), realizedCap: createMetricPattern1(client, _m(acc, 'realized_cap')), - realizedCap30dDelta: createMetricPattern5(client, _m(acc, 'realized_cap_30d_delta')), + 
realizedCap30dDelta: createMetricPattern4(client, _m(acc, 'realized_cap_30d_delta')), realizedLoss: createBlockCountPattern(client, _m(acc, 'realized_loss')), realizedLossRelToRealizedCap: createBlockCountPattern(client, _m(acc, 'realized_loss_rel_to_realized_cap')), realizedPrice: createMetricPattern1(client, _m(acc, 'realized_price')), realizedPriceExtra: createRealizedPriceExtraPattern(client, _m(acc, 'realized_price')), realizedProfit: createBlockCountPattern(client, _m(acc, 'realized_profit')), realizedProfitRelToRealizedCap: createBlockCountPattern(client, _m(acc, 'realized_profit_rel_to_realized_cap')), - realizedValue: createDifficultyAdjustmentPattern(client, _m(acc, 'realized_value')), - sellSideRiskRatio: createMetricPattern22(client, _m(acc, 'sell_side_risk_ratio')), - sellSideRiskRatio30dEma: createMetricPattern22(client, _m(acc, 'sell_side_risk_ratio_30d_ema')), - sellSideRiskRatio7dEma: createMetricPattern22(client, _m(acc, 'sell_side_risk_ratio_7d_ema')), - sopr: createMetricPattern22(client, _m(acc, 'sopr')), - sopr30dEma: createMetricPattern22(client, _m(acc, 'sopr_30d_ema')), - sopr7dEma: createMetricPattern22(client, _m(acc, 'sopr_7d_ema')), + realizedValue: createMetricPattern1(client, _m(acc, 'realized_value')), + sellSideRiskRatio: createMetricPattern19(client, _m(acc, 'sell_side_risk_ratio')), + sellSideRiskRatio30dEma: createMetricPattern19(client, _m(acc, 'sell_side_risk_ratio_30d_ema')), + sellSideRiskRatio7dEma: createMetricPattern19(client, _m(acc, 'sell_side_risk_ratio_7d_ema')), + sopr: createMetricPattern19(client, _m(acc, 'sopr')), + sopr30dEma: createMetricPattern19(client, _m(acc, 'sopr_30d_ema')), + sopr7dEma: createMetricPattern19(client, _m(acc, 'sopr_7d_ema')), totalRealizedPnl: createMetricPattern1(client, _m(acc, 'total_realized_pnl')), - valueCreated: createMetricPattern26(client, _m(acc, 'value_created')), - valueCreatedSum: createMetricPattern2(client, _m(acc, 'value_created_sum')), - valueDestroyed: 
createMetricPattern26(client, _m(acc, 'value_destroyed')), - valueDestroyedSum: createMetricPattern2(client, _m(acc, 'value_destroyed_sum')), - }; -} - -/** - * @typedef {Object} RealizedPattern2 - * @property {MetricPattern5} mvrv - * @property {BlockCountPattern} negRealizedLoss - * @property {BlockCountPattern} netRealizedPnl - * @property {MetricPattern5} netRealizedPnlCumulative30dDelta - * @property {MetricPattern5} netRealizedPnlCumulative30dDeltaRelToMarketCap - * @property {MetricPattern5} netRealizedPnlCumulative30dDeltaRelToRealizedCap - * @property {BlockCountPattern} netRealizedPnlRelToRealizedCap - * @property {MetricPattern1} realizedCap - * @property {MetricPattern5} realizedCap30dDelta - * @property {MetricPattern1} realizedCapRelToOwnMarketCap - * @property {BlockCountPattern} realizedLoss - * @property {BlockCountPattern} realizedLossRelToRealizedCap - * @property {MetricPattern1} realizedPrice - * @property {ActivePriceRatioPattern} realizedPriceExtra - * @property {BlockCountPattern} realizedProfit - * @property {BlockCountPattern} realizedProfitRelToRealizedCap - * @property {MetricPattern22} realizedProfitToLossRatio - * @property {DifficultyAdjustmentPattern} realizedValue - * @property {MetricPattern22} sellSideRiskRatio - * @property {MetricPattern22} sellSideRiskRatio30dEma - * @property {MetricPattern22} sellSideRiskRatio7dEma - * @property {MetricPattern22} sopr - * @property {MetricPattern22} sopr30dEma - * @property {MetricPattern22} sopr7dEma - * @property {MetricPattern1} totalRealizedPnl - * @property {MetricPattern26} valueCreated - * @property {MetricPattern2} valueCreatedSum - * @property {MetricPattern26} valueDestroyed - * @property {MetricPattern2} valueDestroyedSum - */ - -/** - * Create a RealizedPattern2 pattern node - * @param {BrkClientBase} client - * @param {string} acc - Accumulated metric name - * @returns {RealizedPattern2} - */ -function createRealizedPattern2(client, acc) { - return { - mvrv: 
createMetricPattern5(client, _m(acc, 'mvrv')), - negRealizedLoss: createBlockCountPattern(client, _m(acc, 'neg_realized_loss')), - netRealizedPnl: createBlockCountPattern(client, _m(acc, 'net_realized_pnl')), - netRealizedPnlCumulative30dDelta: createMetricPattern5(client, _m(acc, 'net_realized_pnl_cumulative_30d_delta')), - netRealizedPnlCumulative30dDeltaRelToMarketCap: createMetricPattern5(client, _m(acc, 'net_realized_pnl_cumulative_30d_delta_rel_to_market_cap')), - netRealizedPnlCumulative30dDeltaRelToRealizedCap: createMetricPattern5(client, _m(acc, 'net_realized_pnl_cumulative_30d_delta_rel_to_realized_cap')), - netRealizedPnlRelToRealizedCap: createBlockCountPattern(client, _m(acc, 'net_realized_pnl_rel_to_realized_cap')), - realizedCap: createMetricPattern1(client, _m(acc, 'realized_cap')), - realizedCap30dDelta: createMetricPattern5(client, _m(acc, 'realized_cap_30d_delta')), - realizedCapRelToOwnMarketCap: createMetricPattern1(client, _m(acc, 'realized_cap_rel_to_own_market_cap')), - realizedLoss: createBlockCountPattern(client, _m(acc, 'realized_loss')), - realizedLossRelToRealizedCap: createBlockCountPattern(client, _m(acc, 'realized_loss_rel_to_realized_cap')), - realizedPrice: createMetricPattern1(client, _m(acc, 'realized_price')), - realizedPriceExtra: createActivePriceRatioPattern(client, _m(acc, 'realized_price_ratio')), - realizedProfit: createBlockCountPattern(client, _m(acc, 'realized_profit')), - realizedProfitRelToRealizedCap: createBlockCountPattern(client, _m(acc, 'realized_profit_rel_to_realized_cap')), - realizedProfitToLossRatio: createMetricPattern22(client, _m(acc, 'realized_profit_to_loss_ratio')), - realizedValue: createDifficultyAdjustmentPattern(client, _m(acc, 'realized_value')), - sellSideRiskRatio: createMetricPattern22(client, _m(acc, 'sell_side_risk_ratio')), - sellSideRiskRatio30dEma: createMetricPattern22(client, _m(acc, 'sell_side_risk_ratio_30d_ema')), - sellSideRiskRatio7dEma: createMetricPattern22(client, _m(acc, 
'sell_side_risk_ratio_7d_ema')), - sopr: createMetricPattern22(client, _m(acc, 'sopr')), - sopr30dEma: createMetricPattern22(client, _m(acc, 'sopr_30d_ema')), - sopr7dEma: createMetricPattern22(client, _m(acc, 'sopr_7d_ema')), - totalRealizedPnl: createMetricPattern1(client, _m(acc, 'total_realized_pnl')), - valueCreated: createMetricPattern26(client, _m(acc, 'value_created')), - valueCreatedSum: createMetricPattern2(client, _m(acc, 'value_created_sum')), - valueDestroyed: createMetricPattern26(client, _m(acc, 'value_destroyed')), - valueDestroyedSum: createMetricPattern2(client, _m(acc, 'value_destroyed_sum')), + valueCreated: createMetricPattern1(client, _m(acc, 'value_created')), + valueDestroyed: createMetricPattern1(client, _m(acc, 'value_destroyed')), }; } /** * @typedef {Object} Ratio1ySdPattern - * @property {MetricPattern5} _0sdUsd - * @property {MetricPattern5} m05sd - * @property {MetricPattern5} m05sdUsd - * @property {MetricPattern5} m15sd - * @property {MetricPattern5} m15sdUsd - * @property {MetricPattern5} m1sd - * @property {MetricPattern5} m1sdUsd - * @property {MetricPattern5} m25sd - * @property {MetricPattern5} m25sdUsd - * @property {MetricPattern5} m2sd - * @property {MetricPattern5} m2sdUsd - * @property {MetricPattern5} m3sd - * @property {MetricPattern5} m3sdUsd - * @property {MetricPattern5} p05sd - * @property {MetricPattern5} p05sdUsd - * @property {MetricPattern5} p15sd - * @property {MetricPattern5} p15sdUsd - * @property {MetricPattern5} p1sd - * @property {MetricPattern5} p1sdUsd - * @property {MetricPattern5} p25sd - * @property {MetricPattern5} p25sdUsd - * @property {MetricPattern5} p2sd - * @property {MetricPattern5} p2sdUsd - * @property {MetricPattern5} p3sd - * @property {MetricPattern5} p3sdUsd - * @property {MetricPattern5} sd - * @property {MetricPattern5} sma - * @property {MetricPattern5} zscore + * @property {MetricPattern4} _0sdUsd + * @property {MetricPattern4} m05sd + * @property {MetricPattern4} m05sdUsd + * 
@property {MetricPattern4} m15sd + * @property {MetricPattern4} m15sdUsd + * @property {MetricPattern4} m1sd + * @property {MetricPattern4} m1sdUsd + * @property {MetricPattern4} m25sd + * @property {MetricPattern4} m25sdUsd + * @property {MetricPattern4} m2sd + * @property {MetricPattern4} m2sdUsd + * @property {MetricPattern4} m3sd + * @property {MetricPattern4} m3sdUsd + * @property {MetricPattern4} p05sd + * @property {MetricPattern4} p05sdUsd + * @property {MetricPattern4} p15sd + * @property {MetricPattern4} p15sdUsd + * @property {MetricPattern4} p1sd + * @property {MetricPattern4} p1sdUsd + * @property {MetricPattern4} p25sd + * @property {MetricPattern4} p25sdUsd + * @property {MetricPattern4} p2sd + * @property {MetricPattern4} p2sdUsd + * @property {MetricPattern4} p3sd + * @property {MetricPattern4} p3sdUsd + * @property {MetricPattern4} sd + * @property {MetricPattern4} sma + * @property {MetricPattern4} zscore */ /** @@ -2211,66 +2028,133 @@ function createRealizedPattern2(client, acc) { */ function createRatio1ySdPattern(client, acc) { return { - _0sdUsd: createMetricPattern5(client, _m(acc, '0sd_usd')), - m05sd: createMetricPattern5(client, _m(acc, 'm0_5sd')), - m05sdUsd: createMetricPattern5(client, _m(acc, 'm0_5sd_usd')), - m15sd: createMetricPattern5(client, _m(acc, 'm1_5sd')), - m15sdUsd: createMetricPattern5(client, _m(acc, 'm1_5sd_usd')), - m1sd: createMetricPattern5(client, _m(acc, 'm1sd')), - m1sdUsd: createMetricPattern5(client, _m(acc, 'm1sd_usd')), - m25sd: createMetricPattern5(client, _m(acc, 'm2_5sd')), - m25sdUsd: createMetricPattern5(client, _m(acc, 'm2_5sd_usd')), - m2sd: createMetricPattern5(client, _m(acc, 'm2sd')), - m2sdUsd: createMetricPattern5(client, _m(acc, 'm2sd_usd')), - m3sd: createMetricPattern5(client, _m(acc, 'm3sd')), - m3sdUsd: createMetricPattern5(client, _m(acc, 'm3sd_usd')), - p05sd: createMetricPattern5(client, _m(acc, 'p0_5sd')), - p05sdUsd: createMetricPattern5(client, _m(acc, 'p0_5sd_usd')), - p15sd: 
createMetricPattern5(client, _m(acc, 'p1_5sd')), - p15sdUsd: createMetricPattern5(client, _m(acc, 'p1_5sd_usd')), - p1sd: createMetricPattern5(client, _m(acc, 'p1sd')), - p1sdUsd: createMetricPattern5(client, _m(acc, 'p1sd_usd')), - p25sd: createMetricPattern5(client, _m(acc, 'p2_5sd')), - p25sdUsd: createMetricPattern5(client, _m(acc, 'p2_5sd_usd')), - p2sd: createMetricPattern5(client, _m(acc, 'p2sd')), - p2sdUsd: createMetricPattern5(client, _m(acc, 'p2sd_usd')), - p3sd: createMetricPattern5(client, _m(acc, 'p3sd')), - p3sdUsd: createMetricPattern5(client, _m(acc, 'p3sd_usd')), - sd: createMetricPattern5(client, _m(acc, 'sd')), - sma: createMetricPattern5(client, _m(acc, 'sma')), - zscore: createMetricPattern5(client, _m(acc, 'zscore')), + _0sdUsd: createMetricPattern4(client, _m(acc, '0sd_usd')), + m05sd: createMetricPattern4(client, _m(acc, 'm0_5sd')), + m05sdUsd: createMetricPattern4(client, _m(acc, 'm0_5sd_usd')), + m15sd: createMetricPattern4(client, _m(acc, 'm1_5sd')), + m15sdUsd: createMetricPattern4(client, _m(acc, 'm1_5sd_usd')), + m1sd: createMetricPattern4(client, _m(acc, 'm1sd')), + m1sdUsd: createMetricPattern4(client, _m(acc, 'm1sd_usd')), + m25sd: createMetricPattern4(client, _m(acc, 'm2_5sd')), + m25sdUsd: createMetricPattern4(client, _m(acc, 'm2_5sd_usd')), + m2sd: createMetricPattern4(client, _m(acc, 'm2sd')), + m2sdUsd: createMetricPattern4(client, _m(acc, 'm2sd_usd')), + m3sd: createMetricPattern4(client, _m(acc, 'm3sd')), + m3sdUsd: createMetricPattern4(client, _m(acc, 'm3sd_usd')), + p05sd: createMetricPattern4(client, _m(acc, 'p0_5sd')), + p05sdUsd: createMetricPattern4(client, _m(acc, 'p0_5sd_usd')), + p15sd: createMetricPattern4(client, _m(acc, 'p1_5sd')), + p15sdUsd: createMetricPattern4(client, _m(acc, 'p1_5sd_usd')), + p1sd: createMetricPattern4(client, _m(acc, 'p1sd')), + p1sdUsd: createMetricPattern4(client, _m(acc, 'p1sd_usd')), + p25sd: createMetricPattern4(client, _m(acc, 'p2_5sd')), + p25sdUsd: createMetricPattern4(client, 
_m(acc, 'p2_5sd_usd')), + p2sd: createMetricPattern4(client, _m(acc, 'p2sd')), + p2sdUsd: createMetricPattern4(client, _m(acc, 'p2sd_usd')), + p3sd: createMetricPattern4(client, _m(acc, 'p3sd')), + p3sdUsd: createMetricPattern4(client, _m(acc, 'p3sd_usd')), + sd: createMetricPattern4(client, _m(acc, 'sd')), + sma: createMetricPattern4(client, _m(acc, 'sma')), + zscore: createMetricPattern4(client, _m(acc, 'zscore')), + }; +} + +/** + * @typedef {Object} RealizedPattern2 + * @property {MetricPattern4} mvrv + * @property {BlockCountPattern} negRealizedLoss + * @property {BlockCountPattern} netRealizedPnl + * @property {MetricPattern4} netRealizedPnlCumulative30dDelta + * @property {MetricPattern4} netRealizedPnlCumulative30dDeltaRelToMarketCap + * @property {MetricPattern4} netRealizedPnlCumulative30dDeltaRelToRealizedCap + * @property {BlockCountPattern} netRealizedPnlRelToRealizedCap + * @property {MetricPattern1} realizedCap + * @property {MetricPattern4} realizedCap30dDelta + * @property {MetricPattern1} realizedCapRelToOwnMarketCap + * @property {BlockCountPattern} realizedLoss + * @property {BlockCountPattern} realizedLossRelToRealizedCap + * @property {MetricPattern1} realizedPrice + * @property {ActivePriceRatioPattern} realizedPriceExtra + * @property {BlockCountPattern} realizedProfit + * @property {BlockCountPattern} realizedProfitRelToRealizedCap + * @property {MetricPattern19} realizedProfitToLossRatio + * @property {MetricPattern1} realizedValue + * @property {MetricPattern19} sellSideRiskRatio + * @property {MetricPattern19} sellSideRiskRatio30dEma + * @property {MetricPattern19} sellSideRiskRatio7dEma + * @property {MetricPattern19} sopr + * @property {MetricPattern19} sopr30dEma + * @property {MetricPattern19} sopr7dEma + * @property {MetricPattern1} totalRealizedPnl + * @property {MetricPattern1} valueCreated + * @property {MetricPattern1} valueDestroyed + */ + +/** + * Create a RealizedPattern2 pattern node + * @param {BrkClientBase} client + * 
@param {string} acc - Accumulated metric name + * @returns {RealizedPattern2} + */ +function createRealizedPattern2(client, acc) { + return { + mvrv: createMetricPattern4(client, _m(acc, 'mvrv')), + negRealizedLoss: createBlockCountPattern(client, _m(acc, 'neg_realized_loss')), + netRealizedPnl: createBlockCountPattern(client, _m(acc, 'net_realized_pnl')), + netRealizedPnlCumulative30dDelta: createMetricPattern4(client, _m(acc, 'net_realized_pnl_cumulative_30d_delta')), + netRealizedPnlCumulative30dDeltaRelToMarketCap: createMetricPattern4(client, _m(acc, 'net_realized_pnl_cumulative_30d_delta_rel_to_market_cap')), + netRealizedPnlCumulative30dDeltaRelToRealizedCap: createMetricPattern4(client, _m(acc, 'net_realized_pnl_cumulative_30d_delta_rel_to_realized_cap')), + netRealizedPnlRelToRealizedCap: createBlockCountPattern(client, _m(acc, 'net_realized_pnl_rel_to_realized_cap')), + realizedCap: createMetricPattern1(client, _m(acc, 'realized_cap')), + realizedCap30dDelta: createMetricPattern4(client, _m(acc, 'realized_cap_30d_delta')), + realizedCapRelToOwnMarketCap: createMetricPattern1(client, _m(acc, 'realized_cap_rel_to_own_market_cap')), + realizedLoss: createBlockCountPattern(client, _m(acc, 'realized_loss')), + realizedLossRelToRealizedCap: createBlockCountPattern(client, _m(acc, 'realized_loss_rel_to_realized_cap')), + realizedPrice: createMetricPattern1(client, _m(acc, 'realized_price')), + realizedPriceExtra: createActivePriceRatioPattern(client, _m(acc, 'realized_price_ratio')), + realizedProfit: createBlockCountPattern(client, _m(acc, 'realized_profit')), + realizedProfitRelToRealizedCap: createBlockCountPattern(client, _m(acc, 'realized_profit_rel_to_realized_cap')), + realizedProfitToLossRatio: createMetricPattern19(client, _m(acc, 'realized_profit_to_loss_ratio')), + realizedValue: createMetricPattern1(client, _m(acc, 'realized_value')), + sellSideRiskRatio: createMetricPattern19(client, _m(acc, 'sell_side_risk_ratio')), + sellSideRiskRatio30dEma: 
createMetricPattern19(client, _m(acc, 'sell_side_risk_ratio_30d_ema')), + sellSideRiskRatio7dEma: createMetricPattern19(client, _m(acc, 'sell_side_risk_ratio_7d_ema')), + sopr: createMetricPattern19(client, _m(acc, 'sopr')), + sopr30dEma: createMetricPattern19(client, _m(acc, 'sopr_30d_ema')), + sopr7dEma: createMetricPattern19(client, _m(acc, 'sopr_7d_ema')), + totalRealizedPnl: createMetricPattern1(client, _m(acc, 'total_realized_pnl')), + valueCreated: createMetricPattern1(client, _m(acc, 'value_created')), + valueDestroyed: createMetricPattern1(client, _m(acc, 'value_destroyed')), }; } /** * @typedef {Object} RealizedPattern - * @property {MetricPattern5} mvrv + * @property {MetricPattern4} mvrv * @property {BlockCountPattern} negRealizedLoss * @property {BlockCountPattern} netRealizedPnl - * @property {MetricPattern5} netRealizedPnlCumulative30dDelta - * @property {MetricPattern5} netRealizedPnlCumulative30dDeltaRelToMarketCap - * @property {MetricPattern5} netRealizedPnlCumulative30dDeltaRelToRealizedCap + * @property {MetricPattern4} netRealizedPnlCumulative30dDelta + * @property {MetricPattern4} netRealizedPnlCumulative30dDeltaRelToMarketCap + * @property {MetricPattern4} netRealizedPnlCumulative30dDeltaRelToRealizedCap * @property {BlockCountPattern} netRealizedPnlRelToRealizedCap * @property {MetricPattern1} realizedCap - * @property {MetricPattern5} realizedCap30dDelta + * @property {MetricPattern4} realizedCap30dDelta * @property {BlockCountPattern} realizedLoss * @property {BlockCountPattern} realizedLossRelToRealizedCap * @property {MetricPattern1} realizedPrice * @property {RealizedPriceExtraPattern} realizedPriceExtra * @property {BlockCountPattern} realizedProfit * @property {BlockCountPattern} realizedProfitRelToRealizedCap - * @property {DifficultyAdjustmentPattern} realizedValue - * @property {MetricPattern22} sellSideRiskRatio - * @property {MetricPattern22} sellSideRiskRatio30dEma - * @property {MetricPattern22} sellSideRiskRatio7dEma - * 
@property {MetricPattern22} sopr - * @property {MetricPattern22} sopr30dEma - * @property {MetricPattern22} sopr7dEma + * @property {MetricPattern1} realizedValue + * @property {MetricPattern19} sellSideRiskRatio + * @property {MetricPattern19} sellSideRiskRatio30dEma + * @property {MetricPattern19} sellSideRiskRatio7dEma + * @property {MetricPattern19} sopr + * @property {MetricPattern19} sopr30dEma + * @property {MetricPattern19} sopr7dEma * @property {MetricPattern1} totalRealizedPnl - * @property {MetricPattern26} valueCreated - * @property {MetricPattern2} valueCreatedSum - * @property {MetricPattern26} valueDestroyed - * @property {MetricPattern2} valueDestroyedSum + * @property {MetricPattern1} valueCreated + * @property {MetricPattern1} valueDestroyed */ /** @@ -2281,57 +2165,55 @@ function createRatio1ySdPattern(client, acc) { */ function createRealizedPattern(client, acc) { return { - mvrv: createMetricPattern5(client, _m(acc, 'mvrv')), + mvrv: createMetricPattern4(client, _m(acc, 'mvrv')), negRealizedLoss: createBlockCountPattern(client, _m(acc, 'neg_realized_loss')), netRealizedPnl: createBlockCountPattern(client, _m(acc, 'net_realized_pnl')), - netRealizedPnlCumulative30dDelta: createMetricPattern5(client, _m(acc, 'net_realized_pnl_cumulative_30d_delta')), - netRealizedPnlCumulative30dDeltaRelToMarketCap: createMetricPattern5(client, _m(acc, 'net_realized_pnl_cumulative_30d_delta_rel_to_market_cap')), - netRealizedPnlCumulative30dDeltaRelToRealizedCap: createMetricPattern5(client, _m(acc, 'net_realized_pnl_cumulative_30d_delta_rel_to_realized_cap')), + netRealizedPnlCumulative30dDelta: createMetricPattern4(client, _m(acc, 'net_realized_pnl_cumulative_30d_delta')), + netRealizedPnlCumulative30dDeltaRelToMarketCap: createMetricPattern4(client, _m(acc, 'net_realized_pnl_cumulative_30d_delta_rel_to_market_cap')), + netRealizedPnlCumulative30dDeltaRelToRealizedCap: createMetricPattern4(client, _m(acc, 
'net_realized_pnl_cumulative_30d_delta_rel_to_realized_cap')), netRealizedPnlRelToRealizedCap: createBlockCountPattern(client, _m(acc, 'net_realized_pnl_rel_to_realized_cap')), realizedCap: createMetricPattern1(client, _m(acc, 'realized_cap')), - realizedCap30dDelta: createMetricPattern5(client, _m(acc, 'realized_cap_30d_delta')), + realizedCap30dDelta: createMetricPattern4(client, _m(acc, 'realized_cap_30d_delta')), realizedLoss: createBlockCountPattern(client, _m(acc, 'realized_loss')), realizedLossRelToRealizedCap: createBlockCountPattern(client, _m(acc, 'realized_loss_rel_to_realized_cap')), realizedPrice: createMetricPattern1(client, _m(acc, 'realized_price')), realizedPriceExtra: createRealizedPriceExtraPattern(client, _m(acc, 'realized_price')), realizedProfit: createBlockCountPattern(client, _m(acc, 'realized_profit')), realizedProfitRelToRealizedCap: createBlockCountPattern(client, _m(acc, 'realized_profit_rel_to_realized_cap')), - realizedValue: createDifficultyAdjustmentPattern(client, _m(acc, 'realized_value')), - sellSideRiskRatio: createMetricPattern22(client, _m(acc, 'sell_side_risk_ratio')), - sellSideRiskRatio30dEma: createMetricPattern22(client, _m(acc, 'sell_side_risk_ratio_30d_ema')), - sellSideRiskRatio7dEma: createMetricPattern22(client, _m(acc, 'sell_side_risk_ratio_7d_ema')), - sopr: createMetricPattern22(client, _m(acc, 'sopr')), - sopr30dEma: createMetricPattern22(client, _m(acc, 'sopr_30d_ema')), - sopr7dEma: createMetricPattern22(client, _m(acc, 'sopr_7d_ema')), + realizedValue: createMetricPattern1(client, _m(acc, 'realized_value')), + sellSideRiskRatio: createMetricPattern19(client, _m(acc, 'sell_side_risk_ratio')), + sellSideRiskRatio30dEma: createMetricPattern19(client, _m(acc, 'sell_side_risk_ratio_30d_ema')), + sellSideRiskRatio7dEma: createMetricPattern19(client, _m(acc, 'sell_side_risk_ratio_7d_ema')), + sopr: createMetricPattern19(client, _m(acc, 'sopr')), + sopr30dEma: createMetricPattern19(client, _m(acc, 'sopr_30d_ema')), + 
sopr7dEma: createMetricPattern19(client, _m(acc, 'sopr_7d_ema')), totalRealizedPnl: createMetricPattern1(client, _m(acc, 'total_realized_pnl')), - valueCreated: createMetricPattern26(client, _m(acc, 'value_created')), - valueCreatedSum: createMetricPattern2(client, _m(acc, 'value_created_sum')), - valueDestroyed: createMetricPattern26(client, _m(acc, 'value_destroyed')), - valueDestroyedSum: createMetricPattern2(client, _m(acc, 'value_destroyed_sum')), + valueCreated: createMetricPattern1(client, _m(acc, 'value_created')), + valueDestroyed: createMetricPattern1(client, _m(acc, 'value_destroyed')), }; } /** * @typedef {Object} Price111dSmaPattern - * @property {MetricPattern5} price - * @property {MetricPattern5} ratio - * @property {MetricPattern5} ratio1mSma - * @property {MetricPattern5} ratio1wSma + * @property {MetricPattern4} price + * @property {MetricPattern4} ratio + * @property {MetricPattern4} ratio1mSma + * @property {MetricPattern4} ratio1wSma * @property {Ratio1ySdPattern} ratio1ySd * @property {Ratio1ySdPattern} ratio2ySd * @property {Ratio1ySdPattern} ratio4ySd - * @property {MetricPattern5} ratioPct1 - * @property {MetricPattern5} ratioPct1Usd - * @property {MetricPattern5} ratioPct2 - * @property {MetricPattern5} ratioPct2Usd - * @property {MetricPattern5} ratioPct5 - * @property {MetricPattern5} ratioPct5Usd - * @property {MetricPattern5} ratioPct95 - * @property {MetricPattern5} ratioPct95Usd - * @property {MetricPattern5} ratioPct98 - * @property {MetricPattern5} ratioPct98Usd - * @property {MetricPattern5} ratioPct99 - * @property {MetricPattern5} ratioPct99Usd + * @property {MetricPattern4} ratioPct1 + * @property {MetricPattern4} ratioPct1Usd + * @property {MetricPattern4} ratioPct2 + * @property {MetricPattern4} ratioPct2Usd + * @property {MetricPattern4} ratioPct5 + * @property {MetricPattern4} ratioPct5Usd + * @property {MetricPattern4} ratioPct95 + * @property {MetricPattern4} ratioPct95Usd + * @property {MetricPattern4} ratioPct98 + * 
@property {MetricPattern4} ratioPct98Usd + * @property {MetricPattern4} ratioPct99 + * @property {MetricPattern4} ratioPct99Usd * @property {Ratio1ySdPattern} ratioSd */ @@ -2343,102 +2225,49 @@ function createRealizedPattern(client, acc) { */ function createPrice111dSmaPattern(client, acc) { return { - price: createMetricPattern5(client, acc), - ratio: createMetricPattern5(client, _m(acc, 'ratio')), - ratio1mSma: createMetricPattern5(client, _m(acc, 'ratio_1m_sma')), - ratio1wSma: createMetricPattern5(client, _m(acc, 'ratio_1w_sma')), + price: createMetricPattern4(client, acc), + ratio: createMetricPattern4(client, _m(acc, 'ratio')), + ratio1mSma: createMetricPattern4(client, _m(acc, 'ratio_1m_sma')), + ratio1wSma: createMetricPattern4(client, _m(acc, 'ratio_1w_sma')), ratio1ySd: createRatio1ySdPattern(client, _m(acc, 'ratio_1y')), ratio2ySd: createRatio1ySdPattern(client, _m(acc, 'ratio_2y')), ratio4ySd: createRatio1ySdPattern(client, _m(acc, 'ratio_4y')), - ratioPct1: createMetricPattern5(client, _m(acc, 'ratio_pct1')), - ratioPct1Usd: createMetricPattern5(client, _m(acc, 'ratio_pct1_usd')), - ratioPct2: createMetricPattern5(client, _m(acc, 'ratio_pct2')), - ratioPct2Usd: createMetricPattern5(client, _m(acc, 'ratio_pct2_usd')), - ratioPct5: createMetricPattern5(client, _m(acc, 'ratio_pct5')), - ratioPct5Usd: createMetricPattern5(client, _m(acc, 'ratio_pct5_usd')), - ratioPct95: createMetricPattern5(client, _m(acc, 'ratio_pct95')), - ratioPct95Usd: createMetricPattern5(client, _m(acc, 'ratio_pct95_usd')), - ratioPct98: createMetricPattern5(client, _m(acc, 'ratio_pct98')), - ratioPct98Usd: createMetricPattern5(client, _m(acc, 'ratio_pct98_usd')), - ratioPct99: createMetricPattern5(client, _m(acc, 'ratio_pct99')), - ratioPct99Usd: createMetricPattern5(client, _m(acc, 'ratio_pct99_usd')), + ratioPct1: createMetricPattern4(client, _m(acc, 'ratio_pct1')), + ratioPct1Usd: createMetricPattern4(client, _m(acc, 'ratio_pct1_usd')), + ratioPct2: createMetricPattern4(client, 
_m(acc, 'ratio_pct2')), + ratioPct2Usd: createMetricPattern4(client, _m(acc, 'ratio_pct2_usd')), + ratioPct5: createMetricPattern4(client, _m(acc, 'ratio_pct5')), + ratioPct5Usd: createMetricPattern4(client, _m(acc, 'ratio_pct5_usd')), + ratioPct95: createMetricPattern4(client, _m(acc, 'ratio_pct95')), + ratioPct95Usd: createMetricPattern4(client, _m(acc, 'ratio_pct95_usd')), + ratioPct98: createMetricPattern4(client, _m(acc, 'ratio_pct98')), + ratioPct98Usd: createMetricPattern4(client, _m(acc, 'ratio_pct98_usd')), + ratioPct99: createMetricPattern4(client, _m(acc, 'ratio_pct99')), + ratioPct99Usd: createMetricPattern4(client, _m(acc, 'ratio_pct99_usd')), ratioSd: createRatio1ySdPattern(client, _m(acc, 'ratio')), }; } -/** - * @typedef {Object} PercentilesPattern2 - * @property {MetricPattern5} costBasisPct05 - * @property {MetricPattern5} costBasisPct10 - * @property {MetricPattern5} costBasisPct15 - * @property {MetricPattern5} costBasisPct20 - * @property {MetricPattern5} costBasisPct25 - * @property {MetricPattern5} costBasisPct30 - * @property {MetricPattern5} costBasisPct35 - * @property {MetricPattern5} costBasisPct40 - * @property {MetricPattern5} costBasisPct45 - * @property {MetricPattern5} costBasisPct50 - * @property {MetricPattern5} costBasisPct55 - * @property {MetricPattern5} costBasisPct60 - * @property {MetricPattern5} costBasisPct65 - * @property {MetricPattern5} costBasisPct70 - * @property {MetricPattern5} costBasisPct75 - * @property {MetricPattern5} costBasisPct80 - * @property {MetricPattern5} costBasisPct85 - * @property {MetricPattern5} costBasisPct90 - * @property {MetricPattern5} costBasisPct95 - */ - -/** - * Create a PercentilesPattern2 pattern node - * @param {BrkClientBase} client - * @param {string} acc - Accumulated metric name - * @returns {PercentilesPattern2} - */ -function createPercentilesPattern2(client, acc) { - return { - costBasisPct05: createMetricPattern5(client, _m(acc, 'pct05')), - costBasisPct10: 
createMetricPattern5(client, _m(acc, 'pct10')), - costBasisPct15: createMetricPattern5(client, _m(acc, 'pct15')), - costBasisPct20: createMetricPattern5(client, _m(acc, 'pct20')), - costBasisPct25: createMetricPattern5(client, _m(acc, 'pct25')), - costBasisPct30: createMetricPattern5(client, _m(acc, 'pct30')), - costBasisPct35: createMetricPattern5(client, _m(acc, 'pct35')), - costBasisPct40: createMetricPattern5(client, _m(acc, 'pct40')), - costBasisPct45: createMetricPattern5(client, _m(acc, 'pct45')), - costBasisPct50: createMetricPattern5(client, _m(acc, 'pct50')), - costBasisPct55: createMetricPattern5(client, _m(acc, 'pct55')), - costBasisPct60: createMetricPattern5(client, _m(acc, 'pct60')), - costBasisPct65: createMetricPattern5(client, _m(acc, 'pct65')), - costBasisPct70: createMetricPattern5(client, _m(acc, 'pct70')), - costBasisPct75: createMetricPattern5(client, _m(acc, 'pct75')), - costBasisPct80: createMetricPattern5(client, _m(acc, 'pct80')), - costBasisPct85: createMetricPattern5(client, _m(acc, 'pct85')), - costBasisPct90: createMetricPattern5(client, _m(acc, 'pct90')), - costBasisPct95: createMetricPattern5(client, _m(acc, 'pct95')), - }; -} - /** * @typedef {Object} ActivePriceRatioPattern - * @property {MetricPattern5} ratio - * @property {MetricPattern5} ratio1mSma - * @property {MetricPattern5} ratio1wSma + * @property {MetricPattern4} ratio + * @property {MetricPattern4} ratio1mSma + * @property {MetricPattern4} ratio1wSma * @property {Ratio1ySdPattern} ratio1ySd * @property {Ratio1ySdPattern} ratio2ySd * @property {Ratio1ySdPattern} ratio4ySd - * @property {MetricPattern5} ratioPct1 - * @property {MetricPattern5} ratioPct1Usd - * @property {MetricPattern5} ratioPct2 - * @property {MetricPattern5} ratioPct2Usd - * @property {MetricPattern5} ratioPct5 - * @property {MetricPattern5} ratioPct5Usd - * @property {MetricPattern5} ratioPct95 - * @property {MetricPattern5} ratioPct95Usd - * @property {MetricPattern5} ratioPct98 - * @property 
{MetricPattern5} ratioPct98Usd - * @property {MetricPattern5} ratioPct99 - * @property {MetricPattern5} ratioPct99Usd + * @property {MetricPattern4} ratioPct1 + * @property {MetricPattern4} ratioPct1Usd + * @property {MetricPattern4} ratioPct2 + * @property {MetricPattern4} ratioPct2Usd + * @property {MetricPattern4} ratioPct5 + * @property {MetricPattern4} ratioPct5Usd + * @property {MetricPattern4} ratioPct95 + * @property {MetricPattern4} ratioPct95Usd + * @property {MetricPattern4} ratioPct98 + * @property {MetricPattern4} ratioPct98Usd + * @property {MetricPattern4} ratioPct99 + * @property {MetricPattern4} ratioPct99Usd * @property {Ratio1ySdPattern} ratioSd */ @@ -2450,28 +2279,81 @@ function createPercentilesPattern2(client, acc) { */ function createActivePriceRatioPattern(client, acc) { return { - ratio: createMetricPattern5(client, acc), - ratio1mSma: createMetricPattern5(client, _m(acc, '1m_sma')), - ratio1wSma: createMetricPattern5(client, _m(acc, '1w_sma')), + ratio: createMetricPattern4(client, acc), + ratio1mSma: createMetricPattern4(client, _m(acc, '1m_sma')), + ratio1wSma: createMetricPattern4(client, _m(acc, '1w_sma')), ratio1ySd: createRatio1ySdPattern(client, _m(acc, '1y')), ratio2ySd: createRatio1ySdPattern(client, _m(acc, '2y')), ratio4ySd: createRatio1ySdPattern(client, _m(acc, '4y')), - ratioPct1: createMetricPattern5(client, _m(acc, 'pct1')), - ratioPct1Usd: createMetricPattern5(client, _m(acc, 'pct1_usd')), - ratioPct2: createMetricPattern5(client, _m(acc, 'pct2')), - ratioPct2Usd: createMetricPattern5(client, _m(acc, 'pct2_usd')), - ratioPct5: createMetricPattern5(client, _m(acc, 'pct5')), - ratioPct5Usd: createMetricPattern5(client, _m(acc, 'pct5_usd')), - ratioPct95: createMetricPattern5(client, _m(acc, 'pct95')), - ratioPct95Usd: createMetricPattern5(client, _m(acc, 'pct95_usd')), - ratioPct98: createMetricPattern5(client, _m(acc, 'pct98')), - ratioPct98Usd: createMetricPattern5(client, _m(acc, 'pct98_usd')), - ratioPct99: 
createMetricPattern5(client, _m(acc, 'pct99')), - ratioPct99Usd: createMetricPattern5(client, _m(acc, 'pct99_usd')), + ratioPct1: createMetricPattern4(client, _m(acc, 'pct1')), + ratioPct1Usd: createMetricPattern4(client, _m(acc, 'pct1_usd')), + ratioPct2: createMetricPattern4(client, _m(acc, 'pct2')), + ratioPct2Usd: createMetricPattern4(client, _m(acc, 'pct2_usd')), + ratioPct5: createMetricPattern4(client, _m(acc, 'pct5')), + ratioPct5Usd: createMetricPattern4(client, _m(acc, 'pct5_usd')), + ratioPct95: createMetricPattern4(client, _m(acc, 'pct95')), + ratioPct95Usd: createMetricPattern4(client, _m(acc, 'pct95_usd')), + ratioPct98: createMetricPattern4(client, _m(acc, 'pct98')), + ratioPct98Usd: createMetricPattern4(client, _m(acc, 'pct98_usd')), + ratioPct99: createMetricPattern4(client, _m(acc, 'pct99')), + ratioPct99Usd: createMetricPattern4(client, _m(acc, 'pct99_usd')), ratioSd: createRatio1ySdPattern(client, acc), }; } +/** + * @typedef {Object} PercentilesPattern2 + * @property {MetricPattern4} costBasisPct05 + * @property {MetricPattern4} costBasisPct10 + * @property {MetricPattern4} costBasisPct15 + * @property {MetricPattern4} costBasisPct20 + * @property {MetricPattern4} costBasisPct25 + * @property {MetricPattern4} costBasisPct30 + * @property {MetricPattern4} costBasisPct35 + * @property {MetricPattern4} costBasisPct40 + * @property {MetricPattern4} costBasisPct45 + * @property {MetricPattern4} costBasisPct50 + * @property {MetricPattern4} costBasisPct55 + * @property {MetricPattern4} costBasisPct60 + * @property {MetricPattern4} costBasisPct65 + * @property {MetricPattern4} costBasisPct70 + * @property {MetricPattern4} costBasisPct75 + * @property {MetricPattern4} costBasisPct80 + * @property {MetricPattern4} costBasisPct85 + * @property {MetricPattern4} costBasisPct90 + * @property {MetricPattern4} costBasisPct95 + */ + +/** + * Create a PercentilesPattern2 pattern node + * @param {BrkClientBase} client + * @param {string} acc - Accumulated metric 
name + * @returns {PercentilesPattern2} + */ +function createPercentilesPattern2(client, acc) { + return { + costBasisPct05: createMetricPattern4(client, _m(acc, 'pct05')), + costBasisPct10: createMetricPattern4(client, _m(acc, 'pct10')), + costBasisPct15: createMetricPattern4(client, _m(acc, 'pct15')), + costBasisPct20: createMetricPattern4(client, _m(acc, 'pct20')), + costBasisPct25: createMetricPattern4(client, _m(acc, 'pct25')), + costBasisPct30: createMetricPattern4(client, _m(acc, 'pct30')), + costBasisPct35: createMetricPattern4(client, _m(acc, 'pct35')), + costBasisPct40: createMetricPattern4(client, _m(acc, 'pct40')), + costBasisPct45: createMetricPattern4(client, _m(acc, 'pct45')), + costBasisPct50: createMetricPattern4(client, _m(acc, 'pct50')), + costBasisPct55: createMetricPattern4(client, _m(acc, 'pct55')), + costBasisPct60: createMetricPattern4(client, _m(acc, 'pct60')), + costBasisPct65: createMetricPattern4(client, _m(acc, 'pct65')), + costBasisPct70: createMetricPattern4(client, _m(acc, 'pct70')), + costBasisPct75: createMetricPattern4(client, _m(acc, 'pct75')), + costBasisPct80: createMetricPattern4(client, _m(acc, 'pct80')), + costBasisPct85: createMetricPattern4(client, _m(acc, 'pct85')), + costBasisPct90: createMetricPattern4(client, _m(acc, 'pct90')), + costBasisPct95: createMetricPattern4(client, _m(acc, 'pct95')), + }; +} + /** * @typedef {Object} RelativePattern5 * @property {MetricPattern3} negUnrealizedLossRelToMarketCap @@ -2480,12 +2362,12 @@ function createActivePriceRatioPattern(client, acc) { * @property {MetricPattern3} netUnrealizedPnlRelToMarketCap * @property {MetricPattern3} netUnrealizedPnlRelToOwnMarketCap * @property {MetricPattern3} netUnrealizedPnlRelToOwnTotalUnrealizedPnl - * @property {MetricPattern5} nupl + * @property {MetricPattern4} nupl * @property {MetricPattern3} supplyInLossRelToCirculatingSupply * @property {MetricPattern3} supplyInLossRelToOwnSupply * @property {MetricPattern3} 
supplyInProfitRelToCirculatingSupply * @property {MetricPattern3} supplyInProfitRelToOwnSupply - * @property {MetricPattern5} supplyRelToCirculatingSupply + * @property {MetricPattern4} supplyRelToCirculatingSupply * @property {MetricPattern3} unrealizedLossRelToMarketCap * @property {MetricPattern3} unrealizedLossRelToOwnMarketCap * @property {MetricPattern3} unrealizedLossRelToOwnTotalUnrealizedPnl @@ -2508,12 +2390,12 @@ function createRelativePattern5(client, acc) { netUnrealizedPnlRelToMarketCap: createMetricPattern3(client, _m(acc, 'net_unrealized_pnl_rel_to_market_cap')), netUnrealizedPnlRelToOwnMarketCap: createMetricPattern3(client, _m(acc, 'net_unrealized_pnl_rel_to_own_market_cap')), netUnrealizedPnlRelToOwnTotalUnrealizedPnl: createMetricPattern3(client, _m(acc, 'net_unrealized_pnl_rel_to_own_total_unrealized_pnl')), - nupl: createMetricPattern5(client, _m(acc, 'nupl')), + nupl: createMetricPattern4(client, _m(acc, 'nupl')), supplyInLossRelToCirculatingSupply: createMetricPattern3(client, _m(acc, 'supply_in_loss_rel_to_circulating_supply')), supplyInLossRelToOwnSupply: createMetricPattern3(client, _m(acc, 'supply_in_loss_rel_to_own_supply')), supplyInProfitRelToCirculatingSupply: createMetricPattern3(client, _m(acc, 'supply_in_profit_rel_to_circulating_supply')), supplyInProfitRelToOwnSupply: createMetricPattern3(client, _m(acc, 'supply_in_profit_rel_to_own_supply')), - supplyRelToCirculatingSupply: createMetricPattern5(client, _m(acc, 'supply_rel_to_circulating_supply')), + supplyRelToCirculatingSupply: createMetricPattern4(client, _m(acc, 'supply_rel_to_circulating_supply')), unrealizedLossRelToMarketCap: createMetricPattern3(client, _m(acc, 'unrealized_loss_rel_to_market_cap')), unrealizedLossRelToOwnMarketCap: createMetricPattern3(client, _m(acc, 'unrealized_loss_rel_to_own_market_cap')), unrealizedLossRelToOwnTotalUnrealizedPnl: createMetricPattern3(client, _m(acc, 'unrealized_loss_rel_to_own_total_unrealized_pnl')), @@ -2524,62 +2406,64 @@ 
function createRelativePattern5(client, acc) { } /** - * @typedef {Object} AXbtPattern - * @property {BlockCountPattern} _1dDominance - * @property {MetricPattern5} _1mBlocksMined - * @property {MetricPattern5} _1mDominance - * @property {MetricPattern5} _1wBlocksMined - * @property {MetricPattern5} _1wDominance - * @property {MetricPattern5} _1yBlocksMined - * @property {MetricPattern5} _1yDominance + * @typedef {Object} AaopoolPattern + * @property {MetricPattern1} _1mBlocksMined + * @property {MetricPattern1} _1mDominance + * @property {MetricPattern1} _1wBlocksMined + * @property {MetricPattern1} _1wDominance + * @property {MetricPattern1} _1yBlocksMined + * @property {MetricPattern1} _1yDominance + * @property {MetricPattern1} _24hBlocksMined + * @property {MetricPattern1} _24hDominance * @property {BlockCountPattern} blocksMined * @property {UnclaimedRewardsPattern} coinbase - * @property {MetricPattern5} daysSinceBlock - * @property {BlockCountPattern} dominance - * @property {SentPattern} fee - * @property {SentPattern} subsidy + * @property {MetricPattern4} daysSinceBlock + * @property {MetricPattern1} dominance + * @property {UnclaimedRewardsPattern} fee + * @property {UnclaimedRewardsPattern} subsidy */ /** - * Create a AXbtPattern pattern node + * Create a AaopoolPattern pattern node * @param {BrkClientBase} client * @param {string} acc - Accumulated metric name - * @returns {AXbtPattern} + * @returns {AaopoolPattern} */ -function createAXbtPattern(client, acc) { +function createAaopoolPattern(client, acc) { return { - _1dDominance: createBlockCountPattern(client, _m(acc, '1d_dominance')), - _1mBlocksMined: createMetricPattern5(client, _m(acc, '1m_blocks_mined')), - _1mDominance: createMetricPattern5(client, _m(acc, '1m_dominance')), - _1wBlocksMined: createMetricPattern5(client, _m(acc, '1w_blocks_mined')), - _1wDominance: createMetricPattern5(client, _m(acc, '1w_dominance')), - _1yBlocksMined: createMetricPattern5(client, _m(acc, '1y_blocks_mined')), 
- _1yDominance: createMetricPattern5(client, _m(acc, '1y_dominance')), + _1mBlocksMined: createMetricPattern1(client, _m(acc, '1m_blocks_mined')), + _1mDominance: createMetricPattern1(client, _m(acc, '1m_dominance')), + _1wBlocksMined: createMetricPattern1(client, _m(acc, '1w_blocks_mined')), + _1wDominance: createMetricPattern1(client, _m(acc, '1w_dominance')), + _1yBlocksMined: createMetricPattern1(client, _m(acc, '1y_blocks_mined')), + _1yDominance: createMetricPattern1(client, _m(acc, '1y_dominance')), + _24hBlocksMined: createMetricPattern1(client, _m(acc, '24h_blocks_mined')), + _24hDominance: createMetricPattern1(client, _m(acc, '24h_dominance')), blocksMined: createBlockCountPattern(client, _m(acc, 'blocks_mined')), coinbase: createUnclaimedRewardsPattern(client, _m(acc, 'coinbase')), - daysSinceBlock: createMetricPattern5(client, _m(acc, 'days_since_block')), - dominance: createBlockCountPattern(client, _m(acc, 'dominance')), - fee: createSentPattern(client, acc), - subsidy: createSentPattern(client, acc), + daysSinceBlock: createMetricPattern4(client, _m(acc, 'days_since_block')), + dominance: createMetricPattern1(client, _m(acc, 'dominance')), + fee: createUnclaimedRewardsPattern(client, _m(acc, 'fee')), + subsidy: createUnclaimedRewardsPattern(client, _m(acc, 'subsidy')), }; } /** * @template T * @typedef {Object} PriceAgoPattern - * @property {MetricPattern5} _10y - * @property {MetricPattern5} _1d - * @property {MetricPattern5} _1m - * @property {MetricPattern5} _1w - * @property {MetricPattern5} _1y - * @property {MetricPattern5} _2y - * @property {MetricPattern5} _3m - * @property {MetricPattern5} _3y - * @property {MetricPattern5} _4y - * @property {MetricPattern5} _5y - * @property {MetricPattern5} _6m - * @property {MetricPattern5} _6y - * @property {MetricPattern5} _8y + * @property {MetricPattern4} _10y + * @property {MetricPattern4} _1d + * @property {MetricPattern4} _1m + * @property {MetricPattern4} _1w + * @property {MetricPattern4} _1y + * 
@property {MetricPattern4} _2y + * @property {MetricPattern4} _3m + * @property {MetricPattern4} _3y + * @property {MetricPattern4} _4y + * @property {MetricPattern4} _5y + * @property {MetricPattern4} _6m + * @property {MetricPattern4} _6y + * @property {MetricPattern4} _8y */ /** @@ -2591,19 +2475,19 @@ function createAXbtPattern(client, acc) { */ function createPriceAgoPattern(client, acc) { return { - _10y: createMetricPattern5(client, _m(acc, '10y_ago')), - _1d: createMetricPattern5(client, _m(acc, '1d_ago')), - _1m: createMetricPattern5(client, _m(acc, '1m_ago')), - _1w: createMetricPattern5(client, _m(acc, '1w_ago')), - _1y: createMetricPattern5(client, _m(acc, '1y_ago')), - _2y: createMetricPattern5(client, _m(acc, '2y_ago')), - _3m: createMetricPattern5(client, _m(acc, '3m_ago')), - _3y: createMetricPattern5(client, _m(acc, '3y_ago')), - _4y: createMetricPattern5(client, _m(acc, '4y_ago')), - _5y: createMetricPattern5(client, _m(acc, '5y_ago')), - _6m: createMetricPattern5(client, _m(acc, '6m_ago')), - _6y: createMetricPattern5(client, _m(acc, '6y_ago')), - _8y: createMetricPattern5(client, _m(acc, '8y_ago')), + _10y: createMetricPattern4(client, _m(acc, '10y_ago')), + _1d: createMetricPattern4(client, _m(acc, '1d_ago')), + _1m: createMetricPattern4(client, _m(acc, '1m_ago')), + _1w: createMetricPattern4(client, _m(acc, '1w_ago')), + _1y: createMetricPattern4(client, _m(acc, '1y_ago')), + _2y: createMetricPattern4(client, _m(acc, '2y_ago')), + _3m: createMetricPattern4(client, _m(acc, '3m_ago')), + _3y: createMetricPattern4(client, _m(acc, '3y_ago')), + _4y: createMetricPattern4(client, _m(acc, '4y_ago')), + _5y: createMetricPattern4(client, _m(acc, '5y_ago')), + _6m: createMetricPattern4(client, _m(acc, '6m_ago')), + _6y: createMetricPattern4(client, _m(acc, '6y_ago')), + _8y: createMetricPattern4(client, _m(acc, '8y_ago')), }; } @@ -2649,18 +2533,18 @@ function createPeriodLumpSumStackPattern(client, acc) { /** * @template T * @typedef {Object} 
PeriodAveragePricePattern - * @property {MetricPattern5} _10y - * @property {MetricPattern5} _1m - * @property {MetricPattern5} _1w - * @property {MetricPattern5} _1y - * @property {MetricPattern5} _2y - * @property {MetricPattern5} _3m - * @property {MetricPattern5} _3y - * @property {MetricPattern5} _4y - * @property {MetricPattern5} _5y - * @property {MetricPattern5} _6m - * @property {MetricPattern5} _6y - * @property {MetricPattern5} _8y + * @property {MetricPattern4} _10y + * @property {MetricPattern4} _1m + * @property {MetricPattern4} _1w + * @property {MetricPattern4} _1y + * @property {MetricPattern4} _2y + * @property {MetricPattern4} _3m + * @property {MetricPattern4} _3y + * @property {MetricPattern4} _4y + * @property {MetricPattern4} _5y + * @property {MetricPattern4} _6m + * @property {MetricPattern4} _6y + * @property {MetricPattern4} _8y */ /** @@ -2672,35 +2556,35 @@ function createPeriodLumpSumStackPattern(client, acc) { */ function createPeriodAveragePricePattern(client, acc) { return { - _10y: createMetricPattern5(client, (acc ? `10y_${acc}` : '10y')), - _1m: createMetricPattern5(client, (acc ? `1m_${acc}` : '1m')), - _1w: createMetricPattern5(client, (acc ? `1w_${acc}` : '1w')), - _1y: createMetricPattern5(client, (acc ? `1y_${acc}` : '1y')), - _2y: createMetricPattern5(client, (acc ? `2y_${acc}` : '2y')), - _3m: createMetricPattern5(client, (acc ? `3m_${acc}` : '3m')), - _3y: createMetricPattern5(client, (acc ? `3y_${acc}` : '3y')), - _4y: createMetricPattern5(client, (acc ? `4y_${acc}` : '4y')), - _5y: createMetricPattern5(client, (acc ? `5y_${acc}` : '5y')), - _6m: createMetricPattern5(client, (acc ? `6m_${acc}` : '6m')), - _6y: createMetricPattern5(client, (acc ? `6y_${acc}` : '6y')), - _8y: createMetricPattern5(client, (acc ? `8y_${acc}` : '8y')), + _10y: createMetricPattern4(client, (acc ? `10y_${acc}` : '10y')), + _1m: createMetricPattern4(client, (acc ? `1m_${acc}` : '1m')), + _1w: createMetricPattern4(client, (acc ? 
`1w_${acc}` : '1w')), + _1y: createMetricPattern4(client, (acc ? `1y_${acc}` : '1y')), + _2y: createMetricPattern4(client, (acc ? `2y_${acc}` : '2y')), + _3m: createMetricPattern4(client, (acc ? `3m_${acc}` : '3m')), + _3y: createMetricPattern4(client, (acc ? `3y_${acc}` : '3y')), + _4y: createMetricPattern4(client, (acc ? `4y_${acc}` : '4y')), + _5y: createMetricPattern4(client, (acc ? `5y_${acc}` : '5y')), + _6m: createMetricPattern4(client, (acc ? `6m_${acc}` : '6m')), + _6y: createMetricPattern4(client, (acc ? `6y_${acc}` : '6y')), + _8y: createMetricPattern4(client, (acc ? `8y_${acc}` : '8y')), }; } /** * @template T * @typedef {Object} ClassAveragePricePattern - * @property {MetricPattern5} _2015 - * @property {MetricPattern5} _2016 - * @property {MetricPattern5} _2017 - * @property {MetricPattern5} _2018 - * @property {MetricPattern5} _2019 - * @property {MetricPattern5} _2020 - * @property {MetricPattern5} _2021 - * @property {MetricPattern5} _2022 - * @property {MetricPattern5} _2023 - * @property {MetricPattern5} _2024 - * @property {MetricPattern5} _2025 + * @property {MetricPattern4} _2015 + * @property {MetricPattern4} _2016 + * @property {MetricPattern4} _2017 + * @property {MetricPattern4} _2018 + * @property {MetricPattern4} _2019 + * @property {MetricPattern4} _2020 + * @property {MetricPattern4} _2021 + * @property {MetricPattern4} _2022 + * @property {MetricPattern4} _2023 + * @property {MetricPattern4} _2024 + * @property {MetricPattern4} _2025 */ /** @@ -2712,17 +2596,17 @@ function createPeriodAveragePricePattern(client, acc) { */ function createClassAveragePricePattern(client, acc) { return { - _2015: createMetricPattern5(client, _m(acc, '2015_average_price')), - _2016: createMetricPattern5(client, _m(acc, '2016_average_price')), - _2017: createMetricPattern5(client, _m(acc, '2017_average_price')), - _2018: createMetricPattern5(client, _m(acc, '2018_average_price')), - _2019: createMetricPattern5(client, _m(acc, '2019_average_price')), - 
_2020: createMetricPattern5(client, _m(acc, '2020_average_price')), - _2021: createMetricPattern5(client, _m(acc, '2021_average_price')), - _2022: createMetricPattern5(client, _m(acc, '2022_average_price')), - _2023: createMetricPattern5(client, _m(acc, '2023_average_price')), - _2024: createMetricPattern5(client, _m(acc, '2024_average_price')), - _2025: createMetricPattern5(client, _m(acc, '2025_average_price')), + _2015: createMetricPattern4(client, _m(acc, '2015_average_price')), + _2016: createMetricPattern4(client, _m(acc, '2016_average_price')), + _2017: createMetricPattern4(client, _m(acc, '2017_average_price')), + _2018: createMetricPattern4(client, _m(acc, '2018_average_price')), + _2019: createMetricPattern4(client, _m(acc, '2019_average_price')), + _2020: createMetricPattern4(client, _m(acc, '2020_average_price')), + _2021: createMetricPattern4(client, _m(acc, '2021_average_price')), + _2022: createMetricPattern4(client, _m(acc, '2022_average_price')), + _2023: createMetricPattern4(client, _m(acc, '2023_average_price')), + _2024: createMetricPattern4(client, _m(acc, '2024_average_price')), + _2025: createMetricPattern4(client, _m(acc, '2025_average_price')), }; } @@ -2765,12 +2649,12 @@ function createRelativePattern2(client, acc) { * @typedef {Object} RelativePattern * @property {MetricPattern3} negUnrealizedLossRelToMarketCap * @property {MetricPattern3} netUnrealizedPnlRelToMarketCap - * @property {MetricPattern5} nupl + * @property {MetricPattern4} nupl * @property {MetricPattern3} supplyInLossRelToCirculatingSupply * @property {MetricPattern3} supplyInLossRelToOwnSupply * @property {MetricPattern3} supplyInProfitRelToCirculatingSupply * @property {MetricPattern3} supplyInProfitRelToOwnSupply - * @property {MetricPattern5} supplyRelToCirculatingSupply + * @property {MetricPattern4} supplyRelToCirculatingSupply * @property {MetricPattern3} unrealizedLossRelToMarketCap * @property {MetricPattern3} unrealizedProfitRelToMarketCap */ @@ -2785,25 +2669,58 
@@ function createRelativePattern(client, acc) { return { negUnrealizedLossRelToMarketCap: createMetricPattern3(client, _m(acc, 'neg_unrealized_loss_rel_to_market_cap')), netUnrealizedPnlRelToMarketCap: createMetricPattern3(client, _m(acc, 'net_unrealized_pnl_rel_to_market_cap')), - nupl: createMetricPattern5(client, _m(acc, 'nupl')), + nupl: createMetricPattern4(client, _m(acc, 'nupl')), supplyInLossRelToCirculatingSupply: createMetricPattern3(client, _m(acc, 'supply_in_loss_rel_to_circulating_supply')), supplyInLossRelToOwnSupply: createMetricPattern3(client, _m(acc, 'supply_in_loss_rel_to_own_supply')), supplyInProfitRelToCirculatingSupply: createMetricPattern3(client, _m(acc, 'supply_in_profit_rel_to_circulating_supply')), supplyInProfitRelToOwnSupply: createMetricPattern3(client, _m(acc, 'supply_in_profit_rel_to_own_supply')), - supplyRelToCirculatingSupply: createMetricPattern5(client, _m(acc, 'supply_rel_to_circulating_supply')), + supplyRelToCirculatingSupply: createMetricPattern4(client, _m(acc, 'supply_rel_to_circulating_supply')), unrealizedLossRelToMarketCap: createMetricPattern3(client, _m(acc, 'unrealized_loss_rel_to_market_cap')), unrealizedProfitRelToMarketCap: createMetricPattern3(client, _m(acc, 'unrealized_profit_rel_to_market_cap')), }; } +/** + * @typedef {Object} AddrCountPattern + * @property {MetricPattern1} all + * @property {MetricPattern1} p2a + * @property {MetricPattern1} p2pk33 + * @property {MetricPattern1} p2pk65 + * @property {MetricPattern1} p2pkh + * @property {MetricPattern1} p2sh + * @property {MetricPattern1} p2tr + * @property {MetricPattern1} p2wpkh + * @property {MetricPattern1} p2wsh + */ + +/** + * Create a AddrCountPattern pattern node + * @param {BrkClientBase} client + * @param {string} acc - Accumulated metric name + * @returns {AddrCountPattern} + */ +function createAddrCountPattern(client, acc) { + return { + all: createMetricPattern1(client, (acc ? `addr_${acc}` : 'addr')), + p2a: createMetricPattern1(client, (acc ? 
`p2a_addr_${acc}` : 'p2a_addr')), + p2pk33: createMetricPattern1(client, (acc ? `p2pk33_addr_${acc}` : 'p2pk33_addr')), + p2pk65: createMetricPattern1(client, (acc ? `p2pk65_addr_${acc}` : 'p2pk65_addr')), + p2pkh: createMetricPattern1(client, (acc ? `p2pkh_addr_${acc}` : 'p2pkh_addr')), + p2sh: createMetricPattern1(client, (acc ? `p2sh_addr_${acc}` : 'p2sh_addr')), + p2tr: createMetricPattern1(client, (acc ? `p2tr_addr_${acc}` : 'p2tr_addr')), + p2wpkh: createMetricPattern1(client, (acc ? `p2wpkh_addr_${acc}` : 'p2wpkh_addr')), + p2wsh: createMetricPattern1(client, (acc ? `p2wsh_addr_${acc}` : 'p2wsh_addr')), + }; +} + /** * @typedef {Object} UnrealizedPattern * @property {MetricPattern3} negUnrealizedLoss * @property {MetricPattern3} netUnrealizedPnl - * @property {SupplyPattern2} supplyInLoss - * @property {SupplyValuePattern} supplyInLossValue - * @property {SupplyPattern2} supplyInProfit - * @property {SupplyValuePattern} supplyInProfitValue + * @property {SupplyInLossPattern} supplyInLoss + * @property {SupplyInLossValuePattern} supplyInLossValue + * @property {SupplyInLossPattern} supplyInProfit + * @property {SupplyInLossValuePattern} supplyInProfitValue * @property {MetricPattern3} totalUnrealizedPnl * @property {MetricPattern3} unrealizedLoss * @property {MetricPattern3} unrealizedProfit @@ -2819,82 +2736,16 @@ function createUnrealizedPattern(client, acc) { return { negUnrealizedLoss: createMetricPattern3(client, _m(acc, 'neg_unrealized_loss')), netUnrealizedPnl: createMetricPattern3(client, _m(acc, 'net_unrealized_pnl')), - supplyInLoss: createSupplyPattern2(client, _m(acc, 'supply_in_loss')), - supplyInLossValue: createSupplyValuePattern(client, _m(acc, 'supply_in_loss')), - supplyInProfit: createSupplyPattern2(client, _m(acc, 'supply_in_profit')), - supplyInProfitValue: createSupplyValuePattern(client, _m(acc, 'supply_in_profit')), + supplyInLoss: createSupplyInLossPattern(client, _m(acc, 'supply_in_loss')), + supplyInLossValue: 
createSupplyInLossValuePattern(client, _m(acc, 'supply_in_loss')), + supplyInProfit: createSupplyInLossPattern(client, _m(acc, 'supply_in_profit')), + supplyInProfitValue: createSupplyInLossValuePattern(client, _m(acc, 'supply_in_profit')), totalUnrealizedPnl: createMetricPattern3(client, _m(acc, 'total_unrealized_pnl')), unrealizedLoss: createMetricPattern3(client, _m(acc, 'unrealized_loss')), unrealizedProfit: createMetricPattern3(client, _m(acc, 'unrealized_profit')), }; } -/** - * @template T - * @typedef {Object} AddresstypeToHeightToAddrCountPattern - * @property {MetricPattern26} p2a - * @property {MetricPattern26} p2pk33 - * @property {MetricPattern26} p2pk65 - * @property {MetricPattern26} p2pkh - * @property {MetricPattern26} p2sh - * @property {MetricPattern26} p2tr - * @property {MetricPattern26} p2wpkh - * @property {MetricPattern26} p2wsh - */ - -/** - * Create a AddresstypeToHeightToAddrCountPattern pattern node - * @template T - * @param {BrkClientBase} client - * @param {string} acc - Accumulated metric name - * @returns {AddresstypeToHeightToAddrCountPattern} - */ -function createAddresstypeToHeightToAddrCountPattern(client, acc) { - return { - p2a: createMetricPattern26(client, (acc ? `p2a_${acc}` : 'p2a')), - p2pk33: createMetricPattern26(client, (acc ? `p2pk33_${acc}` : 'p2pk33')), - p2pk65: createMetricPattern26(client, (acc ? `p2pk65_${acc}` : 'p2pk65')), - p2pkh: createMetricPattern26(client, (acc ? `p2pkh_${acc}` : 'p2pkh')), - p2sh: createMetricPattern26(client, (acc ? `p2sh_${acc}` : 'p2sh')), - p2tr: createMetricPattern26(client, (acc ? `p2tr_${acc}` : 'p2tr')), - p2wpkh: createMetricPattern26(client, (acc ? `p2wpkh_${acc}` : 'p2wpkh')), - p2wsh: createMetricPattern26(client, (acc ? 
`p2wsh_${acc}` : 'p2wsh')), - }; -} - -/** - * @template T - * @typedef {Object} CountPattern2 - * @property {MetricPattern2} average - * @property {MetricPattern6} cumulative - * @property {BlockIntervalPattern} distribution - * @property {MetricPattern6} max - * @property {MetricPattern6} min - * @property {MinmaxPattern} minmax - * @property {MetricPattern6} sum - * @property {SumCumPattern} sumCum - */ - -/** - * Create a CountPattern2 pattern node - * @template T - * @param {BrkClientBase} client - * @param {string} acc - Accumulated metric name - * @returns {CountPattern2} - */ -function createCountPattern2(client, acc) { - return { - average: createMetricPattern2(client, _m(acc, 'average')), - cumulative: createMetricPattern6(client, _m(acc, 'cumulative')), - distribution: createBlockIntervalPattern(client, acc), - max: createMetricPattern6(client, _m(acc, 'max')), - min: createMetricPattern6(client, _m(acc, 'min')), - minmax: createMinmaxPattern(client, acc), - sum: createMetricPattern6(client, _m(acc, 'sum')), - sumCum: createSumCumPattern(client, acc), - }; -} - /** * @typedef {Object} _0satsPattern * @property {ActivityPattern2} activity @@ -2902,7 +2753,7 @@ function createCountPattern2(client, acc) { * @property {CostBasisPattern} costBasis * @property {RealizedPattern} realized * @property {RelativePattern} relative - * @property {SupplyPattern3} supply + * @property {SupplyPattern2} supply * @property {UnrealizedPattern} unrealized */ @@ -2919,20 +2770,20 @@ function create_0satsPattern(client, acc) { costBasis: createCostBasisPattern(client, acc), realized: createRealizedPattern(client, acc), relative: createRelativePattern(client, acc), - supply: createSupplyPattern3(client, acc), + supply: createSupplyPattern2(client, acc), unrealized: createUnrealizedPattern(client, acc), }; } /** * @typedef {Object} PeriodCagrPattern - * @property {MetricPattern5} _10y - * @property {MetricPattern5} _2y - * @property {MetricPattern5} _3y - * @property 
{MetricPattern5} _4y - * @property {MetricPattern5} _5y - * @property {MetricPattern5} _6y - * @property {MetricPattern5} _8y + * @property {MetricPattern4} _10y + * @property {MetricPattern4} _2y + * @property {MetricPattern4} _3y + * @property {MetricPattern4} _4y + * @property {MetricPattern4} _5y + * @property {MetricPattern4} _6y + * @property {MetricPattern4} _8y */ /** @@ -2943,52 +2794,21 @@ function create_0satsPattern(client, acc) { */ function createPeriodCagrPattern(client, acc) { return { - _10y: createMetricPattern5(client, (acc ? `10y_${acc}` : '10y')), - _2y: createMetricPattern5(client, (acc ? `2y_${acc}` : '2y')), - _3y: createMetricPattern5(client, (acc ? `3y_${acc}` : '3y')), - _4y: createMetricPattern5(client, (acc ? `4y_${acc}` : '4y')), - _5y: createMetricPattern5(client, (acc ? `5y_${acc}` : '5y')), - _6y: createMetricPattern5(client, (acc ? `6y_${acc}` : '6y')), - _8y: createMetricPattern5(client, (acc ? `8y_${acc}` : '8y')), + _10y: createMetricPattern4(client, (acc ? `10y_${acc}` : '10y')), + _2y: createMetricPattern4(client, (acc ? `2y_${acc}` : '2y')), + _3y: createMetricPattern4(client, (acc ? `3y_${acc}` : '3y')), + _4y: createMetricPattern4(client, (acc ? `4y_${acc}` : '4y')), + _5y: createMetricPattern4(client, (acc ? `5y_${acc}` : '5y')), + _6y: createMetricPattern4(client, (acc ? `6y_${acc}` : '6y')), + _8y: createMetricPattern4(client, (acc ? 
`8y_${acc}` : '8y')), }; } /** * @template T - * @typedef {Object} BlockSizePattern - * @property {MetricPattern6} average - * @property {MetricPattern4} cumulative - * @property {BlockIntervalPattern} distribution - * @property {MetricPattern6} max - * @property {MetricPattern6} min - * @property {MetricPattern6} sum - * @property {SumCumPattern} sumCum - */ - -/** - * Create a BlockSizePattern pattern node - * @template T - * @param {BrkClientBase} client - * @param {string} acc - Accumulated metric name - * @returns {BlockSizePattern} - */ -function createBlockSizePattern(client, acc) { - return { - average: createMetricPattern6(client, _m(acc, 'average')), - cumulative: createMetricPattern4(client, _m(acc, 'cumulative')), - distribution: createBlockIntervalPattern(client, acc), - max: createMetricPattern6(client, _m(acc, 'max')), - min: createMetricPattern6(client, _m(acc, 'min')), - sum: createMetricPattern6(client, _m(acc, 'sum')), - sumCum: createSumCumPattern(client, acc), - }; -} - -/** - * @template T - * @typedef {Object} DollarsPattern + * @typedef {Object} BitcoinPattern * @property {MetricPattern2} average - * @property {MetricPattern26} base + * @property {MetricPattern23} base * @property {MetricPattern1} cumulative * @property {MetricPattern2} max * @property {MetricPattern2} min @@ -2997,16 +2817,16 @@ function createBlockSizePattern(client, acc) { */ /** - * Create a DollarsPattern pattern node + * Create a BitcoinPattern pattern node * @template T * @param {BrkClientBase} client * @param {string} acc - Accumulated metric name - * @returns {DollarsPattern} + * @returns {BitcoinPattern} */ -function createDollarsPattern(client, acc) { +function createBitcoinPattern(client, acc) { return { average: createMetricPattern2(client, _m(acc, 'average')), - base: createMetricPattern26(client, acc), + base: createMetricPattern23(client, acc), cumulative: createMetricPattern1(client, _m(acc, 'cumulative')), max: createMetricPattern2(client, _m(acc, 'max')), 
min: createMetricPattern2(client, _m(acc, 'min')), @@ -3015,94 +2835,13 @@ function createDollarsPattern(client, acc) { }; } -/** - * @typedef {Object} _10yPattern - * @property {ActivityPattern2} activity - * @property {CostBasisPattern} costBasis - * @property {RealizedPattern4} realized - * @property {RelativePattern} relative - * @property {SupplyPattern3} supply - * @property {UnrealizedPattern} unrealized - */ - -/** - * Create a _10yPattern pattern node - * @param {BrkClientBase} client - * @param {string} acc - Accumulated metric name - * @returns {_10yPattern} - */ -function create_10yPattern(client, acc) { - return { - activity: createActivityPattern2(client, acc), - costBasis: createCostBasisPattern(client, acc), - realized: createRealizedPattern4(client, acc), - relative: createRelativePattern(client, acc), - supply: createSupplyPattern3(client, acc), - unrealized: createUnrealizedPattern(client, acc), - }; -} - -/** - * @typedef {Object} _10yTo12yPattern - * @property {ActivityPattern2} activity - * @property {CostBasisPattern2} costBasis - * @property {RealizedPattern2} realized - * @property {RelativePattern2} relative - * @property {SupplyPattern3} supply - * @property {UnrealizedPattern} unrealized - */ - -/** - * Create a _10yTo12yPattern pattern node - * @param {BrkClientBase} client - * @param {string} acc - Accumulated metric name - * @returns {_10yTo12yPattern} - */ -function create_10yTo12yPattern(client, acc) { - return { - activity: createActivityPattern2(client, acc), - costBasis: createCostBasisPattern2(client, acc), - realized: createRealizedPattern2(client, acc), - relative: createRelativePattern2(client, acc), - supply: createSupplyPattern3(client, acc), - unrealized: createUnrealizedPattern(client, acc), - }; -} - -/** - * @typedef {Object} _100btcPattern - * @property {ActivityPattern2} activity - * @property {CostBasisPattern} costBasis - * @property {RealizedPattern} realized - * @property {RelativePattern} relative - * @property 
{SupplyPattern3} supply - * @property {UnrealizedPattern} unrealized - */ - -/** - * Create a _100btcPattern pattern node - * @param {BrkClientBase} client - * @param {string} acc - Accumulated metric name - * @returns {_100btcPattern} - */ -function create_100btcPattern(client, acc) { - return { - activity: createActivityPattern2(client, acc), - costBasis: createCostBasisPattern(client, acc), - realized: createRealizedPattern(client, acc), - relative: createRelativePattern(client, acc), - supply: createSupplyPattern3(client, acc), - unrealized: createUnrealizedPattern(client, acc), - }; -} - /** * @typedef {Object} _0satsPattern2 * @property {ActivityPattern2} activity * @property {CostBasisPattern} costBasis * @property {RealizedPattern} realized * @property {RelativePattern4} relative - * @property {SupplyPattern3} supply + * @property {SupplyPattern2} supply * @property {UnrealizedPattern} unrealized */ @@ -3118,36 +2857,117 @@ function create_0satsPattern2(client, acc) { costBasis: createCostBasisPattern(client, acc), realized: createRealizedPattern(client, acc), relative: createRelativePattern4(client, _m(acc, 'supply_in')), - supply: createSupplyPattern3(client, acc), + supply: createSupplyPattern2(client, acc), + unrealized: createUnrealizedPattern(client, acc), + }; +} + +/** + * @typedef {Object} _10yTo12yPattern + * @property {ActivityPattern2} activity + * @property {CostBasisPattern2} costBasis + * @property {RealizedPattern2} realized + * @property {RelativePattern2} relative + * @property {SupplyPattern2} supply + * @property {UnrealizedPattern} unrealized + */ + +/** + * Create a _10yTo12yPattern pattern node + * @param {BrkClientBase} client + * @param {string} acc - Accumulated metric name + * @returns {_10yTo12yPattern} + */ +function create_10yTo12yPattern(client, acc) { + return { + activity: createActivityPattern2(client, acc), + costBasis: createCostBasisPattern2(client, acc), + realized: createRealizedPattern2(client, acc), + relative: 
createRelativePattern2(client, acc), + supply: createSupplyPattern2(client, acc), + unrealized: createUnrealizedPattern(client, acc), + }; +} + +/** + * @typedef {Object} _10yPattern + * @property {ActivityPattern2} activity + * @property {CostBasisPattern} costBasis + * @property {RealizedPattern4} realized + * @property {RelativePattern} relative + * @property {SupplyPattern2} supply + * @property {UnrealizedPattern} unrealized + */ + +/** + * Create a _10yPattern pattern node + * @param {BrkClientBase} client + * @param {string} acc - Accumulated metric name + * @returns {_10yPattern} + */ +function create_10yPattern(client, acc) { + return { + activity: createActivityPattern2(client, acc), + costBasis: createCostBasisPattern(client, acc), + realized: createRealizedPattern4(client, acc), + relative: createRelativePattern(client, acc), + supply: createSupplyPattern2(client, acc), + unrealized: createUnrealizedPattern(client, acc), + }; +} + +/** + * @typedef {Object} _100btcPattern + * @property {ActivityPattern2} activity + * @property {CostBasisPattern} costBasis + * @property {RealizedPattern} realized + * @property {RelativePattern} relative + * @property {SupplyPattern2} supply + * @property {UnrealizedPattern} unrealized + */ + +/** + * Create a _100btcPattern pattern node + * @param {BrkClientBase} client + * @param {string} acc - Accumulated metric name + * @returns {_100btcPattern} + */ +function create_100btcPattern(client, acc) { + return { + activity: createActivityPattern2(client, acc), + costBasis: createCostBasisPattern(client, acc), + realized: createRealizedPattern(client, acc), + relative: createRelativePattern(client, acc), + supply: createSupplyPattern2(client, acc), unrealized: createUnrealizedPattern(client, acc), }; } /** * @template T - * @typedef {Object} BitcoinPattern + * @typedef {Object} BlockSizePattern * @property {MetricPattern2} average - * @property {MetricPattern26} base - * @property {MetricPattern2} cumulative + * @property 
{MetricPattern1} cumulative * @property {MetricPattern2} max * @property {MetricPattern2} min + * @property {PercentilesPattern} percentiles * @property {MetricPattern2} sum */ /** - * Create a BitcoinPattern pattern node + * Create a BlockSizePattern pattern node * @template T * @param {BrkClientBase} client * @param {string} acc - Accumulated metric name - * @returns {BitcoinPattern} + * @returns {BlockSizePattern} */ -function createBitcoinPattern(client, acc) { +function createBlockSizePattern(client, acc) { return { average: createMetricPattern2(client, _m(acc, 'average')), - base: createMetricPattern26(client, acc), - cumulative: createMetricPattern2(client, _m(acc, 'cum')), + cumulative: createMetricPattern1(client, _m(acc, 'cumulative')), max: createMetricPattern2(client, _m(acc, 'max')), min: createMetricPattern2(client, _m(acc, 'min')), + percentiles: createPercentilesPattern(client, acc), sum: createMetricPattern2(client, _m(acc, 'sum')), }; } @@ -3156,8 +2976,8 @@ function createBitcoinPattern(client, acc) { * @typedef {Object} ActivityPattern2 * @property {BlockCountPattern} coinblocksDestroyed * @property {BlockCountPattern} coindaysDestroyed - * @property {MetricPattern26} satblocksDestroyed - * @property {MetricPattern26} satdaysDestroyed + * @property {MetricPattern23} satblocksDestroyed + * @property {MetricPattern23} satdaysDestroyed * @property {SentPattern} sent */ @@ -3171,70 +2991,20 @@ function createActivityPattern2(client, acc) { return { coinblocksDestroyed: createBlockCountPattern(client, _m(acc, 'coinblocks_destroyed')), coindaysDestroyed: createBlockCountPattern(client, _m(acc, 'coindays_destroyed')), - satblocksDestroyed: createMetricPattern26(client, _m(acc, 'satblocks_destroyed')), - satdaysDestroyed: createMetricPattern26(client, _m(acc, 'satdays_destroyed')), + satblocksDestroyed: createMetricPattern23(client, _m(acc, 'satblocks_destroyed')), + satdaysDestroyed: createMetricPattern23(client, _m(acc, 'satdays_destroyed')), sent: 
createSentPattern(client, _m(acc, 'sent')), }; } -/** - * @typedef {Object} SentPattern - * @property {MetricPattern26} base - * @property {BlockCountPattern} bitcoin - * @property {SumCumPattern} dollars - * @property {MetricPattern26} dollarsSource - * @property {SumCumPattern} sats - */ - -/** - * Create a SentPattern pattern node - * @param {BrkClientBase} client - * @param {string} acc - Accumulated metric name - * @returns {SentPattern} - */ -function createSentPattern(client, acc) { - return { - base: createMetricPattern26(client, _m(acc, 'height_fee')), - bitcoin: createBlockCountPattern(client, _m(acc, 'btc')), - dollars: createSumCumPattern(client, _m(acc, 'usd')), - dollarsSource: createMetricPattern26(client, _m(acc, 'usd')), - sats: createSumCumPattern(client, _m(acc, 'fee')), - }; -} - -/** - * @typedef {Object} SupplyPattern3 - * @property {SupplyPattern2} supply - * @property {ActiveSupplyPattern} supplyHalf - * @property {ActiveSupplyPattern} supplyHalfValue - * @property {SupplyValuePattern} supplyValue - * @property {MetricPattern1} utxoCount - */ - -/** - * Create a SupplyPattern3 pattern node - * @param {BrkClientBase} client - * @param {string} acc - Accumulated metric name - * @returns {SupplyPattern3} - */ -function createSupplyPattern3(client, acc) { - return { - supply: createSupplyPattern2(client, _m(acc, 'supply')), - supplyHalf: createActiveSupplyPattern(client, _m(acc, 'supply_half')), - supplyHalfValue: createActiveSupplyPattern(client, _m(acc, 'supply_half')), - supplyValue: createSupplyValuePattern(client, _m(acc, 'supply')), - utxoCount: createMetricPattern1(client, _m(acc, 'utxo_count')), - }; -} - /** * @template T * @typedef {Object} PercentilesPattern - * @property {MetricPattern22} median - * @property {MetricPattern22} pct10 - * @property {MetricPattern22} pct25 - * @property {MetricPattern22} pct75 - * @property {MetricPattern22} pct90 + * @property {MetricPattern19} median + * @property {MetricPattern19} pct10 + * @property 
{MetricPattern19} pct25 + * @property {MetricPattern19} pct75 + * @property {MetricPattern19} pct90 */ /** @@ -3246,172 +3016,111 @@ function createSupplyPattern3(client, acc) { */ function createPercentilesPattern(client, acc) { return { - median: createMetricPattern22(client, _m(acc, 'median')), - pct10: createMetricPattern22(client, _m(acc, 'pct10')), - pct25: createMetricPattern22(client, _m(acc, 'pct25')), - pct75: createMetricPattern22(client, _m(acc, 'pct75')), - pct90: createMetricPattern22(client, _m(acc, 'pct90')), - }; -} - -/** - * @typedef {Object} SupplyPattern2 - * @property {MetricPattern26} base - * @property {MetricPattern5} bitcoin - * @property {MetricPattern5} dollars - * @property {MetricPattern7} sats - */ - -/** - * Create a SupplyPattern2 pattern node - * @param {BrkClientBase} client - * @param {string} acc - Accumulated metric name - * @returns {SupplyPattern2} - */ -function createSupplyPattern2(client, acc) { - return { - base: createMetricPattern26(client, acc), - bitcoin: createMetricPattern5(client, _m(acc, 'btc')), - dollars: createMetricPattern5(client, _m(acc, 'usd')), - sats: createMetricPattern7(client, acc), + median: createMetricPattern19(client, _m(acc, 'median')), + pct10: createMetricPattern19(client, _m(acc, 'pct10')), + pct25: createMetricPattern19(client, _m(acc, 'pct25')), + pct75: createMetricPattern19(client, _m(acc, 'pct75')), + pct90: createMetricPattern19(client, _m(acc, 'pct90')), }; } /** * @template T - * @typedef {Object} PriceHighInSatsPattern - * @property {MetricPattern22} dateindex - * @property {MetricPattern26} height - * @property {MetricPattern24} max - * @property {MetricPattern7} rest - */ - -/** - * Create a PriceHighInSatsPattern pattern node - * @template T - * @param {BrkClientBase} client - * @param {string} acc - Accumulated metric name - * @returns {PriceHighInSatsPattern} - */ -function createPriceHighInSatsPattern(client, acc) { - return { - dateindex: createMetricPattern22(client, acc), - 
height: createMetricPattern26(client, acc), - max: createMetricPattern24(client, _m(acc, 'max')), - rest: createMetricPattern7(client, _m(acc, 'max')), - }; -} - -/** - * @template T - * @typedef {Object} PriceLowInSatsPattern - * @property {MetricPattern22} dateindex - * @property {MetricPattern26} height - * @property {MetricPattern24} min - * @property {MetricPattern7} rest - */ - -/** - * Create a PriceLowInSatsPattern pattern node - * @template T - * @param {BrkClientBase} client - * @param {string} acc - Accumulated metric name - * @returns {PriceLowInSatsPattern} - */ -function createPriceLowInSatsPattern(client, acc) { - return { - dateindex: createMetricPattern22(client, acc), - height: createMetricPattern26(client, acc), - min: createMetricPattern24(client, _m(acc, 'min')), - rest: createMetricPattern7(client, _m(acc, 'min')), - }; -} - -/** - * @template T - * @typedef {Object} BlockIntervalPattern - * @property {MetricPattern22} average - * @property {MetricPattern22} max - * @property {MetricPattern22} min + * @typedef {Object} IntervalPattern + * @property {MetricPattern2} average + * @property {MetricPattern23} base + * @property {MetricPattern2} max + * @property {MetricPattern2} min * @property {PercentilesPattern} percentiles */ /** - * Create a BlockIntervalPattern pattern node + * Create a IntervalPattern pattern node * @template T * @param {BrkClientBase} client * @param {string} acc - Accumulated metric name - * @returns {BlockIntervalPattern} + * @returns {IntervalPattern} */ -function createBlockIntervalPattern(client, acc) { +function createIntervalPattern(client, acc) { return { - average: createMetricPattern22(client, _m(acc, 'average')), - max: createMetricPattern22(client, _m(acc, 'max')), - min: createMetricPattern22(client, _m(acc, 'min')), + average: createMetricPattern2(client, _m(acc, 'average')), + base: createMetricPattern23(client, acc), + max: createMetricPattern2(client, _m(acc, 'max')), + min: createMetricPattern2(client, 
_m(acc, 'min')), percentiles: createPercentilesPattern(client, acc), }; } /** - * @typedef {Object} ActiveSupplyPattern - * @property {MetricPattern1} bitcoin - * @property {MetricPattern1} dollars - * @property {MetricPattern1} sats + * @typedef {Object} SupplyInLossPattern + * @property {MetricPattern7} base + * @property {MetricPattern4} bitcoin + * @property {MetricPattern4} dollars + * @property {MetricPattern5} sats */ /** - * Create a ActiveSupplyPattern pattern node + * Create a SupplyInLossPattern pattern node * @param {BrkClientBase} client * @param {string} acc - Accumulated metric name - * @returns {ActiveSupplyPattern} + * @returns {SupplyInLossPattern} */ -function createActiveSupplyPattern(client, acc) { +function createSupplyInLossPattern(client, acc) { return { - bitcoin: createMetricPattern1(client, _m(acc, 'btc')), - dollars: createMetricPattern1(client, _m(acc, 'usd')), - sats: createMetricPattern1(client, acc), + base: createMetricPattern7(client, acc), + bitcoin: createMetricPattern4(client, _m(acc, 'btc')), + dollars: createMetricPattern4(client, _m(acc, 'usd')), + sats: createMetricPattern5(client, acc), }; } /** - * @typedef {Object} CostBasisPattern2 - * @property {MetricPattern1} maxCostBasis - * @property {MetricPattern1} minCostBasis - * @property {PercentilesPattern2} percentiles + * @template T + * @typedef {Object} PriceHighSatsPattern + * @property {MetricPattern19} dateindex + * @property {MetricPattern21} difficultyepoch + * @property {MetricPattern23} height + * @property {MetricPattern5} rest */ /** - * Create a CostBasisPattern2 pattern node + * Create a PriceHighSatsPattern pattern node + * @template T * @param {BrkClientBase} client * @param {string} acc - Accumulated metric name - * @returns {CostBasisPattern2} + * @returns {PriceHighSatsPattern} */ -function createCostBasisPattern2(client, acc) { +function createPriceHighSatsPattern(client, acc) { return { - maxCostBasis: createMetricPattern1(client, _m(acc, 
'max_cost_basis')), - minCostBasis: createMetricPattern1(client, _m(acc, 'min_cost_basis')), - percentiles: createPercentilesPattern2(client, _m(acc, 'cost_basis')), + dateindex: createMetricPattern19(client, acc), + difficultyepoch: createMetricPattern21(client, _m(acc, 'max')), + height: createMetricPattern23(client, acc), + rest: createMetricPattern5(client, _m(acc, 'max')), }; } /** - * @typedef {Object} CoinbasePattern - * @property {BitcoinPattern} bitcoin - * @property {DollarsPattern} dollars - * @property {DollarsPattern} sats + * @template T + * @typedef {Object} TxVsizePattern + * @property {MetricPattern1} average + * @property {MetricPattern1} max + * @property {MetricPattern1} min + * @property {PercentilesPattern} percentiles */ /** - * Create a CoinbasePattern pattern node + * Create a TxVsizePattern pattern node + * @template T * @param {BrkClientBase} client * @param {string} acc - Accumulated metric name - * @returns {CoinbasePattern} + * @returns {TxVsizePattern} */ -function createCoinbasePattern(client, acc) { +function createTxVsizePattern(client, acc) { return { - bitcoin: createBitcoinPattern(client, _m(acc, 'btc')), - dollars: createDollarsPattern(client, _m(acc, 'usd')), - sats: createDollarsPattern(client, acc), + average: createMetricPattern1(client, _m(acc, 'average')), + max: createMetricPattern1(client, _m(acc, 'max')), + min: createMetricPattern1(client, _m(acc, 'min')), + percentiles: createPercentilesPattern(client, acc), }; } @@ -3436,45 +3145,188 @@ function createUnclaimedRewardsPattern(client, acc) { }; } +/** + * @typedef {Object} CostBasisPattern2 + * @property {MetricPattern1} max + * @property {MetricPattern1} min + * @property {PercentilesPattern2} percentiles + */ + +/** + * Create a CostBasisPattern2 pattern node + * @param {BrkClientBase} client + * @param {string} acc - Accumulated metric name + * @returns {CostBasisPattern2} + */ +function createCostBasisPattern2(client, acc) { + return { + max: 
createMetricPattern1(client, _m(acc, 'max_cost_basis')), + min: createMetricPattern1(client, _m(acc, 'min_cost_basis')), + percentiles: createPercentilesPattern2(client, _m(acc, 'cost_basis')), + }; +} + +/** + * @typedef {Object} ActiveSupplyPattern + * @property {MetricPattern1} bitcoin + * @property {MetricPattern1} dollars + * @property {MetricPattern1} sats + */ + +/** + * Create a ActiveSupplyPattern pattern node + * @param {BrkClientBase} client + * @param {string} acc - Accumulated metric name + * @returns {ActiveSupplyPattern} + */ +function createActiveSupplyPattern(client, acc) { + return { + bitcoin: createMetricPattern1(client, _m(acc, 'btc')), + dollars: createMetricPattern1(client, _m(acc, 'usd')), + sats: createMetricPattern1(client, acc), + }; +} + +/** + * @typedef {Object} SentPattern + * @property {BlockCountPattern} bitcoin + * @property {BlockCountPattern} dollars + * @property {SatsPattern} sats + */ + +/** + * Create a SentPattern pattern node + * @param {BrkClientBase} client + * @param {string} acc - Accumulated metric name + * @returns {SentPattern} + */ +function createSentPattern(client, acc) { + return { + bitcoin: createBlockCountPattern(client, _m(acc, 'btc')), + dollars: createBlockCountPattern(client, _m(acc, 'usd')), + sats: createSatsPattern(client, acc), + }; +} + +/** + * @typedef {Object} CoinbasePattern + * @property {BitcoinPattern} bitcoin + * @property {BitcoinPattern} dollars + * @property {BitcoinPattern} sats + */ + +/** + * Create a CoinbasePattern pattern node + * @param {BrkClientBase} client + * @param {string} acc - Accumulated metric name + * @returns {CoinbasePattern} + */ +function createCoinbasePattern(client, acc) { + return { + bitcoin: createBitcoinPattern(client, _m(acc, 'btc')), + dollars: createBitcoinPattern(client, _m(acc, 'usd')), + sats: createBitcoinPattern(client, acc), + }; +} + +/** + * @typedef {Object} SupplyPattern2 + * @property {ActiveSupplyPattern} supply + * @property {ActiveSupplyPattern} 
supplyHalf + * @property {MetricPattern1} utxoCount + */ + +/** + * Create a SupplyPattern2 pattern node + * @param {BrkClientBase} client + * @param {string} acc - Accumulated metric name + * @returns {SupplyPattern2} + */ +function createSupplyPattern2(client, acc) { + return { + supply: createActiveSupplyPattern(client, _m(acc, 'supply')), + supplyHalf: createActiveSupplyPattern(client, _m(acc, 'supply_half')), + utxoCount: createMetricPattern1(client, _m(acc, 'utxo_count')), + }; +} + /** * @template T - * @typedef {Object} BlockCountPattern - * @property {MetricPattern26} base + * @typedef {Object} SatsPattern + * @property {MetricPattern23} base * @property {MetricPattern2} cumulative * @property {MetricPattern2} sum */ /** - * Create a BlockCountPattern pattern node + * Create a SatsPattern pattern node * @template T * @param {BrkClientBase} client * @param {string} acc - Accumulated metric name - * @returns {BlockCountPattern} + * @returns {SatsPattern} */ -function createBlockCountPattern(client, acc) { +function createSatsPattern(client, acc) { return { - base: createMetricPattern26(client, acc), + base: createMetricPattern23(client, acc), cumulative: createMetricPattern2(client, _m(acc, 'cumulative')), sum: createMetricPattern2(client, _m(acc, 'sum')), }; } /** - * @typedef {Object} SupplyValuePattern - * @property {MetricPattern26} bitcoin - * @property {MetricPattern26} dollars + * @typedef {Object} CostBasisPattern + * @property {MetricPattern1} max + * @property {MetricPattern1} min */ /** - * Create a SupplyValuePattern pattern node + * Create a CostBasisPattern pattern node * @param {BrkClientBase} client * @param {string} acc - Accumulated metric name - * @returns {SupplyValuePattern} + * @returns {CostBasisPattern} */ -function createSupplyValuePattern(client, acc) { +function createCostBasisPattern(client, acc) { return { - bitcoin: createMetricPattern26(client, _m(acc, 'btc')), - dollars: createMetricPattern26(client, _m(acc, 'usd')), + max: 
createMetricPattern1(client, _m(acc, 'max_cost_basis')), + min: createMetricPattern1(client, _m(acc, 'min_cost_basis')), + }; +} + +/** + * @typedef {Object} SupplyInLossValuePattern + * @property {MetricPattern23} bitcoin + * @property {MetricPattern23} dollars + */ + +/** + * Create a SupplyInLossValuePattern pattern node + * @param {BrkClientBase} client + * @param {string} acc - Accumulated metric name + * @returns {SupplyInLossValuePattern} + */ +function createSupplyInLossValuePattern(client, acc) { + return { + bitcoin: createMetricPattern23(client, _m(acc, 'btc')), + dollars: createMetricPattern23(client, _m(acc, 'usd')), + }; +} + +/** + * @typedef {Object} _1dReturns1mSdPattern + * @property {MetricPattern4} sd + * @property {MetricPattern4} sma + */ + +/** + * Create a _1dReturns1mSdPattern pattern node + * @param {BrkClientBase} client + * @param {string} acc - Accumulated metric name + * @returns {_1dReturns1mSdPattern} + */ +function create_1dReturns1mSdPattern(client, acc) { + return { + sd: createMetricPattern4(client, _m(acc, 'sd')), + sma: createMetricPattern4(client, _m(acc, 'sma')), }; } @@ -3497,131 +3349,30 @@ function createRelativePattern4(client, acc) { }; } -/** - * @typedef {Object} CostBasisPattern - * @property {MetricPattern1} maxCostBasis - * @property {MetricPattern1} minCostBasis - */ - -/** - * Create a CostBasisPattern pattern node - * @param {BrkClientBase} client - * @param {string} acc - Accumulated metric name - * @returns {CostBasisPattern} - */ -function createCostBasisPattern(client, acc) { - return { - maxCostBasis: createMetricPattern1(client, _m(acc, 'max_cost_basis')), - minCostBasis: createMetricPattern1(client, _m(acc, 'min_cost_basis')), - }; -} - -/** - * @typedef {Object} _1dReturns1mSdPattern - * @property {MetricPattern5} sd - * @property {MetricPattern5} sma - */ - -/** - * Create a _1dReturns1mSdPattern pattern node - * @param {BrkClientBase} client - * @param {string} acc - Accumulated metric name - * @returns 
{_1dReturns1mSdPattern} - */ -function create_1dReturns1mSdPattern(client, acc) { - return { - sd: createMetricPattern5(client, _m(acc, 'sd')), - sma: createMetricPattern5(client, _m(acc, 'sma')), - }; -} - /** * @template T - * @typedef {Object} MinmaxPattern - * @property {MetricPattern22} max - * @property {MetricPattern22} min - */ - -/** - * Create a MinmaxPattern pattern node - * @template T - * @param {BrkClientBase} client - * @param {string} acc - Accumulated metric name - * @returns {MinmaxPattern} - */ -function createMinmaxPattern(client, acc) { - return { - max: createMetricPattern22(client, _m(acc, 'max')), - min: createMetricPattern22(client, _m(acc, 'min')), - }; -} - -/** - * @template T - * @typedef {Object} SumCumPattern + * @typedef {Object} BlockCountPattern * @property {MetricPattern1} cumulative - * @property {MetricPattern2} sum + * @property {MetricPattern1} sum */ /** - * Create a SumCumPattern pattern node + * Create a BlockCountPattern pattern node * @template T * @param {BrkClientBase} client * @param {string} acc - Accumulated metric name - * @returns {SumCumPattern} + * @returns {BlockCountPattern} */ -function createSumCumPattern(client, acc) { +function createBlockCountPattern(client, acc) { return { cumulative: createMetricPattern1(client, _m(acc, 'cumulative')), - sum: createMetricPattern2(client, _m(acc, 'sum')), - }; -} - -/** - * @template T - * @typedef {Object} IndexesPattern2 - * @property {MetricPattern22} dateindex - * @property {MetricPattern7} rest - */ - -/** - * Create a IndexesPattern2 pattern node - * @template T - * @param {BrkClientBase} client - * @param {string} acc - Accumulated metric name - * @returns {IndexesPattern2} - */ -function createIndexesPattern2(client, acc) { - return { - dateindex: createMetricPattern22(client, acc), - rest: createMetricPattern7(client, _m(acc, 'average')), - }; -} - -/** - * @template T - * @typedef {Object} DifficultyAdjustmentPattern - * @property {MetricPattern26} base - * 
@property {MetricPattern2} rest - */ - -/** - * Create a DifficultyAdjustmentPattern pattern node - * @template T - * @param {BrkClientBase} client - * @param {string} acc - Accumulated metric name - * @returns {DifficultyAdjustmentPattern} - */ -function createDifficultyAdjustmentPattern(client, acc) { - return { - base: createMetricPattern26(client, acc), - rest: createMetricPattern2(client, _m(acc, 'sum')), + sum: createMetricPattern1(client, acc), }; } /** * @typedef {Object} RealizedPriceExtraPattern - * @property {MetricPattern5} ratio + * @property {MetricPattern4} ratio */ /** @@ -3632,7 +3383,7 @@ function createDifficultyAdjustmentPattern(client, acc) { */ function createRealizedPriceExtraPattern(client, acc) { return { - ratio: createMetricPattern5(client, _m(acc, 'ratio')), + ratio: createMetricPattern4(client, _m(acc, 'ratio')), }; } @@ -3640,75 +3391,92 @@ function createRealizedPriceExtraPattern(client, acc) { /** * @typedef {Object} CatalogTree - * @property {CatalogTree_Computed} computed - * @property {CatalogTree_Indexed} indexed + * @property {CatalogTree_Addresses} addresses + * @property {CatalogTree_Blocks} blocks + * @property {CatalogTree_Cointime} cointime + * @property {CatalogTree_Constants} constants + * @property {CatalogTree_Distribution} distribution + * @property {CatalogTree_Indexes} indexes + * @property {CatalogTree_Inputs} inputs + * @property {CatalogTree_Market} market + * @property {CatalogTree_Outputs} outputs + * @property {CatalogTree_Pools} pools + * @property {CatalogTree_Positions} positions + * @property {CatalogTree_Price} price + * @property {CatalogTree_Scripts} scripts + * @property {CatalogTree_Supply} supply + * @property {CatalogTree_Transactions} transactions */ /** - * @typedef {Object} CatalogTree_Computed - * @property {CatalogTree_Computed_Blocks} blocks - * @property {CatalogTree_Computed_Cointime} cointime - * @property {CatalogTree_Computed_Constants} constants - * @property 
{CatalogTree_Computed_Distribution} distribution - * @property {CatalogTree_Computed_Indexes} indexes - * @property {CatalogTree_Computed_Inputs} inputs - * @property {CatalogTree_Computed_Market} market - * @property {CatalogTree_Computed_Outputs} outputs - * @property {CatalogTree_Computed_Pools} pools - * @property {CatalogTree_Computed_Positions} positions - * @property {CatalogTree_Computed_Price} price - * @property {CatalogTree_Computed_Scripts} scripts - * @property {CatalogTree_Computed_Supply} supply - * @property {CatalogTree_Computed_Transactions} transactions + * @typedef {Object} CatalogTree_Addresses + * @property {MetricPattern23} firstP2aaddressindex + * @property {MetricPattern23} firstP2pk33addressindex + * @property {MetricPattern23} firstP2pk65addressindex + * @property {MetricPattern23} firstP2pkhaddressindex + * @property {MetricPattern23} firstP2shaddressindex + * @property {MetricPattern23} firstP2traddressindex + * @property {MetricPattern23} firstP2wpkhaddressindex + * @property {MetricPattern23} firstP2wshaddressindex + * @property {MetricPattern27} p2abytes + * @property {MetricPattern29} p2pk33bytes + * @property {MetricPattern30} p2pk65bytes + * @property {MetricPattern31} p2pkhbytes + * @property {MetricPattern32} p2shbytes + * @property {MetricPattern33} p2trbytes + * @property {MetricPattern34} p2wpkhbytes + * @property {MetricPattern35} p2wshbytes */ /** - * @typedef {Object} CatalogTree_Computed_Blocks - * @property {CatalogTree_Computed_Blocks_Count} count - * @property {CatalogTree_Computed_Blocks_Difficulty} difficulty - * @property {CatalogTree_Computed_Blocks_Halving} halving - * @property {CatalogTree_Computed_Blocks_Interval} interval - * @property {CatalogTree_Computed_Blocks_Mining} mining - * @property {CatalogTree_Computed_Blocks_Rewards} rewards - * @property {CatalogTree_Computed_Blocks_Size} size - * @property {CatalogTree_Computed_Blocks_Time} time - * @property {CatalogTree_Computed_Blocks_Weight} weight + * 
@typedef {Object} CatalogTree_Blocks + * @property {MetricPattern23} blockhash + * @property {CatalogTree_Blocks_Count} count + * @property {CatalogTree_Blocks_Difficulty} difficulty + * @property {CatalogTree_Blocks_Halving} halving + * @property {IntervalPattern} interval + * @property {CatalogTree_Blocks_Mining} mining + * @property {CatalogTree_Blocks_Rewards} rewards + * @property {CatalogTree_Blocks_Size} size + * @property {CatalogTree_Blocks_Time} time + * @property {MetricPattern23} timestamp + * @property {MetricPattern23} totalSize + * @property {CatalogTree_Blocks_Weight} weight */ /** - * @typedef {Object} CatalogTree_Computed_Blocks_Count - * @property {MetricPattern5} _1mBlockCount - * @property {MetricPattern5} _1wBlockCount - * @property {MetricPattern5} _1yBlockCount - * @property {MetricPattern26} _24hBlockCount + * @typedef {Object} CatalogTree_Blocks_Count + * @property {MetricPattern1} _1mBlockCount + * @property {MetricPattern23} _1mStart + * @property {MetricPattern1} _1wBlockCount + * @property {MetricPattern23} _1wStart + * @property {MetricPattern1} _1yBlockCount + * @property {MetricPattern23} _1yStart + * @property {MetricPattern1} _24hBlockCount + * @property {MetricPattern23} _24hStart * @property {BlockCountPattern} blockCount - * @property {MetricPattern5} blockCountTarget + * @property {MetricPattern4} blockCountTarget */ /** - * @typedef {Object} CatalogTree_Computed_Blocks_Difficulty + * @typedef {Object} CatalogTree_Blocks_Difficulty + * @property {MetricPattern23} base * @property {MetricPattern1} blocksBeforeNextDifficultyAdjustment * @property {MetricPattern1} daysBeforeNextDifficultyAdjustment - * @property {MetricPattern5} difficultyepoch + * @property {MetricPattern4} difficultyepoch */ /** - * @typedef {Object} CatalogTree_Computed_Blocks_Halving + * @typedef {Object} CatalogTree_Blocks_Halving * @property {MetricPattern1} blocksBeforeNextHalving * @property {MetricPattern1} daysBeforeNextHalving - * @property 
{MetricPattern5} halvingepoch + * @property {MetricPattern4} halvingepoch */ /** - * @typedef {Object} CatalogTree_Computed_Blocks_Interval - * @property {BlockIntervalPattern} blockInterval - * @property {MetricPattern26} interval - */ - -/** - * @typedef {Object} CatalogTree_Computed_Blocks_Mining + * @typedef {Object} CatalogTree_Blocks_Mining * @property {MetricPattern2} difficulty - * @property {DifficultyAdjustmentPattern} difficultyAdjustment + * @property {MetricPattern1} difficultyAdjustment * @property {MetricPattern1} difficultyAsHash * @property {MetricPattern1} hashPricePhs * @property {MetricPattern1} hashPricePhsMin @@ -3716,10 +3484,10 @@ function createRealizedPriceExtraPattern(client, acc) { * @property {MetricPattern1} hashPriceThs * @property {MetricPattern1} hashPriceThsMin * @property {MetricPattern1} hashRate - * @property {MetricPattern5} hashRate1mSma - * @property {MetricPattern5} hashRate1wSma - * @property {MetricPattern5} hashRate1ySma - * @property {MetricPattern5} hashRate2mSma + * @property {MetricPattern4} hashRate1mSma + * @property {MetricPattern4} hashRate1wSma + * @property {MetricPattern4} hashRate1ySma + * @property {MetricPattern4} hashRate2mSma * @property {MetricPattern1} hashValuePhs * @property {MetricPattern1} hashValuePhsMin * @property {MetricPattern1} hashValueRebound @@ -3728,50 +3496,51 @@ function createRealizedPriceExtraPattern(client, acc) { */ /** - * @typedef {Object} CatalogTree_Computed_Blocks_Rewards - * @property {MetricPattern26} _24hCoinbaseSum - * @property {MetricPattern26} _24hCoinbaseUsdSum + * @typedef {Object} CatalogTree_Blocks_Rewards + * @property {MetricPattern23} _24hCoinbaseSum + * @property {MetricPattern23} _24hCoinbaseUsdSum * @property {CoinbasePattern} coinbase - * @property {MetricPattern22} feeDominance + * @property {MetricPattern19} feeDominance * @property {CoinbasePattern} subsidy - * @property {MetricPattern22} subsidyDominance - * @property {MetricPattern5} subsidyUsd1ySma + * 
@property {MetricPattern19} subsidyDominance + * @property {MetricPattern4} subsidyUsd1ySma * @property {UnclaimedRewardsPattern} unclaimedRewards */ /** - * @typedef {Object} CatalogTree_Computed_Blocks_Size + * @typedef {Object} CatalogTree_Blocks_Size * @property {BlockSizePattern} blockSize * @property {BlockSizePattern} blockVbytes - * @property {MetricPattern26} vbytes + * @property {MetricPattern23} vbytes */ /** - * @typedef {Object} CatalogTree_Computed_Blocks_Time - * @property {MetricPattern26} date - * @property {MetricPattern26} dateFixed + * @typedef {Object} CatalogTree_Blocks_Time + * @property {MetricPattern23} date + * @property {MetricPattern23} dateFixed * @property {MetricPattern2} timestamp - * @property {MetricPattern26} timestampFixed + * @property {MetricPattern23} timestampFixed */ /** - * @typedef {Object} CatalogTree_Computed_Blocks_Weight + * @typedef {Object} CatalogTree_Blocks_Weight + * @property {MetricPattern23} base * @property {BitcoinPattern} blockFullness * @property {BlockSizePattern} blockWeight */ /** - * @typedef {Object} CatalogTree_Computed_Cointime - * @property {CatalogTree_Computed_Cointime_Activity} activity - * @property {CatalogTree_Computed_Cointime_Adjusted} adjusted - * @property {CatalogTree_Computed_Cointime_Cap} cap - * @property {CatalogTree_Computed_Cointime_Pricing} pricing - * @property {CatalogTree_Computed_Cointime_Supply} supply - * @property {CatalogTree_Computed_Cointime_Value} value + * @typedef {Object} CatalogTree_Cointime + * @property {CatalogTree_Cointime_Activity} activity + * @property {CatalogTree_Cointime_Adjusted} adjusted + * @property {CatalogTree_Cointime_Cap} cap + * @property {CatalogTree_Cointime_Pricing} pricing + * @property {CatalogTree_Cointime_Supply} supply + * @property {CatalogTree_Cointime_Value} value */ /** - * @typedef {Object} CatalogTree_Computed_Cointime_Activity + * @typedef {Object} CatalogTree_Cointime_Activity * @property {MetricPattern1} activityToVaultednessRatio 
* @property {BlockCountPattern} coinblocksCreated * @property {BlockCountPattern} coinblocksStored @@ -3780,14 +3549,14 @@ function createRealizedPriceExtraPattern(client, acc) { */ /** - * @typedef {Object} CatalogTree_Computed_Cointime_Adjusted - * @property {MetricPattern5} cointimeAdjInflationRate - * @property {MetricPattern5} cointimeAdjTxBtcVelocity - * @property {MetricPattern5} cointimeAdjTxUsdVelocity + * @typedef {Object} CatalogTree_Cointime_Adjusted + * @property {MetricPattern4} cointimeAdjInflationRate + * @property {MetricPattern4} cointimeAdjTxBtcVelocity + * @property {MetricPattern4} cointimeAdjTxUsdVelocity */ /** - * @typedef {Object} CatalogTree_Computed_Cointime_Cap + * @typedef {Object} CatalogTree_Cointime_Cap * @property {MetricPattern1} activeCap * @property {MetricPattern1} cointimeCap * @property {MetricPattern1} investorCap @@ -3796,7 +3565,7 @@ function createRealizedPriceExtraPattern(client, acc) { */ /** - * @typedef {Object} CatalogTree_Computed_Cointime_Pricing + * @typedef {Object} CatalogTree_Cointime_Pricing * @property {MetricPattern1} activePrice * @property {ActivePriceRatioPattern} activePriceRatio * @property {MetricPattern1} cointimePrice @@ -3808,20 +3577,20 @@ function createRealizedPriceExtraPattern(client, acc) { */ /** - * @typedef {Object} CatalogTree_Computed_Cointime_Supply + * @typedef {Object} CatalogTree_Cointime_Supply * @property {ActiveSupplyPattern} activeSupply * @property {ActiveSupplyPattern} vaultedSupply */ /** - * @typedef {Object} CatalogTree_Computed_Cointime_Value + * @typedef {Object} CatalogTree_Cointime_Value * @property {BlockCountPattern} cointimeValueCreated * @property {BlockCountPattern} cointimeValueDestroyed * @property {BlockCountPattern} cointimeValueStored */ /** - * @typedef {Object} CatalogTree_Computed_Constants + * @typedef {Object} CatalogTree_Constants * @property {MetricPattern3} constant0 * @property {MetricPattern3} constant1 * @property {MetricPattern3} constant100 @@ 
-3843,31 +3612,27 @@ function createRealizedPriceExtraPattern(client, acc) { */ /** - * @typedef {Object} CatalogTree_Computed_Distribution - * @property {MetricPattern1} addrCount - * @property {CatalogTree_Computed_Distribution_AddressCohorts} addressCohorts - * @property {CatalogTree_Computed_Distribution_AddressesData} addressesData - * @property {AddresstypeToHeightToAddrCountPattern} addresstypeToHeightToAddrCount - * @property {AddresstypeToHeightToAddrCountPattern} addresstypeToHeightToEmptyAddrCount - * @property {AddresstypeToHeightToAddrCountPattern} addresstypeToIndexesToAddrCount - * @property {AddresstypeToHeightToAddrCountPattern} addresstypeToIndexesToEmptyAddrCount - * @property {AddresstypeToHeightToAddrCountPattern} anyAddressIndexes - * @property {MetricPattern26} chainState - * @property {MetricPattern1} emptyAddrCount - * @property {MetricPattern42} emptyaddressindex - * @property {MetricPattern41} loadedaddressindex - * @property {CatalogTree_Computed_Distribution_UtxoCohorts} utxoCohorts + * @typedef {Object} CatalogTree_Distribution + * @property {AddrCountPattern} addrCount + * @property {CatalogTree_Distribution_AddressCohorts} addressCohorts + * @property {CatalogTree_Distribution_AddressesData} addressesData + * @property {CatalogTree_Distribution_AnyAddressIndexes} anyAddressIndexes + * @property {MetricPattern23} chainState + * @property {AddrCountPattern} emptyAddrCount + * @property {MetricPattern39} emptyaddressindex + * @property {MetricPattern38} loadedaddressindex + * @property {CatalogTree_Distribution_UtxoCohorts} utxoCohorts */ /** - * @typedef {Object} CatalogTree_Computed_Distribution_AddressCohorts - * @property {CatalogTree_Computed_Distribution_AddressCohorts_AmountRange} amountRange - * @property {CatalogTree_Computed_Distribution_AddressCohorts_GeAmount} geAmount - * @property {CatalogTree_Computed_Distribution_AddressCohorts_LtAmount} ltAmount + * @typedef {Object} CatalogTree_Distribution_AddressCohorts + * @property 
{CatalogTree_Distribution_AddressCohorts_AmountRange} amountRange + * @property {CatalogTree_Distribution_AddressCohorts_GeAmount} geAmount + * @property {CatalogTree_Distribution_AddressCohorts_LtAmount} ltAmount */ /** - * @typedef {Object} CatalogTree_Computed_Distribution_AddressCohorts_AmountRange + * @typedef {Object} CatalogTree_Distribution_AddressCohorts_AmountRange * @property {_0satsPattern} _0sats * @property {_0satsPattern} _100btcTo1kBtc * @property {_0satsPattern} _100kBtcOrMore @@ -3886,7 +3651,7 @@ function createRealizedPriceExtraPattern(client, acc) { */ /** - * @typedef {Object} CatalogTree_Computed_Distribution_AddressCohorts_GeAmount + * @typedef {Object} CatalogTree_Distribution_AddressCohorts_GeAmount * @property {_0satsPattern} _100btc * @property {_0satsPattern} _100kSats * @property {_0satsPattern} _100sats @@ -3903,7 +3668,7 @@ function createRealizedPriceExtraPattern(client, acc) { */ /** - * @typedef {Object} CatalogTree_Computed_Distribution_AddressCohorts_LtAmount + * @typedef {Object} CatalogTree_Distribution_AddressCohorts_LtAmount * @property {_0satsPattern} _100btc * @property {_0satsPattern} _100kBtc * @property {_0satsPattern} _100kSats @@ -3920,28 +3685,40 @@ function createRealizedPriceExtraPattern(client, acc) { */ /** - * @typedef {Object} CatalogTree_Computed_Distribution_AddressesData - * @property {MetricPattern42} empty - * @property {MetricPattern41} loaded + * @typedef {Object} CatalogTree_Distribution_AddressesData + * @property {MetricPattern39} empty + * @property {MetricPattern38} loaded */ /** - * @typedef {Object} CatalogTree_Computed_Distribution_UtxoCohorts - * @property {CatalogTree_Computed_Distribution_UtxoCohorts_AgeRange} ageRange - * @property {CatalogTree_Computed_Distribution_UtxoCohorts_All} all - * @property {CatalogTree_Computed_Distribution_UtxoCohorts_AmountRange} amountRange - * @property {CatalogTree_Computed_Distribution_UtxoCohorts_Epoch} epoch - * @property 
{CatalogTree_Computed_Distribution_UtxoCohorts_GeAmount} geAmount - * @property {CatalogTree_Computed_Distribution_UtxoCohorts_LtAmount} ltAmount - * @property {CatalogTree_Computed_Distribution_UtxoCohorts_MaxAge} maxAge - * @property {CatalogTree_Computed_Distribution_UtxoCohorts_MinAge} minAge - * @property {CatalogTree_Computed_Distribution_UtxoCohorts_Term} term - * @property {CatalogTree_Computed_Distribution_UtxoCohorts_Type} type - * @property {CatalogTree_Computed_Distribution_UtxoCohorts_Year} year + * @typedef {Object} CatalogTree_Distribution_AnyAddressIndexes + * @property {MetricPattern27} p2a + * @property {MetricPattern29} p2pk33 + * @property {MetricPattern30} p2pk65 + * @property {MetricPattern31} p2pkh + * @property {MetricPattern32} p2sh + * @property {MetricPattern33} p2tr + * @property {MetricPattern34} p2wpkh + * @property {MetricPattern35} p2wsh */ /** - * @typedef {Object} CatalogTree_Computed_Distribution_UtxoCohorts_AgeRange + * @typedef {Object} CatalogTree_Distribution_UtxoCohorts + * @property {CatalogTree_Distribution_UtxoCohorts_AgeRange} ageRange + * @property {CatalogTree_Distribution_UtxoCohorts_All} all + * @property {CatalogTree_Distribution_UtxoCohorts_AmountRange} amountRange + * @property {CatalogTree_Distribution_UtxoCohorts_Epoch} epoch + * @property {CatalogTree_Distribution_UtxoCohorts_GeAmount} geAmount + * @property {CatalogTree_Distribution_UtxoCohorts_LtAmount} ltAmount + * @property {CatalogTree_Distribution_UtxoCohorts_MaxAge} maxAge + * @property {CatalogTree_Distribution_UtxoCohorts_MinAge} minAge + * @property {CatalogTree_Distribution_UtxoCohorts_Term} term + * @property {CatalogTree_Distribution_UtxoCohorts_Type} type + * @property {CatalogTree_Distribution_UtxoCohorts_Year} year + */ + +/** + * @typedef {Object} CatalogTree_Distribution_UtxoCohorts_AgeRange * @property {_10yTo12yPattern} _10yTo12y * @property {_10yTo12yPattern} _12yTo15y * @property {_10yTo12yPattern} _1dTo1w @@ -3966,17 +3743,17 @@ function 
createRealizedPriceExtraPattern(client, acc) { */ /** - * @typedef {Object} CatalogTree_Computed_Distribution_UtxoCohorts_All + * @typedef {Object} CatalogTree_Distribution_UtxoCohorts_All * @property {ActivityPattern2} activity * @property {CostBasisPattern2} costBasis * @property {RealizedPattern3} realized - * @property {CatalogTree_Computed_Distribution_UtxoCohorts_All_Relative} relative - * @property {SupplyPattern3} supply + * @property {CatalogTree_Distribution_UtxoCohorts_All_Relative} relative + * @property {SupplyPattern2} supply * @property {UnrealizedPattern} unrealized */ /** - * @typedef {Object} CatalogTree_Computed_Distribution_UtxoCohorts_All_Relative + * @typedef {Object} CatalogTree_Distribution_UtxoCohorts_All_Relative * @property {MetricPattern3} negUnrealizedLossRelToOwnTotalUnrealizedPnl * @property {MetricPattern3} netUnrealizedPnlRelToOwnTotalUnrealizedPnl * @property {MetricPattern3} supplyInLossRelToOwnSupply @@ -3986,7 +3763,7 @@ function createRealizedPriceExtraPattern(client, acc) { */ /** - * @typedef {Object} CatalogTree_Computed_Distribution_UtxoCohorts_AmountRange + * @typedef {Object} CatalogTree_Distribution_UtxoCohorts_AmountRange * @property {_0satsPattern2} _0sats * @property {_0satsPattern2} _100btcTo1kBtc * @property {_0satsPattern2} _100kBtcOrMore @@ -4005,7 +3782,7 @@ function createRealizedPriceExtraPattern(client, acc) { */ /** - * @typedef {Object} CatalogTree_Computed_Distribution_UtxoCohorts_Epoch + * @typedef {Object} CatalogTree_Distribution_UtxoCohorts_Epoch * @property {_0satsPattern2} _0 * @property {_0satsPattern2} _1 * @property {_0satsPattern2} _2 @@ -4014,7 +3791,7 @@ function createRealizedPriceExtraPattern(client, acc) { */ /** - * @typedef {Object} CatalogTree_Computed_Distribution_UtxoCohorts_GeAmount + * @typedef {Object} CatalogTree_Distribution_UtxoCohorts_GeAmount * @property {_100btcPattern} _100btc * @property {_100btcPattern} _100kSats * @property {_100btcPattern} _100sats @@ -4031,7 +3808,7 @@ 
function createRealizedPriceExtraPattern(client, acc) { */ /** - * @typedef {Object} CatalogTree_Computed_Distribution_UtxoCohorts_LtAmount + * @typedef {Object} CatalogTree_Distribution_UtxoCohorts_LtAmount * @property {_100btcPattern} _100btc * @property {_100btcPattern} _100kBtc * @property {_100btcPattern} _100kSats @@ -4048,7 +3825,7 @@ function createRealizedPriceExtraPattern(client, acc) { */ /** - * @typedef {Object} CatalogTree_Computed_Distribution_UtxoCohorts_MaxAge + * @typedef {Object} CatalogTree_Distribution_UtxoCohorts_MaxAge * @property {_10yPattern} _10y * @property {_10yPattern} _12y * @property {_10yPattern} _15y @@ -4070,7 +3847,7 @@ function createRealizedPriceExtraPattern(client, acc) { */ /** - * @typedef {Object} CatalogTree_Computed_Distribution_UtxoCohorts_MinAge + * @typedef {Object} CatalogTree_Distribution_UtxoCohorts_MinAge * @property {_100btcPattern} _10y * @property {_100btcPattern} _12y * @property {_100btcPattern} _1d @@ -4092,33 +3869,33 @@ function createRealizedPriceExtraPattern(client, acc) { */ /** - * @typedef {Object} CatalogTree_Computed_Distribution_UtxoCohorts_Term - * @property {CatalogTree_Computed_Distribution_UtxoCohorts_Term_Long} long - * @property {CatalogTree_Computed_Distribution_UtxoCohorts_Term_Short} short + * @typedef {Object} CatalogTree_Distribution_UtxoCohorts_Term + * @property {CatalogTree_Distribution_UtxoCohorts_Term_Long} long + * @property {CatalogTree_Distribution_UtxoCohorts_Term_Short} short */ /** - * @typedef {Object} CatalogTree_Computed_Distribution_UtxoCohorts_Term_Long + * @typedef {Object} CatalogTree_Distribution_UtxoCohorts_Term_Long * @property {ActivityPattern2} activity * @property {CostBasisPattern2} costBasis * @property {RealizedPattern2} realized * @property {RelativePattern5} relative - * @property {SupplyPattern3} supply + * @property {SupplyPattern2} supply * @property {UnrealizedPattern} unrealized */ /** - * @typedef {Object} 
CatalogTree_Computed_Distribution_UtxoCohorts_Term_Short + * @typedef {Object} CatalogTree_Distribution_UtxoCohorts_Term_Short * @property {ActivityPattern2} activity * @property {CostBasisPattern2} costBasis * @property {RealizedPattern3} realized * @property {RelativePattern5} relative - * @property {SupplyPattern3} supply + * @property {SupplyPattern2} supply * @property {UnrealizedPattern} unrealized */ /** - * @typedef {Object} CatalogTree_Computed_Distribution_UtxoCohorts_Type + * @typedef {Object} CatalogTree_Distribution_UtxoCohorts_Type * @property {_0satsPattern2} empty * @property {_0satsPattern2} p2a * @property {_0satsPattern2} p2ms @@ -4133,7 +3910,7 @@ function createRealizedPriceExtraPattern(client, acc) { */ /** - * @typedef {Object} CatalogTree_Computed_Distribution_UtxoCohorts_Year + * @typedef {Object} CatalogTree_Distribution_UtxoCohorts_Year * @property {_0satsPattern2} _2009 * @property {_0satsPattern2} _2010 * @property {_0satsPattern2} _2011 @@ -4155,113 +3932,118 @@ function createRealizedPriceExtraPattern(client, acc) { */ /** - * @typedef {Object} CatalogTree_Computed_Indexes - * @property {CatalogTree_Computed_Indexes_Address} address - * @property {CatalogTree_Computed_Indexes_Block} block - * @property {CatalogTree_Computed_Indexes_Time} time - * @property {CatalogTree_Computed_Indexes_Transaction} transaction + * @typedef {Object} CatalogTree_Indexes + * @property {CatalogTree_Indexes_Address} address + * @property {CatalogTree_Indexes_Block} block + * @property {CatalogTree_Indexes_Time} time + * @property {CatalogTree_Indexes_Transaction} transaction */ /** - * @typedef {Object} CatalogTree_Computed_Indexes_Address - * @property {MetricPattern25} emptyoutputindex - * @property {MetricPattern28} opreturnindex - * @property {MetricPattern30} p2aaddressindex - * @property {MetricPattern31} p2msoutputindex - * @property {MetricPattern32} p2pk33addressindex - * @property {MetricPattern33} p2pk65addressindex - * @property 
{MetricPattern34} p2pkhaddressindex - * @property {MetricPattern35} p2shaddressindex - * @property {MetricPattern36} p2traddressindex - * @property {MetricPattern37} p2wpkhaddressindex - * @property {MetricPattern38} p2wshaddressindex - * @property {MetricPattern40} unknownoutputindex + * @typedef {Object} CatalogTree_Indexes_Address + * @property {MetricPattern22} emptyoutputindex + * @property {MetricPattern25} opreturnindex + * @property {MetricPattern27} p2aaddressindex + * @property {MetricPattern28} p2msoutputindex + * @property {MetricPattern29} p2pk33addressindex + * @property {MetricPattern30} p2pk65addressindex + * @property {MetricPattern31} p2pkhaddressindex + * @property {MetricPattern32} p2shaddressindex + * @property {MetricPattern33} p2traddressindex + * @property {MetricPattern34} p2wpkhaddressindex + * @property {MetricPattern35} p2wshaddressindex + * @property {MetricPattern37} unknownoutputindex */ /** - * @typedef {Object} CatalogTree_Computed_Indexes_Block - * @property {MetricPattern26} dateindex - * @property {MetricPattern15} difficultyepoch - * @property {MetricPattern14} firstHeight - * @property {MetricPattern16} halvingepoch - * @property {MetricPattern26} height - * @property {MetricPattern24} heightCount - * @property {MetricPattern26} txindexCount + * @typedef {Object} CatalogTree_Indexes_Block + * @property {MetricPattern23} dateindex + * @property {MetricPattern12} difficultyepoch + * @property {MetricPattern11} firstHeight + * @property {MetricPattern13} halvingepoch + * @property {MetricPattern23} height + * @property {MetricPattern21} heightCount + * @property {MetricPattern23} txindexCount */ /** - * @typedef {Object} CatalogTree_Computed_Indexes_Time - * @property {MetricPattern22} date - * @property {MetricPattern22} dateindex - * @property {MetricPattern20} dateindexCount - * @property {MetricPattern13} decadeindex - * @property {MetricPattern20} firstDateindex - * @property {MetricPattern22} firstHeight - * @property 
{MetricPattern9} firstMonthindex - * @property {MetricPattern23} firstYearindex - * @property {MetricPattern22} heightCount - * @property {MetricPattern11} monthindex - * @property {MetricPattern9} monthindexCount - * @property {MetricPattern18} quarterindex - * @property {MetricPattern19} semesterindex - * @property {MetricPattern12} weekindex - * @property {MetricPattern21} yearindex - * @property {MetricPattern23} yearindexCount + * @typedef {Object} CatalogTree_Indexes_Time + * @property {MetricPattern19} date + * @property {MetricPattern19} dateindex + * @property {MetricPattern17} dateindexCount + * @property {MetricPattern10} decadeindex + * @property {MetricPattern17} firstDateindex + * @property {MetricPattern19} firstHeight + * @property {MetricPattern6} firstMonthindex + * @property {MetricPattern20} firstYearindex + * @property {MetricPattern19} heightCount + * @property {MetricPattern8} monthindex + * @property {MetricPattern6} monthindexCount + * @property {MetricPattern15} quarterindex + * @property {MetricPattern16} semesterindex + * @property {MetricPattern9} weekindex + * @property {MetricPattern18} yearindex + * @property {MetricPattern20} yearindexCount */ /** - * @typedef {Object} CatalogTree_Computed_Indexes_Transaction - * @property {MetricPattern39} inputCount - * @property {MetricPattern39} outputCount - * @property {MetricPattern39} txindex - * @property {MetricPattern27} txinindex - * @property {MetricPattern29} txoutindex + * @typedef {Object} CatalogTree_Indexes_Transaction + * @property {MetricPattern36} inputCount + * @property {MetricPattern36} outputCount + * @property {MetricPattern36} txindex + * @property {MetricPattern24} txinindex + * @property {MetricPattern26} txoutindex */ /** - * @typedef {Object} CatalogTree_Computed_Inputs - * @property {CatalogTree_Computed_Inputs_Count} count - * @property {CatalogTree_Computed_Inputs_Spent} spent + * @typedef {Object} CatalogTree_Inputs + * @property {CatalogTree_Inputs_Count} count + 
* @property {MetricPattern23} firstTxinindex + * @property {MetricPattern24} outpoint + * @property {MetricPattern24} outputtype + * @property {CatalogTree_Inputs_Spent} spent + * @property {MetricPattern24} txindex + * @property {MetricPattern24} typeindex */ /** - * @typedef {Object} CatalogTree_Computed_Inputs_Count - * @property {CountPattern2} count + * @typedef {Object} CatalogTree_Inputs_Count + * @property {BlockSizePattern} count */ /** - * @typedef {Object} CatalogTree_Computed_Inputs_Spent - * @property {MetricPattern27} txoutindex - * @property {MetricPattern27} value + * @typedef {Object} CatalogTree_Inputs_Spent + * @property {MetricPattern24} txoutindex + * @property {MetricPattern24} value */ /** - * @typedef {Object} CatalogTree_Computed_Market - * @property {CatalogTree_Computed_Market_Ath} ath - * @property {CatalogTree_Computed_Market_Dca} dca - * @property {CatalogTree_Computed_Market_Indicators} indicators - * @property {CatalogTree_Computed_Market_Lookback} lookback - * @property {CatalogTree_Computed_Market_MovingAverage} movingAverage - * @property {CatalogTree_Computed_Market_Range} range - * @property {CatalogTree_Computed_Market_Returns} returns - * @property {CatalogTree_Computed_Market_Volatility} volatility + * @typedef {Object} CatalogTree_Market + * @property {CatalogTree_Market_Ath} ath + * @property {CatalogTree_Market_Dca} dca + * @property {CatalogTree_Market_Indicators} indicators + * @property {CatalogTree_Market_Lookback} lookback + * @property {CatalogTree_Market_MovingAverage} movingAverage + * @property {CatalogTree_Market_Range} range + * @property {CatalogTree_Market_Returns} returns + * @property {CatalogTree_Market_Volatility} volatility */ /** - * @typedef {Object} CatalogTree_Computed_Market_Ath - * @property {MetricPattern5} daysSincePriceAth - * @property {MetricPattern5} maxDaysBetweenPriceAths - * @property {MetricPattern5} maxYearsBetweenPriceAths + * @typedef {Object} CatalogTree_Market_Ath + * @property 
{MetricPattern4} daysSincePriceAth + * @property {MetricPattern4} maxDaysBetweenPriceAths + * @property {MetricPattern4} maxYearsBetweenPriceAths * @property {MetricPattern3} priceAth * @property {MetricPattern3} priceDrawdown - * @property {MetricPattern5} yearsSincePriceAth + * @property {MetricPattern4} yearsSincePriceAth */ /** - * @typedef {Object} CatalogTree_Computed_Market_Dca + * @typedef {Object} CatalogTree_Market_Dca * @property {ClassAveragePricePattern} classAveragePrice * @property {ClassAveragePricePattern} classReturns - * @property {CatalogTree_Computed_Market_Dca_ClassStack} classStack + * @property {CatalogTree_Market_Dca_ClassStack} classStack * @property {PeriodAveragePricePattern} periodAveragePrice * @property {PeriodCagrPattern} periodCagr * @property {PeriodLumpSumStackPattern} periodLumpSumStack @@ -4270,7 +4052,7 @@ function createRealizedPriceExtraPattern(client, acc) { */ /** - * @typedef {Object} CatalogTree_Computed_Market_Dca_ClassStack + * @typedef {Object} CatalogTree_Market_Dca_ClassStack * @property {ActiveSupplyPattern} _2015 * @property {ActiveSupplyPattern} _2016 * @property {ActiveSupplyPattern} _2017 @@ -4285,35 +4067,35 @@ function createRealizedPriceExtraPattern(client, acc) { */ /** - * @typedef {Object} CatalogTree_Computed_Market_Indicators - * @property {MetricPattern22} gini - * @property {MetricPattern22} macdHistogram - * @property {MetricPattern22} macdLine - * @property {MetricPattern22} macdSignal - * @property {MetricPattern5} nvt - * @property {MetricPattern22} piCycle - * @property {MetricPattern5} puellMultiple - * @property {MetricPattern22} rsi14d - * @property {MetricPattern22} rsi14dMax - * @property {MetricPattern22} rsi14dMin - * @property {MetricPattern22} rsiAverageGain14d - * @property {MetricPattern22} rsiAverageLoss14d - * @property {MetricPattern22} rsiGains - * @property {MetricPattern22} rsiLosses - * @property {MetricPattern22} stochD - * @property {MetricPattern22} stochK - * @property 
{MetricPattern22} stochRsi - * @property {MetricPattern22} stochRsiD - * @property {MetricPattern22} stochRsiK + * @typedef {Object} CatalogTree_Market_Indicators + * @property {MetricPattern19} gini + * @property {MetricPattern19} macdHistogram + * @property {MetricPattern19} macdLine + * @property {MetricPattern19} macdSignal + * @property {MetricPattern4} nvt + * @property {MetricPattern19} piCycle + * @property {MetricPattern4} puellMultiple + * @property {MetricPattern19} rsi14d + * @property {MetricPattern19} rsi14dMax + * @property {MetricPattern19} rsi14dMin + * @property {MetricPattern19} rsiAverageGain14d + * @property {MetricPattern19} rsiAverageLoss14d + * @property {MetricPattern19} rsiGains + * @property {MetricPattern19} rsiLosses + * @property {MetricPattern19} stochD + * @property {MetricPattern19} stochK + * @property {MetricPattern19} stochRsi + * @property {MetricPattern19} stochRsiD + * @property {MetricPattern19} stochRsiK */ /** - * @typedef {Object} CatalogTree_Computed_Market_Lookback + * @typedef {Object} CatalogTree_Market_Lookback * @property {PriceAgoPattern} priceAgo */ /** - * @typedef {Object} CatalogTree_Computed_Market_MovingAverage + * @typedef {Object} CatalogTree_Market_MovingAverage * @property {Price111dSmaPattern} price111dSma * @property {Price111dSmaPattern} price12dEma * @property {Price111dSmaPattern} price13dEma @@ -4328,8 +4110,8 @@ function createRealizedPriceExtraPattern(client, acc) { * @property {Price111dSmaPattern} price1ySma * @property {Price111dSmaPattern} price200dEma * @property {Price111dSmaPattern} price200dSma - * @property {MetricPattern5} price200dSmaX08 - * @property {MetricPattern5} price200dSmaX24 + * @property {MetricPattern4} price200dSmaX08 + * @property {MetricPattern4} price200dSmaX24 * @property {Price111dSmaPattern} price200wEma * @property {Price111dSmaPattern} price200wSma * @property {Price111dSmaPattern} price21dEma @@ -4340,7 +4122,7 @@ function createRealizedPriceExtraPattern(client, acc) 
{ * @property {Price111dSmaPattern} price34dEma * @property {Price111dSmaPattern} price34dSma * @property {Price111dSmaPattern} price350dSma - * @property {MetricPattern5} price350dSmaX2 + * @property {MetricPattern4} price350dSmaX2 * @property {Price111dSmaPattern} price4yEma * @property {Price111dSmaPattern} price4ySma * @property {Price111dSmaPattern} price55dEma @@ -4352,22 +4134,22 @@ function createRealizedPriceExtraPattern(client, acc) { */ /** - * @typedef {Object} CatalogTree_Computed_Market_Range - * @property {MetricPattern5} price1mMax - * @property {MetricPattern5} price1mMin - * @property {MetricPattern5} price1wMax - * @property {MetricPattern5} price1wMin - * @property {MetricPattern5} price1yMax - * @property {MetricPattern5} price1yMin - * @property {MetricPattern5} price2wChoppinessIndex - * @property {MetricPattern5} price2wMax - * @property {MetricPattern5} price2wMin - * @property {MetricPattern22} priceTrueRange - * @property {MetricPattern22} priceTrueRange2wSum + * @typedef {Object} CatalogTree_Market_Range + * @property {MetricPattern4} price1mMax + * @property {MetricPattern4} price1mMin + * @property {MetricPattern4} price1wMax + * @property {MetricPattern4} price1wMin + * @property {MetricPattern4} price1yMax + * @property {MetricPattern4} price1yMin + * @property {MetricPattern4} price2wChoppinessIndex + * @property {MetricPattern4} price2wMax + * @property {MetricPattern4} price2wMin + * @property {MetricPattern19} priceTrueRange + * @property {MetricPattern19} priceTrueRange2wSum */ /** - * @typedef {Object} CatalogTree_Computed_Market_Returns + * @typedef {Object} CatalogTree_Market_Returns * @property {_1dReturns1mSdPattern} _1dReturns1mSd * @property {_1dReturns1mSdPattern} _1dReturns1wSd * @property {_1dReturns1mSdPattern} _1dReturns1ySd @@ -4375,477 +4157,375 @@ function createRealizedPriceExtraPattern(client, acc) { * @property {_1dReturns1mSdPattern} downside1mSd * @property {_1dReturns1mSdPattern} downside1wSd * @property 
{_1dReturns1mSdPattern} downside1ySd - * @property {MetricPattern22} downsideReturns + * @property {MetricPattern19} downsideReturns * @property {PriceAgoPattern} priceReturns */ /** - * @typedef {Object} CatalogTree_Computed_Market_Volatility - * @property {MetricPattern5} price1mVolatility - * @property {MetricPattern5} price1wVolatility - * @property {MetricPattern5} price1yVolatility - * @property {MetricPattern22} sharpe1m - * @property {MetricPattern22} sharpe1w - * @property {MetricPattern22} sharpe1y - * @property {MetricPattern22} sortino1m - * @property {MetricPattern22} sortino1w - * @property {MetricPattern22} sortino1y + * @typedef {Object} CatalogTree_Market_Volatility + * @property {MetricPattern4} price1mVolatility + * @property {MetricPattern4} price1wVolatility + * @property {MetricPattern4} price1yVolatility + * @property {MetricPattern19} sharpe1m + * @property {MetricPattern19} sharpe1w + * @property {MetricPattern19} sharpe1y + * @property {MetricPattern19} sortino1m + * @property {MetricPattern19} sortino1w + * @property {MetricPattern19} sortino1y */ /** - * @typedef {Object} CatalogTree_Computed_Outputs - * @property {CatalogTree_Computed_Outputs_Count} count - * @property {CatalogTree_Computed_Outputs_Spent} spent + * @typedef {Object} CatalogTree_Outputs + * @property {CatalogTree_Outputs_Count} count + * @property {MetricPattern23} firstTxoutindex + * @property {MetricPattern26} outputtype + * @property {CatalogTree_Outputs_Spent} spent + * @property {MetricPattern26} txindex + * @property {MetricPattern26} typeindex + * @property {MetricPattern26} value */ /** - * @typedef {Object} CatalogTree_Computed_Outputs_Count - * @property {CountPattern2} count - * @property {DollarsPattern} utxoCount + * @typedef {Object} CatalogTree_Outputs_Count + * @property {BlockSizePattern} count + * @property {BitcoinPattern} utxoCount */ /** - * @typedef {Object} CatalogTree_Computed_Outputs_Spent - * @property {MetricPattern29} txinindex + * @typedef 
{Object} CatalogTree_Outputs_Spent + * @property {MetricPattern26} txinindex */ /** - * @typedef {Object} CatalogTree_Computed_Pools - * @property {MetricPattern26} pool - * @property {CatalogTree_Computed_Pools_Vecs} vecs + * @typedef {Object} CatalogTree_Pools + * @property {MetricPattern23} pool + * @property {CatalogTree_Pools_Vecs} vecs */ /** - * @typedef {Object} CatalogTree_Computed_Pools_Vecs - * @property {AXbtPattern} aXbt - * @property {AXbtPattern} aaoPool - * @property {AXbtPattern} antPool - * @property {AXbtPattern} arkPool - * @property {AXbtPattern} asicMiner - * @property {AXbtPattern} batPool - * @property {AXbtPattern} bcMonster - * @property {AXbtPattern} bcpoolIo - * @property {AXbtPattern} binancePool - * @property {AXbtPattern} bitClub - * @property {AXbtPattern} bitFuFuPool - * @property {AXbtPattern} bitFury - * @property {AXbtPattern} bitMinter - * @property {AXbtPattern} bitalo - * @property {AXbtPattern} bitcoinAffiliateNetwork - * @property {AXbtPattern} bitcoinCom - * @property {AXbtPattern} bitcoinIndia - * @property {AXbtPattern} bitcoinRussia - * @property {AXbtPattern} bitcoinUkraine - * @property {AXbtPattern} bitfarms - * @property {AXbtPattern} bitparking - * @property {AXbtPattern} bitsolo - * @property {AXbtPattern} bixin - * @property {AXbtPattern} blockFills - * @property {AXbtPattern} braiinsPool - * @property {AXbtPattern} bravoMining - * @property {AXbtPattern} btPool - * @property {AXbtPattern} btcCom - * @property {AXbtPattern} btcDig - * @property {AXbtPattern} btcGuild - * @property {AXbtPattern} btcLab - * @property {AXbtPattern} btcMp - * @property {AXbtPattern} btcNuggets - * @property {AXbtPattern} btcPoolParty - * @property {AXbtPattern} btcServ - * @property {AXbtPattern} btcTop - * @property {AXbtPattern} btcc - * @property {AXbtPattern} bwPool - * @property {AXbtPattern} bytePool - * @property {AXbtPattern} canoe - * @property {AXbtPattern} canoePool - * @property {AXbtPattern} carbonNegative - * @property 
{AXbtPattern} ckPool - * @property {AXbtPattern} cloudHashing - * @property {AXbtPattern} coinLab - * @property {AXbtPattern} cointerra - * @property {AXbtPattern} connectBtc - * @property {AXbtPattern} dPool - * @property {AXbtPattern} dcExploration - * @property {AXbtPattern} dcex - * @property {AXbtPattern} digitalBtc - * @property {AXbtPattern} digitalXMintsy - * @property {AXbtPattern} eclipseMc - * @property {AXbtPattern} eightBaochi - * @property {AXbtPattern} ekanemBtc - * @property {AXbtPattern} eligius - * @property {AXbtPattern} emcdPool - * @property {AXbtPattern} entrustCharityPool - * @property {AXbtPattern} eobot - * @property {AXbtPattern} exxBw - * @property {AXbtPattern} f2Pool - * @property {AXbtPattern} fiftyEightCoin - * @property {AXbtPattern} foundryUsa - * @property {AXbtPattern} futureBitApolloSolo - * @property {AXbtPattern} gbMiners - * @property {AXbtPattern} ghashIo - * @property {AXbtPattern} giveMeCoins - * @property {AXbtPattern} goGreenLight - * @property {AXbtPattern} haoZhuZhu - * @property {AXbtPattern} haominer - * @property {AXbtPattern} hashBx - * @property {AXbtPattern} hashPool - * @property {AXbtPattern} helix - * @property {AXbtPattern} hhtt - * @property {AXbtPattern} hotPool - * @property {AXbtPattern} hummerpool - * @property {AXbtPattern} huobiPool - * @property {AXbtPattern} innopolisTech - * @property {AXbtPattern} kanoPool - * @property {AXbtPattern} kncMiner - * @property {AXbtPattern} kuCoinPool - * @property {AXbtPattern} lubianCom - * @property {AXbtPattern} luckyPool - * @property {AXbtPattern} luxor - * @property {AXbtPattern} maraPool - * @property {AXbtPattern} maxBtc - * @property {AXbtPattern} maxiPool - * @property {AXbtPattern} megaBigPower - * @property {AXbtPattern} minerium - * @property {AXbtPattern} miningCity - * @property {AXbtPattern} miningDutch - * @property {AXbtPattern} miningKings - * @property {AXbtPattern} miningSquared - * @property {AXbtPattern} mmpool - * @property {AXbtPattern} mtRed - 
* @property {AXbtPattern} multiCoinCo - * @property {AXbtPattern} multipool - * @property {AXbtPattern} myBtcCoinPool - * @property {AXbtPattern} neopool - * @property {AXbtPattern} nexious - * @property {AXbtPattern} niceHash - * @property {AXbtPattern} nmcBit - * @property {AXbtPattern} novaBlock - * @property {AXbtPattern} ocean - * @property {AXbtPattern} okExPool - * @property {AXbtPattern} okMiner - * @property {AXbtPattern} okkong - * @property {AXbtPattern} okpoolTop - * @property {AXbtPattern} oneHash - * @property {AXbtPattern} oneM1x - * @property {AXbtPattern} oneThash - * @property {AXbtPattern} ozCoin - * @property {AXbtPattern} pHashIo - * @property {AXbtPattern} parasite - * @property {AXbtPattern} patels - * @property {AXbtPattern} pegaPool - * @property {AXbtPattern} phoenix - * @property {AXbtPattern} polmine - * @property {AXbtPattern} pool175btc - * @property {AXbtPattern} pool50btc - * @property {AXbtPattern} poolin - * @property {AXbtPattern} portlandHodl - * @property {AXbtPattern} publicPool - * @property {AXbtPattern} pureBtcCom - * @property {AXbtPattern} rawpool - * @property {AXbtPattern} rigPool - * @property {AXbtPattern} sbiCrypto - * @property {AXbtPattern} secPool - * @property {AXbtPattern} secretSuperstar - * @property {AXbtPattern} sevenPool - * @property {AXbtPattern} shawnP0wers - * @property {AXbtPattern} sigmapoolCom - * @property {AXbtPattern} simplecoinUs - * @property {AXbtPattern} soloCk - * @property {AXbtPattern} spiderPool - * @property {AXbtPattern} stMiningCorp - * @property {AXbtPattern} tangpool - * @property {AXbtPattern} tatmasPool - * @property {AXbtPattern} tbDice - * @property {AXbtPattern} telco214 - * @property {AXbtPattern} terraPool - * @property {AXbtPattern} tiger - * @property {AXbtPattern} tigerpoolNet - * @property {AXbtPattern} titan - * @property {AXbtPattern} transactionCoinMining - * @property {AXbtPattern} trickysBtcPool - * @property {AXbtPattern} tripleMining - * @property {AXbtPattern} 
twentyOneInc - * @property {AXbtPattern} ultimusPool - * @property {AXbtPattern} unknown - * @property {AXbtPattern} unomp - * @property {AXbtPattern} viaBtc - * @property {AXbtPattern} waterhole - * @property {AXbtPattern} wayiCn - * @property {AXbtPattern} whitePool - * @property {AXbtPattern} wk057 - * @property {AXbtPattern} yourbtcNet - * @property {AXbtPattern} zulupool + * @typedef {Object} CatalogTree_Pools_Vecs + * @property {AaopoolPattern} aaopool + * @property {AaopoolPattern} antpool + * @property {AaopoolPattern} arkpool + * @property {AaopoolPattern} asicminer + * @property {AaopoolPattern} axbt + * @property {AaopoolPattern} batpool + * @property {AaopoolPattern} bcmonster + * @property {AaopoolPattern} bcpoolio + * @property {AaopoolPattern} binancepool + * @property {AaopoolPattern} bitalo + * @property {AaopoolPattern} bitclub + * @property {AaopoolPattern} bitcoinaffiliatenetwork + * @property {AaopoolPattern} bitcoincom + * @property {AaopoolPattern} bitcoinindia + * @property {AaopoolPattern} bitcoinrussia + * @property {AaopoolPattern} bitcoinukraine + * @property {AaopoolPattern} bitfarms + * @property {AaopoolPattern} bitfufupool + * @property {AaopoolPattern} bitfury + * @property {AaopoolPattern} bitminter + * @property {AaopoolPattern} bitparking + * @property {AaopoolPattern} bitsolo + * @property {AaopoolPattern} bixin + * @property {AaopoolPattern} blockfills + * @property {AaopoolPattern} braiinspool + * @property {AaopoolPattern} bravomining + * @property {AaopoolPattern} btcc + * @property {AaopoolPattern} btccom + * @property {AaopoolPattern} btcdig + * @property {AaopoolPattern} btcguild + * @property {AaopoolPattern} btclab + * @property {AaopoolPattern} btcmp + * @property {AaopoolPattern} btcnuggets + * @property {AaopoolPattern} btcpoolparty + * @property {AaopoolPattern} btcserv + * @property {AaopoolPattern} btctop + * @property {AaopoolPattern} btpool + * @property {AaopoolPattern} bwpool + * @property {AaopoolPattern} 
bytepool + * @property {AaopoolPattern} canoe + * @property {AaopoolPattern} canoepool + * @property {AaopoolPattern} carbonnegative + * @property {AaopoolPattern} ckpool + * @property {AaopoolPattern} cloudhashing + * @property {AaopoolPattern} coinlab + * @property {AaopoolPattern} cointerra + * @property {AaopoolPattern} connectbtc + * @property {AaopoolPattern} dcex + * @property {AaopoolPattern} dcexploration + * @property {AaopoolPattern} digitalbtc + * @property {AaopoolPattern} digitalxmintsy + * @property {AaopoolPattern} dpool + * @property {AaopoolPattern} eclipsemc + * @property {AaopoolPattern} eightbaochi + * @property {AaopoolPattern} ekanembtc + * @property {AaopoolPattern} eligius + * @property {AaopoolPattern} emcdpool + * @property {AaopoolPattern} entrustcharitypool + * @property {AaopoolPattern} eobot + * @property {AaopoolPattern} exxbw + * @property {AaopoolPattern} f2pool + * @property {AaopoolPattern} fiftyeightcoin + * @property {AaopoolPattern} foundryusa + * @property {AaopoolPattern} futurebitapollosolo + * @property {AaopoolPattern} gbminers + * @property {AaopoolPattern} ghashio + * @property {AaopoolPattern} givemecoins + * @property {AaopoolPattern} gogreenlight + * @property {AaopoolPattern} haominer + * @property {AaopoolPattern} haozhuzhu + * @property {AaopoolPattern} hashbx + * @property {AaopoolPattern} hashpool + * @property {AaopoolPattern} helix + * @property {AaopoolPattern} hhtt + * @property {AaopoolPattern} hotpool + * @property {AaopoolPattern} hummerpool + * @property {AaopoolPattern} huobipool + * @property {AaopoolPattern} innopolistech + * @property {AaopoolPattern} kanopool + * @property {AaopoolPattern} kncminer + * @property {AaopoolPattern} kucoinpool + * @property {AaopoolPattern} lubiancom + * @property {AaopoolPattern} luckypool + * @property {AaopoolPattern} luxor + * @property {AaopoolPattern} marapool + * @property {AaopoolPattern} maxbtc + * @property {AaopoolPattern} maxipool + * @property 
{AaopoolPattern} megabigpower + * @property {AaopoolPattern} minerium + * @property {AaopoolPattern} miningcity + * @property {AaopoolPattern} miningdutch + * @property {AaopoolPattern} miningkings + * @property {AaopoolPattern} miningsquared + * @property {AaopoolPattern} mmpool + * @property {AaopoolPattern} mtred + * @property {AaopoolPattern} multicoinco + * @property {AaopoolPattern} multipool + * @property {AaopoolPattern} mybtccoinpool + * @property {AaopoolPattern} neopool + * @property {AaopoolPattern} nexious + * @property {AaopoolPattern} nicehash + * @property {AaopoolPattern} nmcbit + * @property {AaopoolPattern} novablock + * @property {AaopoolPattern} ocean + * @property {AaopoolPattern} okexpool + * @property {AaopoolPattern} okkong + * @property {AaopoolPattern} okminer + * @property {AaopoolPattern} okpooltop + * @property {AaopoolPattern} onehash + * @property {AaopoolPattern} onem1x + * @property {AaopoolPattern} onethash + * @property {AaopoolPattern} ozcoin + * @property {AaopoolPattern} parasite + * @property {AaopoolPattern} patels + * @property {AaopoolPattern} pegapool + * @property {AaopoolPattern} phashio + * @property {AaopoolPattern} phoenix + * @property {AaopoolPattern} polmine + * @property {AaopoolPattern} pool175btc + * @property {AaopoolPattern} pool50btc + * @property {AaopoolPattern} poolin + * @property {AaopoolPattern} portlandhodl + * @property {AaopoolPattern} publicpool + * @property {AaopoolPattern} purebtccom + * @property {AaopoolPattern} rawpool + * @property {AaopoolPattern} rigpool + * @property {AaopoolPattern} sbicrypto + * @property {AaopoolPattern} secpool + * @property {AaopoolPattern} secretsuperstar + * @property {AaopoolPattern} sevenpool + * @property {AaopoolPattern} shawnp0wers + * @property {AaopoolPattern} sigmapoolcom + * @property {AaopoolPattern} simplecoinus + * @property {AaopoolPattern} solock + * @property {AaopoolPattern} spiderpool + * @property {AaopoolPattern} stminingcorp + * @property 
{AaopoolPattern} tangpool + * @property {AaopoolPattern} tatmaspool + * @property {AaopoolPattern} tbdice + * @property {AaopoolPattern} telco214 + * @property {AaopoolPattern} terrapool + * @property {AaopoolPattern} tiger + * @property {AaopoolPattern} tigerpoolnet + * @property {AaopoolPattern} titan + * @property {AaopoolPattern} transactioncoinmining + * @property {AaopoolPattern} trickysbtcpool + * @property {AaopoolPattern} triplemining + * @property {AaopoolPattern} twentyoneinc + * @property {AaopoolPattern} ultimuspool + * @property {AaopoolPattern} unknown + * @property {AaopoolPattern} unomp + * @property {AaopoolPattern} viabtc + * @property {AaopoolPattern} waterhole + * @property {AaopoolPattern} wayicn + * @property {AaopoolPattern} whitepool + * @property {AaopoolPattern} wk057 + * @property {AaopoolPattern} yourbtcnet + * @property {AaopoolPattern} zulupool */ /** - * @typedef {Object} CatalogTree_Computed_Positions - * @property {MetricPattern17} position + * @typedef {Object} CatalogTree_Positions + * @property {MetricPattern14} position */ /** - * @typedef {Object} CatalogTree_Computed_Price - * @property {CatalogTree_Computed_Price_Ohlc} ohlc - * @property {CatalogTree_Computed_Price_Sats} sats - * @property {CatalogTree_Computed_Price_Usd} usd + * @typedef {Object} CatalogTree_Price + * @property {CatalogTree_Price_Cents} cents + * @property {CatalogTree_Price_Sats} sats + * @property {CatalogTree_Price_Usd} usd */ /** - * @typedef {Object} CatalogTree_Computed_Price_Ohlc - * @property {MetricPattern10} ohlcInCents + * @typedef {Object} CatalogTree_Price_Cents + * @property {MetricPattern7} ohlc */ /** - * @typedef {Object} CatalogTree_Computed_Price_Sats - * @property {MetricPattern1} priceCloseInSats - * @property {PriceHighInSatsPattern} priceHighInSats - * @property {PriceLowInSatsPattern} priceLowInSats - * @property {MetricPattern1} priceOhlcInSats - * @property {MetricPattern1} priceOpenInSats + * @typedef {Object} 
CatalogTree_Price_Sats + * @property {MetricPattern1} priceCloseSats + * @property {PriceHighSatsPattern} priceHighSats + * @property {PriceHighSatsPattern} priceLowSats + * @property {MetricPattern1} priceOhlcSats + * @property {MetricPattern1} priceOpenSats */ /** - * @typedef {Object} CatalogTree_Computed_Price_Usd + * @typedef {Object} CatalogTree_Price_Usd * @property {MetricPattern1} priceClose - * @property {MetricPattern10} priceCloseInCents - * @property {PriceHighInSatsPattern} priceHigh - * @property {MetricPattern10} priceHighInCents - * @property {PriceLowInSatsPattern} priceLow - * @property {MetricPattern10} priceLowInCents + * @property {MetricPattern7} priceCloseCents + * @property {PriceHighSatsPattern} priceHigh + * @property {MetricPattern7} priceHighCents + * @property {PriceHighSatsPattern} priceLow + * @property {MetricPattern7} priceLowCents * @property {MetricPattern1} priceOhlc * @property {MetricPattern1} priceOpen - * @property {MetricPattern10} priceOpenInCents + * @property {MetricPattern7} priceOpenCents */ /** - * @typedef {Object} CatalogTree_Computed_Scripts - * @property {CatalogTree_Computed_Scripts_Count} count - * @property {CatalogTree_Computed_Scripts_Value} value + * @typedef {Object} CatalogTree_Scripts + * @property {CatalogTree_Scripts_Count} count + * @property {MetricPattern22} emptyToTxindex + * @property {MetricPattern23} firstEmptyoutputindex + * @property {MetricPattern23} firstOpreturnindex + * @property {MetricPattern23} firstP2msoutputindex + * @property {MetricPattern23} firstUnknownoutputindex + * @property {MetricPattern25} opreturnToTxindex + * @property {MetricPattern28} p2msToTxindex + * @property {MetricPattern37} unknownToTxindex + * @property {CatalogTree_Scripts_Value} value */ /** - * @typedef {Object} CatalogTree_Computed_Scripts_Count - * @property {DollarsPattern} emptyoutputCount - * @property {DollarsPattern} opreturnCount - * @property {DollarsPattern} p2aCount - * @property {DollarsPattern} 
p2msCount - * @property {DollarsPattern} p2pk33Count - * @property {DollarsPattern} p2pk65Count - * @property {DollarsPattern} p2pkhCount - * @property {DollarsPattern} p2shCount - * @property {DollarsPattern} p2trCount - * @property {DollarsPattern} p2wpkhCount - * @property {DollarsPattern} p2wshCount - * @property {BlockCountPattern} segwitAdoption - * @property {DollarsPattern} segwitCount - * @property {BlockCountPattern} taprootAdoption - * @property {DollarsPattern} unknownoutputCount + * @typedef {Object} CatalogTree_Scripts_Count + * @property {BitcoinPattern} emptyoutput + * @property {BitcoinPattern} opreturn + * @property {BitcoinPattern} p2a + * @property {BitcoinPattern} p2ms + * @property {BitcoinPattern} p2pk33 + * @property {BitcoinPattern} p2pk65 + * @property {BitcoinPattern} p2pkh + * @property {BitcoinPattern} p2sh + * @property {BitcoinPattern} p2tr + * @property {BitcoinPattern} p2wpkh + * @property {BitcoinPattern} p2wsh + * @property {BitcoinPattern} segwit + * @property {SatsPattern} segwitAdoption + * @property {SatsPattern} taprootAdoption + * @property {BitcoinPattern} unknownoutput */ /** - * @typedef {Object} CatalogTree_Computed_Scripts_Value - * @property {CoinbasePattern} opreturnValue + * @typedef {Object} CatalogTree_Scripts_Value + * @property {CoinbasePattern} opreturn */ /** - * @typedef {Object} CatalogTree_Computed_Supply - * @property {CatalogTree_Computed_Supply_Burned} burned - * @property {CatalogTree_Computed_Supply_Circulating} circulating - * @property {CatalogTree_Computed_Supply_Inflation} inflation - * @property {CatalogTree_Computed_Supply_MarketCap} marketCap - * @property {CatalogTree_Computed_Supply_Velocity} velocity + * @typedef {Object} CatalogTree_Supply + * @property {CatalogTree_Supply_Burned} burned + * @property {ActiveSupplyPattern} circulating + * @property {MetricPattern4} inflation + * @property {MetricPattern3} marketCap + * @property {CatalogTree_Supply_Velocity} velocity */ /** - * @typedef 
{Object} CatalogTree_Computed_Supply_Burned + * @typedef {Object} CatalogTree_Supply_Burned * @property {UnclaimedRewardsPattern} opreturn * @property {UnclaimedRewardsPattern} unspendable */ /** - * @typedef {Object} CatalogTree_Computed_Supply_Circulating - * @property {MetricPattern26} btc - * @property {ActiveSupplyPattern} indexes - * @property {MetricPattern26} sats - * @property {MetricPattern26} usd + * @typedef {Object} CatalogTree_Supply_Velocity + * @property {MetricPattern4} btc + * @property {MetricPattern4} usd */ /** - * @typedef {Object} CatalogTree_Computed_Supply_Inflation - * @property {IndexesPattern2} indexes + * @typedef {Object} CatalogTree_Transactions + * @property {MetricPattern36} baseSize + * @property {CatalogTree_Transactions_Count} count + * @property {CatalogTree_Transactions_Fees} fees + * @property {MetricPattern23} firstTxindex + * @property {MetricPattern36} firstTxinindex + * @property {MetricPattern36} firstTxoutindex + * @property {MetricPattern36} height + * @property {MetricPattern36} isExplicitlyRbf + * @property {MetricPattern36} rawlocktime + * @property {CatalogTree_Transactions_Size} size + * @property {MetricPattern36} totalSize + * @property {MetricPattern36} txid + * @property {MetricPattern36} txversion + * @property {CatalogTree_Transactions_Versions} versions + * @property {CatalogTree_Transactions_Volume} volume */ /** - * @typedef {Object} CatalogTree_Computed_Supply_MarketCap - * @property {MetricPattern26} height - * @property {MetricPattern5} indexes + * @typedef {Object} CatalogTree_Transactions_Count + * @property {MetricPattern36} isCoinbase + * @property {BitcoinPattern} txCount */ /** - * @typedef {Object} CatalogTree_Computed_Supply_Velocity - * @property {IndexesPattern2} btc - * @property {IndexesPattern2} usd + * @typedef {Object} CatalogTree_Transactions_Fees + * @property {CatalogTree_Transactions_Fees_Fee} fee + * @property {IntervalPattern} feeRate + * @property {MetricPattern36} inputValue + * 
@property {MetricPattern36} outputValue */ /** - * @typedef {Object} CatalogTree_Computed_Transactions - * @property {CatalogTree_Computed_Transactions_Count} count - * @property {CatalogTree_Computed_Transactions_Fees} fees - * @property {CatalogTree_Computed_Transactions_Size} size - * @property {CatalogTree_Computed_Transactions_Versions} versions - * @property {CatalogTree_Computed_Transactions_Volume} volume + * @typedef {Object} CatalogTree_Transactions_Fees_Fee + * @property {BlockSizePattern} bitcoin + * @property {BlockSizePattern} dollars + * @property {BitcoinPattern} sats */ /** - * @typedef {Object} CatalogTree_Computed_Transactions_Count - * @property {MetricPattern39} isCoinbase - * @property {DollarsPattern} txCount + * @typedef {Object} CatalogTree_Transactions_Size + * @property {TxVsizePattern} txVsize + * @property {TxVsizePattern} txWeight + * @property {MetricPattern36} vsize + * @property {MetricPattern36} weight */ /** - * @typedef {Object} CatalogTree_Computed_Transactions_Fees - * @property {CatalogTree_Computed_Transactions_Fees_Fee} fee - * @property {CatalogTree_Computed_Transactions_Fees_FeeRate} feeRate - * @property {MetricPattern39} inputValue - * @property {MetricPattern39} outputValue - */ - -/** - * @typedef {Object} CatalogTree_Computed_Transactions_Fees_Fee - * @property {MetricPattern39} base - * @property {CatalogTree_Computed_Transactions_Fees_Fee_Bitcoin} bitcoin - * @property {CountPattern2} dollars - * @property {CountPattern2} sats - */ - -/** - * @typedef {Object} CatalogTree_Computed_Transactions_Fees_Fee_Bitcoin - * @property {MetricPattern1} average - * @property {MetricPattern1} cumulative - * @property {MetricPattern1} max - * @property {MetricPattern1} min - * @property {MetricPattern1} sum - */ - -/** - * @typedef {Object} CatalogTree_Computed_Transactions_Fees_FeeRate - * @property {MetricPattern1} average - * @property {MetricPattern39} base - * @property {MetricPattern1} max - * @property {MetricPattern1} min 
- * @property {PercentilesPattern} percentiles - */ - -/** - * @typedef {Object} CatalogTree_Computed_Transactions_Size - * @property {BlockIntervalPattern} txVsize - * @property {BlockIntervalPattern} txWeight - * @property {MetricPattern39} vsize - * @property {MetricPattern39} weight - */ - -/** - * @typedef {Object} CatalogTree_Computed_Transactions_Versions + * @typedef {Object} CatalogTree_Transactions_Versions * @property {BlockCountPattern} txV1 * @property {BlockCountPattern} txV2 * @property {BlockCountPattern} txV3 */ /** - * @typedef {Object} CatalogTree_Computed_Transactions_Volume - * @property {MetricPattern5} annualizedVolume - * @property {MetricPattern5} annualizedVolumeBtc - * @property {MetricPattern5} annualizedVolumeUsd - * @property {MetricPattern5} inputsPerSec - * @property {MetricPattern5} outputsPerSec - * @property {CatalogTree_Computed_Transactions_Volume_SentSum} sentSum - * @property {MetricPattern5} txPerSec - */ - -/** - * @typedef {Object} CatalogTree_Computed_Transactions_Volume_SentSum - * @property {MetricPattern1} bitcoin - * @property {DifficultyAdjustmentPattern} dollars - * @property {DifficultyAdjustmentPattern} sats - */ - -/** - * @typedef {Object} CatalogTree_Indexed - * @property {CatalogTree_Indexed_Address} address - * @property {CatalogTree_Indexed_Block} block - * @property {CatalogTree_Indexed_Output} output - * @property {CatalogTree_Indexed_Tx} tx - * @property {CatalogTree_Indexed_Txin} txin - * @property {CatalogTree_Indexed_Txout} txout - */ - -/** - * @typedef {Object} CatalogTree_Indexed_Address - * @property {MetricPattern26} firstP2aaddressindex - * @property {MetricPattern26} firstP2pk33addressindex - * @property {MetricPattern26} firstP2pk65addressindex - * @property {MetricPattern26} firstP2pkhaddressindex - * @property {MetricPattern26} firstP2shaddressindex - * @property {MetricPattern26} firstP2traddressindex - * @property {MetricPattern26} firstP2wpkhaddressindex - * @property {MetricPattern26} 
firstP2wshaddressindex - * @property {MetricPattern30} p2abytes - * @property {MetricPattern32} p2pk33bytes - * @property {MetricPattern33} p2pk65bytes - * @property {MetricPattern34} p2pkhbytes - * @property {MetricPattern35} p2shbytes - * @property {MetricPattern36} p2trbytes - * @property {MetricPattern37} p2wpkhbytes - * @property {MetricPattern38} p2wshbytes - */ - -/** - * @typedef {Object} CatalogTree_Indexed_Block - * @property {MetricPattern26} blockhash - * @property {MetricPattern26} difficulty - * @property {MetricPattern26} timestamp - * @property {MetricPattern26} totalSize - * @property {MetricPattern26} weight - */ - -/** - * @typedef {Object} CatalogTree_Indexed_Output - * @property {MetricPattern26} firstEmptyoutputindex - * @property {MetricPattern26} firstOpreturnindex - * @property {MetricPattern26} firstP2msoutputindex - * @property {MetricPattern26} firstUnknownoutputindex - * @property {MetricPattern8} txindex - */ - -/** - * @typedef {Object} CatalogTree_Indexed_Tx - * @property {MetricPattern39} baseSize - * @property {MetricPattern26} firstTxindex - * @property {MetricPattern39} firstTxinindex - * @property {MetricPattern39} firstTxoutindex - * @property {MetricPattern39} height - * @property {MetricPattern39} isExplicitlyRbf - * @property {MetricPattern39} rawlocktime - * @property {MetricPattern39} totalSize - * @property {MetricPattern39} txid - * @property {MetricPattern39} txversion - */ - -/** - * @typedef {Object} CatalogTree_Indexed_Txin - * @property {MetricPattern26} firstTxinindex - * @property {MetricPattern27} outpoint - * @property {MetricPattern27} outputtype - * @property {MetricPattern27} txindex - * @property {MetricPattern27} typeindex - */ - -/** - * @typedef {Object} CatalogTree_Indexed_Txout - * @property {MetricPattern26} firstTxoutindex - * @property {MetricPattern29} outputtype - * @property {MetricPattern29} txindex - * @property {MetricPattern29} typeindex - * @property {MetricPattern29} value + * @typedef 
{Object} CatalogTree_Transactions_Volume + * @property {MetricPattern4} annualizedVolume + * @property {MetricPattern4} annualizedVolumeBtc + * @property {MetricPattern4} annualizedVolumeUsd + * @property {MetricPattern4} inputsPerSec + * @property {MetricPattern4} outputsPerSec + * @property {ActiveSupplyPattern} sentSum + * @property {MetricPattern4} txPerSec */ /** @@ -5047,702 +4727,702 @@ class BrkClient extends BrkClientBase { }); TERM_NAMES = /** @type {const} */ ({ - "short": { - "id": "sth", - "short": "STH", - "long": "Short Term Holders" - }, "long": { "id": "lth", - "short": "LTH", - "long": "Long Term Holders" + "long": "Long Term Holders", + "short": "LTH" + }, + "short": { + "id": "sth", + "long": "Short Term Holders", + "short": "STH" } }); EPOCH_NAMES = /** @type {const} */ ({ "_0": { "id": "epoch_0", - "short": "Epoch 0", - "long": "Epoch 0" + "long": "Epoch 0", + "short": "Epoch 0" }, "_1": { "id": "epoch_1", - "short": "Epoch 1", - "long": "Epoch 1" + "long": "Epoch 1", + "short": "Epoch 1" }, "_2": { "id": "epoch_2", - "short": "Epoch 2", - "long": "Epoch 2" + "long": "Epoch 2", + "short": "Epoch 2" }, "_3": { "id": "epoch_3", - "short": "Epoch 3", - "long": "Epoch 3" + "long": "Epoch 3", + "short": "Epoch 3" }, "_4": { "id": "epoch_4", - "short": "Epoch 4", - "long": "Epoch 4" + "long": "Epoch 4", + "short": "Epoch 4" } }); YEAR_NAMES = /** @type {const} */ ({ "_2009": { "id": "year_2009", - "short": "2009", - "long": "Year 2009" + "long": "Year 2009", + "short": "2009" }, "_2010": { "id": "year_2010", - "short": "2010", - "long": "Year 2010" + "long": "Year 2010", + "short": "2010" }, "_2011": { "id": "year_2011", - "short": "2011", - "long": "Year 2011" + "long": "Year 2011", + "short": "2011" }, "_2012": { "id": "year_2012", - "short": "2012", - "long": "Year 2012" + "long": "Year 2012", + "short": "2012" }, "_2013": { "id": "year_2013", - "short": "2013", - "long": "Year 2013" + "long": "Year 2013", + "short": "2013" }, "_2014": { "id": 
"year_2014", - "short": "2014", - "long": "Year 2014" + "long": "Year 2014", + "short": "2014" }, "_2015": { "id": "year_2015", - "short": "2015", - "long": "Year 2015" + "long": "Year 2015", + "short": "2015" }, "_2016": { "id": "year_2016", - "short": "2016", - "long": "Year 2016" + "long": "Year 2016", + "short": "2016" }, "_2017": { "id": "year_2017", - "short": "2017", - "long": "Year 2017" + "long": "Year 2017", + "short": "2017" }, "_2018": { "id": "year_2018", - "short": "2018", - "long": "Year 2018" + "long": "Year 2018", + "short": "2018" }, "_2019": { "id": "year_2019", - "short": "2019", - "long": "Year 2019" + "long": "Year 2019", + "short": "2019" }, "_2020": { "id": "year_2020", - "short": "2020", - "long": "Year 2020" + "long": "Year 2020", + "short": "2020" }, "_2021": { "id": "year_2021", - "short": "2021", - "long": "Year 2021" + "long": "Year 2021", + "short": "2021" }, "_2022": { "id": "year_2022", - "short": "2022", - "long": "Year 2022" + "long": "Year 2022", + "short": "2022" }, "_2023": { "id": "year_2023", - "short": "2023", - "long": "Year 2023" + "long": "Year 2023", + "short": "2023" }, "_2024": { "id": "year_2024", - "short": "2024", - "long": "Year 2024" + "long": "Year 2024", + "short": "2024" }, "_2025": { "id": "year_2025", - "short": "2025", - "long": "Year 2025" + "long": "Year 2025", + "short": "2025" }, "_2026": { "id": "year_2026", - "short": "2026", - "long": "Year 2026" + "long": "Year 2026", + "short": "2026" } }); SPENDABLE_TYPE_NAMES = /** @type {const} */ ({ - "p2pk65": { - "id": "p2pk65", - "short": "P2PK65", - "long": "Pay to Public Key (65 bytes)" - }, - "p2pk33": { - "id": "p2pk33", - "short": "P2PK33", - "long": "Pay to Public Key (33 bytes)" - }, - "p2pkh": { - "id": "p2pkh", - "short": "P2PKH", - "long": "Pay to Public Key Hash" - }, - "p2ms": { - "id": "p2ms", - "short": "P2MS", - "long": "Pay to Multisig" - }, - "p2sh": { - "id": "p2sh", - "short": "P2SH", - "long": "Pay to Script Hash" - }, - "p2wpkh": { - 
"id": "p2wpkh", - "short": "P2WPKH", - "long": "Pay to Witness Public Key Hash" - }, - "p2wsh": { - "id": "p2wsh", - "short": "P2WSH", - "long": "Pay to Witness Script Hash" - }, - "p2tr": { - "id": "p2tr", - "short": "P2TR", - "long": "Pay to Taproot" + "empty": { + "id": "empty_outputs", + "long": "Empty Output", + "short": "Empty" }, "p2a": { "id": "p2a", - "short": "P2A", - "long": "Pay to Anchor" + "long": "Pay to Anchor", + "short": "P2A" + }, + "p2ms": { + "id": "p2ms", + "long": "Pay to Multisig", + "short": "P2MS" + }, + "p2pk33": { + "id": "p2pk33", + "long": "Pay to Public Key (33 bytes)", + "short": "P2PK33" + }, + "p2pk65": { + "id": "p2pk65", + "long": "Pay to Public Key (65 bytes)", + "short": "P2PK65" + }, + "p2pkh": { + "id": "p2pkh", + "long": "Pay to Public Key Hash", + "short": "P2PKH" + }, + "p2sh": { + "id": "p2sh", + "long": "Pay to Script Hash", + "short": "P2SH" + }, + "p2tr": { + "id": "p2tr", + "long": "Pay to Taproot", + "short": "P2TR" + }, + "p2wpkh": { + "id": "p2wpkh", + "long": "Pay to Witness Public Key Hash", + "short": "P2WPKH" + }, + "p2wsh": { + "id": "p2wsh", + "long": "Pay to Witness Script Hash", + "short": "P2WSH" }, "unknown": { "id": "unknown_outputs", - "short": "Unknown", - "long": "Unknown Output Type" - }, - "empty": { - "id": "empty_outputs", - "short": "Empty", - "long": "Empty Output" + "long": "Unknown Output Type", + "short": "Unknown" } }); AGE_RANGE_NAMES = /** @type {const} */ ({ - "upTo1h": { - "id": "up_to_1h_old", - "short": "<1h", - "long": "Up to 1 Hour Old" - }, - "_1hTo1d": { - "id": "at_least_1h_up_to_1d_old", - "short": "1h-1d", - "long": "1 Hour to 1 Day Old" - }, - "_1dTo1w": { - "id": "at_least_1d_up_to_1w_old", - "short": "1d-1w", - "long": "1 Day to 1 Week Old" - }, - "_1wTo1m": { - "id": "at_least_1w_up_to_1m_old", - "short": "1w-1m", - "long": "1 Week to 1 Month Old" - }, - "_1mTo2m": { - "id": "at_least_1m_up_to_2m_old", - "short": "1m-2m", - "long": "1 to 2 Months Old" - }, - "_2mTo3m": { - 
"id": "at_least_2m_up_to_3m_old", - "short": "2m-3m", - "long": "2 to 3 Months Old" - }, - "_3mTo4m": { - "id": "at_least_3m_up_to_4m_old", - "short": "3m-4m", - "long": "3 to 4 Months Old" - }, - "_4mTo5m": { - "id": "at_least_4m_up_to_5m_old", - "short": "4m-5m", - "long": "4 to 5 Months Old" - }, - "_5mTo6m": { - "id": "at_least_5m_up_to_6m_old", - "short": "5m-6m", - "long": "5 to 6 Months Old" - }, - "_6mTo1y": { - "id": "at_least_6m_up_to_1y_old", - "short": "6m-1y", - "long": "6 Months to 1 Year Old" - }, - "_1yTo2y": { - "id": "at_least_1y_up_to_2y_old", - "short": "1y-2y", - "long": "1 to 2 Years Old" - }, - "_2yTo3y": { - "id": "at_least_2y_up_to_3y_old", - "short": "2y-3y", - "long": "2 to 3 Years Old" - }, - "_3yTo4y": { - "id": "at_least_3y_up_to_4y_old", - "short": "3y-4y", - "long": "3 to 4 Years Old" - }, - "_4yTo5y": { - "id": "at_least_4y_up_to_5y_old", - "short": "4y-5y", - "long": "4 to 5 Years Old" - }, - "_5yTo6y": { - "id": "at_least_5y_up_to_6y_old", - "short": "5y-6y", - "long": "5 to 6 Years Old" - }, - "_6yTo7y": { - "id": "at_least_6y_up_to_7y_old", - "short": "6y-7y", - "long": "6 to 7 Years Old" - }, - "_7yTo8y": { - "id": "at_least_7y_up_to_8y_old", - "short": "7y-8y", - "long": "7 to 8 Years Old" - }, - "_8yTo10y": { - "id": "at_least_8y_up_to_10y_old", - "short": "8y-10y", - "long": "8 to 10 Years Old" - }, "_10yTo12y": { "id": "at_least_10y_up_to_12y_old", - "short": "10y-12y", - "long": "10 to 12 Years Old" + "long": "10 to 12 Years Old", + "short": "10y-12y" }, "_12yTo15y": { "id": "at_least_12y_up_to_15y_old", - "short": "12y-15y", - "long": "12 to 15 Years Old" + "long": "12 to 15 Years Old", + "short": "12y-15y" + }, + "_1dTo1w": { + "id": "at_least_1d_up_to_1w_old", + "long": "1 Day to 1 Week Old", + "short": "1d-1w" + }, + "_1hTo1d": { + "id": "at_least_1h_up_to_1d_old", + "long": "1 Hour to 1 Day Old", + "short": "1h-1d" + }, + "_1mTo2m": { + "id": "at_least_1m_up_to_2m_old", + "long": "1 to 2 Months Old", + "short": 
"1m-2m" + }, + "_1wTo1m": { + "id": "at_least_1w_up_to_1m_old", + "long": "1 Week to 1 Month Old", + "short": "1w-1m" + }, + "_1yTo2y": { + "id": "at_least_1y_up_to_2y_old", + "long": "1 to 2 Years Old", + "short": "1y-2y" + }, + "_2mTo3m": { + "id": "at_least_2m_up_to_3m_old", + "long": "2 to 3 Months Old", + "short": "2m-3m" + }, + "_2yTo3y": { + "id": "at_least_2y_up_to_3y_old", + "long": "2 to 3 Years Old", + "short": "2y-3y" + }, + "_3mTo4m": { + "id": "at_least_3m_up_to_4m_old", + "long": "3 to 4 Months Old", + "short": "3m-4m" + }, + "_3yTo4y": { + "id": "at_least_3y_up_to_4y_old", + "long": "3 to 4 Years Old", + "short": "3y-4y" + }, + "_4mTo5m": { + "id": "at_least_4m_up_to_5m_old", + "long": "4 to 5 Months Old", + "short": "4m-5m" + }, + "_4yTo5y": { + "id": "at_least_4y_up_to_5y_old", + "long": "4 to 5 Years Old", + "short": "4y-5y" + }, + "_5mTo6m": { + "id": "at_least_5m_up_to_6m_old", + "long": "5 to 6 Months Old", + "short": "5m-6m" + }, + "_5yTo6y": { + "id": "at_least_5y_up_to_6y_old", + "long": "5 to 6 Years Old", + "short": "5y-6y" + }, + "_6mTo1y": { + "id": "at_least_6m_up_to_1y_old", + "long": "6 Months to 1 Year Old", + "short": "6m-1y" + }, + "_6yTo7y": { + "id": "at_least_6y_up_to_7y_old", + "long": "6 to 7 Years Old", + "short": "6y-7y" + }, + "_7yTo8y": { + "id": "at_least_7y_up_to_8y_old", + "long": "7 to 8 Years Old", + "short": "7y-8y" + }, + "_8yTo10y": { + "id": "at_least_8y_up_to_10y_old", + "long": "8 to 10 Years Old", + "short": "8y-10y" }, "from15y": { "id": "at_least_15y_old", - "short": "15y+", - "long": "15+ Years Old" + "long": "15+ Years Old", + "short": "15y+" + }, + "upTo1h": { + "id": "up_to_1h_old", + "long": "Up to 1 Hour Old", + "short": "<1h" } }); MAX_AGE_NAMES = /** @type {const} */ ({ - "_1w": { - "id": "up_to_1w_old", - "short": "<1w", - "long": "Up to 1 Week Old" - }, - "_1m": { - "id": "up_to_1m_old", - "short": "<1m", - "long": "Up to 1 Month Old" - }, - "_2m": { - "id": "up_to_2m_old", - "short": "<2m", - 
"long": "Up to 2 Months Old" - }, - "_3m": { - "id": "up_to_3m_old", - "short": "<3m", - "long": "Up to 3 Months Old" - }, - "_4m": { - "id": "up_to_4m_old", - "short": "<4m", - "long": "Up to 4 Months Old" - }, - "_5m": { - "id": "up_to_5m_old", - "short": "<5m", - "long": "Up to 5 Months Old" - }, - "_6m": { - "id": "up_to_6m_old", - "short": "<6m", - "long": "Up to 6 Months Old" - }, - "_1y": { - "id": "up_to_1y_old", - "short": "<1y", - "long": "Up to 1 Year Old" - }, - "_2y": { - "id": "up_to_2y_old", - "short": "<2y", - "long": "Up to 2 Years Old" - }, - "_3y": { - "id": "up_to_3y_old", - "short": "<3y", - "long": "Up to 3 Years Old" - }, - "_4y": { - "id": "up_to_4y_old", - "short": "<4y", - "long": "Up to 4 Years Old" - }, - "_5y": { - "id": "up_to_5y_old", - "short": "<5y", - "long": "Up to 5 Years Old" - }, - "_6y": { - "id": "up_to_6y_old", - "short": "<6y", - "long": "Up to 6 Years Old" - }, - "_7y": { - "id": "up_to_7y_old", - "short": "<7y", - "long": "Up to 7 Years Old" - }, - "_8y": { - "id": "up_to_8y_old", - "short": "<8y", - "long": "Up to 8 Years Old" - }, "_10y": { "id": "up_to_10y_old", - "short": "<10y", - "long": "Up to 10 Years Old" + "long": "Up to 10 Years Old", + "short": "<10y" }, "_12y": { "id": "up_to_12y_old", - "short": "<12y", - "long": "Up to 12 Years Old" + "long": "Up to 12 Years Old", + "short": "<12y" }, "_15y": { "id": "up_to_15y_old", - "short": "<15y", - "long": "Up to 15 Years Old" + "long": "Up to 15 Years Old", + "short": "<15y" + }, + "_1m": { + "id": "up_to_1m_old", + "long": "Up to 1 Month Old", + "short": "<1m" + }, + "_1w": { + "id": "up_to_1w_old", + "long": "Up to 1 Week Old", + "short": "<1w" + }, + "_1y": { + "id": "up_to_1y_old", + "long": "Up to 1 Year Old", + "short": "<1y" + }, + "_2m": { + "id": "up_to_2m_old", + "long": "Up to 2 Months Old", + "short": "<2m" + }, + "_2y": { + "id": "up_to_2y_old", + "long": "Up to 2 Years Old", + "short": "<2y" + }, + "_3m": { + "id": "up_to_3m_old", + "long": "Up to 3 
Months Old", + "short": "<3m" + }, + "_3y": { + "id": "up_to_3y_old", + "long": "Up to 3 Years Old", + "short": "<3y" + }, + "_4m": { + "id": "up_to_4m_old", + "long": "Up to 4 Months Old", + "short": "<4m" + }, + "_4y": { + "id": "up_to_4y_old", + "long": "Up to 4 Years Old", + "short": "<4y" + }, + "_5m": { + "id": "up_to_5m_old", + "long": "Up to 5 Months Old", + "short": "<5m" + }, + "_5y": { + "id": "up_to_5y_old", + "long": "Up to 5 Years Old", + "short": "<5y" + }, + "_6m": { + "id": "up_to_6m_old", + "long": "Up to 6 Months Old", + "short": "<6m" + }, + "_6y": { + "id": "up_to_6y_old", + "long": "Up to 6 Years Old", + "short": "<6y" + }, + "_7y": { + "id": "up_to_7y_old", + "long": "Up to 7 Years Old", + "short": "<7y" + }, + "_8y": { + "id": "up_to_8y_old", + "long": "Up to 8 Years Old", + "short": "<8y" } }); MIN_AGE_NAMES = /** @type {const} */ ({ - "_1d": { - "id": "at_least_1d_old", - "short": "1d+", - "long": "At Least 1 Day Old" - }, - "_1w": { - "id": "at_least_1w_old", - "short": "1w+", - "long": "At Least 1 Week Old" - }, - "_1m": { - "id": "at_least_1m_old", - "short": "1m+", - "long": "At Least 1 Month Old" - }, - "_2m": { - "id": "at_least_2m_old", - "short": "2m+", - "long": "At Least 2 Months Old" - }, - "_3m": { - "id": "at_least_3m_old", - "short": "3m+", - "long": "At Least 3 Months Old" - }, - "_4m": { - "id": "at_least_4m_old", - "short": "4m+", - "long": "At Least 4 Months Old" - }, - "_5m": { - "id": "at_least_5m_old", - "short": "5m+", - "long": "At Least 5 Months Old" - }, - "_6m": { - "id": "at_least_6m_old", - "short": "6m+", - "long": "At Least 6 Months Old" - }, - "_1y": { - "id": "at_least_1y_old", - "short": "1y+", - "long": "At Least 1 Year Old" - }, - "_2y": { - "id": "at_least_2y_old", - "short": "2y+", - "long": "At Least 2 Years Old" - }, - "_3y": { - "id": "at_least_3y_old", - "short": "3y+", - "long": "At Least 3 Years Old" - }, - "_4y": { - "id": "at_least_4y_old", - "short": "4y+", - "long": "At Least 4 Years Old" - }, 
- "_5y": { - "id": "at_least_5y_old", - "short": "5y+", - "long": "At Least 5 Years Old" - }, - "_6y": { - "id": "at_least_6y_old", - "short": "6y+", - "long": "At Least 6 Years Old" - }, - "_7y": { - "id": "at_least_7y_old", - "short": "7y+", - "long": "At Least 7 Years Old" - }, - "_8y": { - "id": "at_least_8y_old", - "short": "8y+", - "long": "At Least 8 Years Old" - }, "_10y": { "id": "at_least_10y_old", - "short": "10y+", - "long": "At Least 10 Years Old" + "long": "At Least 10 Years Old", + "short": "10y+" }, "_12y": { "id": "at_least_12y_old", - "short": "12y+", - "long": "At Least 12 Years Old" + "long": "At Least 12 Years Old", + "short": "12y+" + }, + "_1d": { + "id": "at_least_1d_old", + "long": "At Least 1 Day Old", + "short": "1d+" + }, + "_1m": { + "id": "at_least_1m_old", + "long": "At Least 1 Month Old", + "short": "1m+" + }, + "_1w": { + "id": "at_least_1w_old", + "long": "At Least 1 Week Old", + "short": "1w+" + }, + "_1y": { + "id": "at_least_1y_old", + "long": "At Least 1 Year Old", + "short": "1y+" + }, + "_2m": { + "id": "at_least_2m_old", + "long": "At Least 2 Months Old", + "short": "2m+" + }, + "_2y": { + "id": "at_least_2y_old", + "long": "At Least 2 Years Old", + "short": "2y+" + }, + "_3m": { + "id": "at_least_3m_old", + "long": "At Least 3 Months Old", + "short": "3m+" + }, + "_3y": { + "id": "at_least_3y_old", + "long": "At Least 3 Years Old", + "short": "3y+" + }, + "_4m": { + "id": "at_least_4m_old", + "long": "At Least 4 Months Old", + "short": "4m+" + }, + "_4y": { + "id": "at_least_4y_old", + "long": "At Least 4 Years Old", + "short": "4y+" + }, + "_5m": { + "id": "at_least_5m_old", + "long": "At Least 5 Months Old", + "short": "5m+" + }, + "_5y": { + "id": "at_least_5y_old", + "long": "At Least 5 Years Old", + "short": "5y+" + }, + "_6m": { + "id": "at_least_6m_old", + "long": "At Least 6 Months Old", + "short": "6m+" + }, + "_6y": { + "id": "at_least_6y_old", + "long": "At Least 6 Years Old", + "short": "6y+" + }, + "_7y": { + 
"id": "at_least_7y_old", + "long": "At Least 7 Years Old", + "short": "7y+" + }, + "_8y": { + "id": "at_least_8y_old", + "long": "At Least 8 Years Old", + "short": "8y+" } }); AMOUNT_RANGE_NAMES = /** @type {const} */ ({ "_0sats": { "id": "with_0sats", - "short": "0 sats", - "long": "0 Sats" - }, - "_1satTo10sats": { - "id": "above_1sat_under_10sats", - "short": "1-10 sats", - "long": "1 to 10 Sats" - }, - "_10satsTo100sats": { - "id": "above_10sats_under_100sats", - "short": "10-100 sats", - "long": "10 to 100 Sats" - }, - "_100satsTo1kSats": { - "id": "above_100sats_under_1k_sats", - "short": "100-1k sats", - "long": "100 to 1K Sats" - }, - "_1kSatsTo10kSats": { - "id": "above_1k_sats_under_10k_sats", - "short": "1k-10k sats", - "long": "1K to 10K Sats" - }, - "_10kSatsTo100kSats": { - "id": "above_10k_sats_under_100k_sats", - "short": "10k-100k sats", - "long": "10K to 100K Sats" - }, - "_100kSatsTo1mSats": { - "id": "above_100k_sats_under_1m_sats", - "short": "100k-1M sats", - "long": "100K to 1M Sats" - }, - "_1mSatsTo10mSats": { - "id": "above_1m_sats_under_10m_sats", - "short": "1M-10M sats", - "long": "1M to 10M Sats" - }, - "_10mSatsTo1btc": { - "id": "above_10m_sats_under_1btc", - "short": "0.1-1 BTC", - "long": "0.1 to 1 BTC" - }, - "_1btcTo10btc": { - "id": "above_1btc_under_10btc", - "short": "1-10 BTC", - "long": "1 to 10 BTC" - }, - "_10btcTo100btc": { - "id": "above_10btc_under_100btc", - "short": "10-100 BTC", - "long": "10 to 100 BTC" + "long": "0 Sats", + "short": "0 sats" }, "_100btcTo1kBtc": { "id": "above_100btc_under_1k_btc", - "short": "100-1k BTC", - "long": "100 to 1K BTC" - }, - "_1kBtcTo10kBtc": { - "id": "above_1k_btc_under_10k_btc", - "short": "1k-10k BTC", - "long": "1K to 10K BTC" - }, - "_10kBtcTo100kBtc": { - "id": "above_10k_btc_under_100k_btc", - "short": "10k-100k BTC", - "long": "10K to 100K BTC" + "long": "100 to 1K BTC", + "short": "100-1k BTC" }, "_100kBtcOrMore": { "id": "above_100k_btc", - "short": "100k+ BTC", - "long": 
"100K+ BTC" + "long": "100K+ BTC", + "short": "100k+ BTC" + }, + "_100kSatsTo1mSats": { + "id": "above_100k_sats_under_1m_sats", + "long": "100K to 1M Sats", + "short": "100k-1M sats" + }, + "_100satsTo1kSats": { + "id": "above_100sats_under_1k_sats", + "long": "100 to 1K Sats", + "short": "100-1k sats" + }, + "_10btcTo100btc": { + "id": "above_10btc_under_100btc", + "long": "10 to 100 BTC", + "short": "10-100 BTC" + }, + "_10kBtcTo100kBtc": { + "id": "above_10k_btc_under_100k_btc", + "long": "10K to 100K BTC", + "short": "10k-100k BTC" + }, + "_10kSatsTo100kSats": { + "id": "above_10k_sats_under_100k_sats", + "long": "10K to 100K Sats", + "short": "10k-100k sats" + }, + "_10mSatsTo1btc": { + "id": "above_10m_sats_under_1btc", + "long": "0.1 to 1 BTC", + "short": "0.1-1 BTC" + }, + "_10satsTo100sats": { + "id": "above_10sats_under_100sats", + "long": "10 to 100 Sats", + "short": "10-100 sats" + }, + "_1btcTo10btc": { + "id": "above_1btc_under_10btc", + "long": "1 to 10 BTC", + "short": "1-10 BTC" + }, + "_1kBtcTo10kBtc": { + "id": "above_1k_btc_under_10k_btc", + "long": "1K to 10K BTC", + "short": "1k-10k BTC" + }, + "_1kSatsTo10kSats": { + "id": "above_1k_sats_under_10k_sats", + "long": "1K to 10K Sats", + "short": "1k-10k sats" + }, + "_1mSatsTo10mSats": { + "id": "above_1m_sats_under_10m_sats", + "long": "1M to 10M Sats", + "short": "1M-10M sats" + }, + "_1satTo10sats": { + "id": "above_1sat_under_10sats", + "long": "1 to 10 Sats", + "short": "1-10 sats" } }); GE_AMOUNT_NAMES = /** @type {const} */ ({ - "_1sat": { - "id": "above_1sat", - "short": "1+ sats", - "long": "Above 1 Sat" - }, - "_10sats": { - "id": "above_10sats", - "short": "10+ sats", - "long": "Above 10 Sats" - }, - "_100sats": { - "id": "above_100sats", - "short": "100+ sats", - "long": "Above 100 Sats" - }, - "_1kSats": { - "id": "above_1k_sats", - "short": "1k+ sats", - "long": "Above 1K Sats" - }, - "_10kSats": { - "id": "above_10k_sats", - "short": "10k+ sats", - "long": "Above 10K Sats" + 
"_100btc": { + "id": "above_100btc", + "long": "Above 100 BTC", + "short": "100+ BTC" }, "_100kSats": { "id": "above_100k_sats", - "short": "100k+ sats", - "long": "Above 100K Sats" + "long": "Above 100K Sats", + "short": "100k+ sats" }, - "_1mSats": { - "id": "above_1m_sats", - "short": "1M+ sats", - "long": "Above 1M Sats" - }, - "_10mSats": { - "id": "above_10m_sats", - "short": "0.1+ BTC", - "long": "Above 0.1 BTC" - }, - "_1btc": { - "id": "above_1btc", - "short": "1+ BTC", - "long": "Above 1 BTC" + "_100sats": { + "id": "above_100sats", + "long": "Above 100 Sats", + "short": "100+ sats" }, "_10btc": { "id": "above_10btc", - "short": "10+ BTC", - "long": "Above 10 BTC" - }, - "_100btc": { - "id": "above_100btc", - "short": "100+ BTC", - "long": "Above 100 BTC" - }, - "_1kBtc": { - "id": "above_1k_btc", - "short": "1k+ BTC", - "long": "Above 1K BTC" + "long": "Above 10 BTC", + "short": "10+ BTC" }, "_10kBtc": { "id": "above_10k_btc", - "short": "10k+ BTC", - "long": "Above 10K BTC" + "long": "Above 10K BTC", + "short": "10k+ BTC" + }, + "_10kSats": { + "id": "above_10k_sats", + "long": "Above 10K Sats", + "short": "10k+ sats" + }, + "_10mSats": { + "id": "above_10m_sats", + "long": "Above 0.1 BTC", + "short": "0.1+ BTC" + }, + "_10sats": { + "id": "above_10sats", + "long": "Above 10 Sats", + "short": "10+ sats" + }, + "_1btc": { + "id": "above_1btc", + "long": "Above 1 BTC", + "short": "1+ BTC" + }, + "_1kBtc": { + "id": "above_1k_btc", + "long": "Above 1K BTC", + "short": "1k+ BTC" + }, + "_1kSats": { + "id": "above_1k_sats", + "long": "Above 1K Sats", + "short": "1k+ sats" + }, + "_1mSats": { + "id": "above_1m_sats", + "long": "Above 1M Sats", + "short": "1M+ sats" + }, + "_1sat": { + "id": "above_1sat", + "long": "Above 1 Sat", + "short": "1+ sats" } }); LT_AMOUNT_NAMES = /** @type {const} */ ({ - "_10sats": { - "id": "under_10sats", - "short": "<10 sats", - "long": "Under 10 Sats" - }, - "_100sats": { - "id": "under_100sats", - "short": "<100 sats", - 
"long": "Under 100 Sats" - }, - "_1kSats": { - "id": "under_1k_sats", - "short": "<1k sats", - "long": "Under 1K Sats" - }, - "_10kSats": { - "id": "under_10k_sats", - "short": "<10k sats", - "long": "Under 10K Sats" - }, - "_100kSats": { - "id": "under_100k_sats", - "short": "<100k sats", - "long": "Under 100K Sats" - }, - "_1mSats": { - "id": "under_1m_sats", - "short": "<1M sats", - "long": "Under 1M Sats" - }, - "_10mSats": { - "id": "under_10m_sats", - "short": "<0.1 BTC", - "long": "Under 0.1 BTC" - }, - "_1btc": { - "id": "under_1btc", - "short": "<1 BTC", - "long": "Under 1 BTC" - }, - "_10btc": { - "id": "under_10btc", - "short": "<10 BTC", - "long": "Under 10 BTC" - }, "_100btc": { "id": "under_100btc", - "short": "<100 BTC", - "long": "Under 100 BTC" - }, - "_1kBtc": { - "id": "under_1k_btc", - "short": "<1k BTC", - "long": "Under 1K BTC" - }, - "_10kBtc": { - "id": "under_10k_btc", - "short": "<10k BTC", - "long": "Under 10K BTC" + "long": "Under 100 BTC", + "short": "<100 BTC" }, "_100kBtc": { "id": "under_100k_btc", - "short": "<100k BTC", - "long": "Under 100K BTC" + "long": "Under 100K BTC", + "short": "<100k BTC" + }, + "_100kSats": { + "id": "under_100k_sats", + "long": "Under 100K Sats", + "short": "<100k sats" + }, + "_100sats": { + "id": "under_100sats", + "long": "Under 100 Sats", + "short": "<100 sats" + }, + "_10btc": { + "id": "under_10btc", + "long": "Under 10 BTC", + "short": "<10 BTC" + }, + "_10kBtc": { + "id": "under_10k_btc", + "long": "Under 10K BTC", + "short": "<10k BTC" + }, + "_10kSats": { + "id": "under_10k_sats", + "long": "Under 10K Sats", + "short": "<10k sats" + }, + "_10mSats": { + "id": "under_10m_sats", + "long": "Under 0.1 BTC", + "short": "<0.1 BTC" + }, + "_10sats": { + "id": "under_10sats", + "long": "Under 10 Sats", + "short": "<10 sats" + }, + "_1btc": { + "id": "under_1btc", + "long": "Under 1 BTC", + "short": "<1 BTC" + }, + "_1kBtc": { + "id": "under_1k_btc", + "long": "Under 1K BTC", + "short": "<1k BTC" + }, + 
"_1kSats": { + "id": "under_1k_sats", + "long": "Under 1K Sats", + "short": "<1k sats" + }, + "_1mSats": { + "id": "under_1m_sats", + "long": "Under 1M Sats", + "short": "<1M sats" } }); @@ -5762,928 +5442,896 @@ class BrkClient extends BrkClientBase { */ _buildTree(basePath) { return { - computed: { - blocks: { - count: { - _1mBlockCount: createMetricPattern5(this, '1m_block_count'), - _1wBlockCount: createMetricPattern5(this, '1w_block_count'), - _1yBlockCount: createMetricPattern5(this, '1y_block_count'), - _24hBlockCount: createMetricPattern26(this, '24h_block_count'), - blockCount: createBlockCountPattern(this, 'block_count'), - blockCountTarget: createMetricPattern5(this, 'block_count_target') - }, - difficulty: { - blocksBeforeNextDifficultyAdjustment: createMetricPattern1(this, 'blocks_before_next_difficulty_adjustment'), - daysBeforeNextDifficultyAdjustment: createMetricPattern1(this, 'days_before_next_difficulty_adjustment'), - difficultyepoch: createMetricPattern5(this, 'difficultyepoch') - }, - halving: { - blocksBeforeNextHalving: createMetricPattern1(this, 'blocks_before_next_halving'), - daysBeforeNextHalving: createMetricPattern1(this, 'days_before_next_halving'), - halvingepoch: createMetricPattern5(this, 'halvingepoch') - }, - interval: { - blockInterval: createBlockIntervalPattern(this, 'block_interval'), - interval: createMetricPattern26(this, 'interval') - }, - mining: { - difficulty: createMetricPattern2(this, 'difficulty'), - difficultyAdjustment: createDifficultyAdjustmentPattern(this, 'difficulty_adjustment'), - difficultyAsHash: createMetricPattern1(this, 'difficulty_as_hash'), - hashPricePhs: createMetricPattern1(this, 'hash_price_phs'), - hashPricePhsMin: createMetricPattern1(this, 'hash_price_phs_min'), - hashPriceRebound: createMetricPattern1(this, 'hash_price_rebound'), - hashPriceThs: createMetricPattern1(this, 'hash_price_ths'), - hashPriceThsMin: createMetricPattern1(this, 'hash_price_ths_min'), - hashRate: 
createMetricPattern1(this, 'hash_rate'), - hashRate1mSma: createMetricPattern5(this, 'hash_rate_1m_sma'), - hashRate1wSma: createMetricPattern5(this, 'hash_rate_1w_sma'), - hashRate1ySma: createMetricPattern5(this, 'hash_rate_1y_sma'), - hashRate2mSma: createMetricPattern5(this, 'hash_rate_2m_sma'), - hashValuePhs: createMetricPattern1(this, 'hash_value_phs'), - hashValuePhsMin: createMetricPattern1(this, 'hash_value_phs_min'), - hashValueRebound: createMetricPattern1(this, 'hash_value_rebound'), - hashValueThs: createMetricPattern1(this, 'hash_value_ths'), - hashValueThsMin: createMetricPattern1(this, 'hash_value_ths_min') - }, - rewards: { - _24hCoinbaseSum: createMetricPattern26(this, '24h_coinbase_sum'), - _24hCoinbaseUsdSum: createMetricPattern26(this, '24h_coinbase_usd_sum'), - coinbase: createCoinbasePattern(this, 'coinbase'), - feeDominance: createMetricPattern22(this, 'fee_dominance'), - subsidy: createCoinbasePattern(this, 'subsidy'), - subsidyDominance: createMetricPattern22(this, 'subsidy_dominance'), - subsidyUsd1ySma: createMetricPattern5(this, 'subsidy_usd_1y_sma'), - unclaimedRewards: createUnclaimedRewardsPattern(this, 'unclaimed_rewards') - }, - size: { - blockSize: createBlockSizePattern(this, 'block_size'), - blockVbytes: createBlockSizePattern(this, 'block_vbytes'), - vbytes: createMetricPattern26(this, 'vbytes') - }, - time: { - date: createMetricPattern26(this, 'date'), - dateFixed: createMetricPattern26(this, 'date_fixed'), - timestamp: createMetricPattern2(this, 'timestamp'), - timestampFixed: createMetricPattern26(this, 'timestamp_fixed') - }, - weight: { - blockFullness: createBitcoinPattern(this, 'block_fullness'), - blockWeight: createBlockSizePattern(this, 'block_weight') - } + addresses: { + firstP2aaddressindex: createMetricPattern23(this, 'first_p2aaddressindex'), + firstP2pk33addressindex: createMetricPattern23(this, 'first_p2pk33addressindex'), + firstP2pk65addressindex: createMetricPattern23(this, 'first_p2pk65addressindex'), + 
firstP2pkhaddressindex: createMetricPattern23(this, 'first_p2pkhaddressindex'), + firstP2shaddressindex: createMetricPattern23(this, 'first_p2shaddressindex'), + firstP2traddressindex: createMetricPattern23(this, 'first_p2traddressindex'), + firstP2wpkhaddressindex: createMetricPattern23(this, 'first_p2wpkhaddressindex'), + firstP2wshaddressindex: createMetricPattern23(this, 'first_p2wshaddressindex'), + p2abytes: createMetricPattern27(this, 'p2abytes'), + p2pk33bytes: createMetricPattern29(this, 'p2pk33bytes'), + p2pk65bytes: createMetricPattern30(this, 'p2pk65bytes'), + p2pkhbytes: createMetricPattern31(this, 'p2pkhbytes'), + p2shbytes: createMetricPattern32(this, 'p2shbytes'), + p2trbytes: createMetricPattern33(this, 'p2trbytes'), + p2wpkhbytes: createMetricPattern34(this, 'p2wpkhbytes'), + p2wshbytes: createMetricPattern35(this, 'p2wshbytes') + }, + blocks: { + blockhash: createMetricPattern23(this, 'blockhash'), + count: { + _1mBlockCount: createMetricPattern1(this, '1m_block_count'), + _1mStart: createMetricPattern23(this, '1m_start'), + _1wBlockCount: createMetricPattern1(this, '1w_block_count'), + _1wStart: createMetricPattern23(this, '1w_start'), + _1yBlockCount: createMetricPattern1(this, '1y_block_count'), + _1yStart: createMetricPattern23(this, '1y_start'), + _24hBlockCount: createMetricPattern1(this, '24h_block_count'), + _24hStart: createMetricPattern23(this, '24h_start'), + blockCount: createBlockCountPattern(this, 'block_count'), + blockCountTarget: createMetricPattern4(this, 'block_count_target') }, - cointime: { - activity: { - activityToVaultednessRatio: createMetricPattern1(this, 'activity_to_vaultedness_ratio'), - coinblocksCreated: createBlockCountPattern(this, 'coinblocks_created'), - coinblocksStored: createBlockCountPattern(this, 'coinblocks_stored'), - liveliness: createMetricPattern1(this, 'liveliness'), - vaultedness: createMetricPattern1(this, 'vaultedness') - }, - adjusted: { - cointimeAdjInflationRate: createMetricPattern5(this, 
'cointime_adj_inflation_rate'), - cointimeAdjTxBtcVelocity: createMetricPattern5(this, 'cointime_adj_tx_btc_velocity'), - cointimeAdjTxUsdVelocity: createMetricPattern5(this, 'cointime_adj_tx_usd_velocity') - }, - cap: { - activeCap: createMetricPattern1(this, 'active_cap'), - cointimeCap: createMetricPattern1(this, 'cointime_cap'), - investorCap: createMetricPattern1(this, 'investor_cap'), - thermoCap: createMetricPattern1(this, 'thermo_cap'), - vaultedCap: createMetricPattern1(this, 'vaulted_cap') - }, - pricing: { - activePrice: createMetricPattern1(this, 'active_price'), - activePriceRatio: createActivePriceRatioPattern(this, 'active_price_ratio'), - cointimePrice: createMetricPattern1(this, 'cointime_price'), - cointimePriceRatio: createActivePriceRatioPattern(this, 'cointime_price_ratio'), - trueMarketMean: createMetricPattern1(this, 'true_market_mean'), - trueMarketMeanRatio: createActivePriceRatioPattern(this, 'true_market_mean_ratio'), - vaultedPrice: createMetricPattern1(this, 'vaulted_price'), - vaultedPriceRatio: createActivePriceRatioPattern(this, 'vaulted_price_ratio') - }, - supply: { - activeSupply: createActiveSupplyPattern(this, 'active_supply'), - vaultedSupply: createActiveSupplyPattern(this, 'vaulted_supply') - }, - value: { - cointimeValueCreated: createBlockCountPattern(this, 'cointime_value_created'), - cointimeValueDestroyed: createBlockCountPattern(this, 'cointime_value_destroyed'), - cointimeValueStored: createBlockCountPattern(this, 'cointime_value_stored') - } + difficulty: { + base: createMetricPattern23(this, 'difficulty'), + blocksBeforeNextDifficultyAdjustment: createMetricPattern1(this, 'blocks_before_next_difficulty_adjustment'), + daysBeforeNextDifficultyAdjustment: createMetricPattern1(this, 'days_before_next_difficulty_adjustment'), + difficultyepoch: createMetricPattern4(this, 'difficultyepoch') }, - constants: { - constant0: createMetricPattern3(this, 'constant_0'), - constant1: createMetricPattern3(this, 'constant_1'), - 
constant100: createMetricPattern3(this, 'constant_100'), - constant2: createMetricPattern3(this, 'constant_2'), - constant20: createMetricPattern3(this, 'constant_20'), - constant3: createMetricPattern3(this, 'constant_3'), - constant30: createMetricPattern3(this, 'constant_30'), - constant382: createMetricPattern3(this, 'constant_38_2'), - constant4: createMetricPattern3(this, 'constant_4'), - constant50: createMetricPattern3(this, 'constant_50'), - constant600: createMetricPattern3(this, 'constant_600'), - constant618: createMetricPattern3(this, 'constant_61_8'), - constant70: createMetricPattern3(this, 'constant_70'), - constant80: createMetricPattern3(this, 'constant_80'), - constantMinus1: createMetricPattern3(this, 'constant_minus_1'), - constantMinus2: createMetricPattern3(this, 'constant_minus_2'), - constantMinus3: createMetricPattern3(this, 'constant_minus_3'), - constantMinus4: createMetricPattern3(this, 'constant_minus_4') + halving: { + blocksBeforeNextHalving: createMetricPattern1(this, 'blocks_before_next_halving'), + daysBeforeNextHalving: createMetricPattern1(this, 'days_before_next_halving'), + halvingepoch: createMetricPattern4(this, 'halvingepoch') }, - distribution: { - addrCount: createMetricPattern1(this, 'addr_count'), - addressCohorts: { - amountRange: { - _0sats: create_0satsPattern(this, 'addrs_with_0sats'), - _100btcTo1kBtc: create_0satsPattern(this, 'addrs_above_100btc_under_1k_btc'), - _100kBtcOrMore: create_0satsPattern(this, 'addrs_above_100k_btc'), - _100kSatsTo1mSats: create_0satsPattern(this, 'addrs_above_100k_sats_under_1m_sats'), - _100satsTo1kSats: create_0satsPattern(this, 'addrs_above_100sats_under_1k_sats'), - _10btcTo100btc: create_0satsPattern(this, 'addrs_above_10btc_under_100btc'), - _10kBtcTo100kBtc: create_0satsPattern(this, 'addrs_above_10k_btc_under_100k_btc'), - _10kSatsTo100kSats: create_0satsPattern(this, 'addrs_above_10k_sats_under_100k_sats'), - _10mSatsTo1btc: create_0satsPattern(this, 
'addrs_above_10m_sats_under_1btc'), - _10satsTo100sats: create_0satsPattern(this, 'addrs_above_10sats_under_100sats'), - _1btcTo10btc: create_0satsPattern(this, 'addrs_above_1btc_under_10btc'), - _1kBtcTo10kBtc: create_0satsPattern(this, 'addrs_above_1k_btc_under_10k_btc'), - _1kSatsTo10kSats: create_0satsPattern(this, 'addrs_above_1k_sats_under_10k_sats'), - _1mSatsTo10mSats: create_0satsPattern(this, 'addrs_above_1m_sats_under_10m_sats'), - _1satTo10sats: create_0satsPattern(this, 'addrs_above_1sat_under_10sats') - }, - geAmount: { - _100btc: create_0satsPattern(this, 'addrs_above_100btc'), - _100kSats: create_0satsPattern(this, 'addrs_above_100k_sats'), - _100sats: create_0satsPattern(this, 'addrs_above_100sats'), - _10btc: create_0satsPattern(this, 'addrs_above_10btc'), - _10kBtc: create_0satsPattern(this, 'addrs_above_10k_btc'), - _10kSats: create_0satsPattern(this, 'addrs_above_10k_sats'), - _10mSats: create_0satsPattern(this, 'addrs_above_10m_sats'), - _10sats: create_0satsPattern(this, 'addrs_above_10sats'), - _1btc: create_0satsPattern(this, 'addrs_above_1btc'), - _1kBtc: create_0satsPattern(this, 'addrs_above_1k_btc'), - _1kSats: create_0satsPattern(this, 'addrs_above_1k_sats'), - _1mSats: create_0satsPattern(this, 'addrs_above_1m_sats'), - _1sat: create_0satsPattern(this, 'addrs_above_1sat') - }, - ltAmount: { - _100btc: create_0satsPattern(this, 'addrs_under_100btc'), - _100kBtc: create_0satsPattern(this, 'addrs_under_100k_btc'), - _100kSats: create_0satsPattern(this, 'addrs_under_100k_sats'), - _100sats: create_0satsPattern(this, 'addrs_under_100sats'), - _10btc: create_0satsPattern(this, 'addrs_under_10btc'), - _10kBtc: create_0satsPattern(this, 'addrs_under_10k_btc'), - _10kSats: create_0satsPattern(this, 'addrs_under_10k_sats'), - _10mSats: create_0satsPattern(this, 'addrs_under_10m_sats'), - _10sats: create_0satsPattern(this, 'addrs_under_10sats'), - _1btc: create_0satsPattern(this, 'addrs_under_1btc'), - _1kBtc: create_0satsPattern(this, 
'addrs_under_1k_btc'), - _1kSats: create_0satsPattern(this, 'addrs_under_1k_sats'), - _1mSats: create_0satsPattern(this, 'addrs_under_1m_sats') - } - }, - addressesData: { - empty: createMetricPattern42(this, 'emptyaddressdata'), - loaded: createMetricPattern41(this, 'loadedaddressdata') - }, - addresstypeToHeightToAddrCount: createAddresstypeToHeightToAddrCountPattern(this, 'addr_count'), - addresstypeToHeightToEmptyAddrCount: createAddresstypeToHeightToAddrCountPattern(this, 'empty_addr_count'), - addresstypeToIndexesToAddrCount: createAddresstypeToHeightToAddrCountPattern(this, 'addr_count'), - addresstypeToIndexesToEmptyAddrCount: createAddresstypeToHeightToAddrCountPattern(this, 'empty_addr_count'), - anyAddressIndexes: createAddresstypeToHeightToAddrCountPattern(this, 'anyaddressindex'), - chainState: createMetricPattern26(this, 'chain'), - emptyAddrCount: createMetricPattern1(this, 'empty_addr_count'), - emptyaddressindex: createMetricPattern42(this, 'emptyaddressindex'), - loadedaddressindex: createMetricPattern41(this, 'loadedaddressindex'), - utxoCohorts: { - ageRange: { - _10yTo12y: create_10yTo12yPattern(this, 'utxos_at_least_10y_up_to_12y_old'), - _12yTo15y: create_10yTo12yPattern(this, 'utxos_at_least_12y_up_to_15y_old'), - _1dTo1w: create_10yTo12yPattern(this, 'utxos_at_least_1d_up_to_1w_old'), - _1hTo1d: create_10yTo12yPattern(this, 'utxos_at_least_1h_up_to_1d_old'), - _1mTo2m: create_10yTo12yPattern(this, 'utxos_at_least_1m_up_to_2m_old'), - _1wTo1m: create_10yTo12yPattern(this, 'utxos_at_least_1w_up_to_1m_old'), - _1yTo2y: create_10yTo12yPattern(this, 'utxos_at_least_1y_up_to_2y_old'), - _2mTo3m: create_10yTo12yPattern(this, 'utxos_at_least_2m_up_to_3m_old'), - _2yTo3y: create_10yTo12yPattern(this, 'utxos_at_least_2y_up_to_3y_old'), - _3mTo4m: create_10yTo12yPattern(this, 'utxos_at_least_3m_up_to_4m_old'), - _3yTo4y: create_10yTo12yPattern(this, 'utxos_at_least_3y_up_to_4y_old'), - _4mTo5m: create_10yTo12yPattern(this, 
'utxos_at_least_4m_up_to_5m_old'), - _4yTo5y: create_10yTo12yPattern(this, 'utxos_at_least_4y_up_to_5y_old'), - _5mTo6m: create_10yTo12yPattern(this, 'utxos_at_least_5m_up_to_6m_old'), - _5yTo6y: create_10yTo12yPattern(this, 'utxos_at_least_5y_up_to_6y_old'), - _6mTo1y: create_10yTo12yPattern(this, 'utxos_at_least_6m_up_to_1y_old'), - _6yTo7y: create_10yTo12yPattern(this, 'utxos_at_least_6y_up_to_7y_old'), - _7yTo8y: create_10yTo12yPattern(this, 'utxos_at_least_7y_up_to_8y_old'), - _8yTo10y: create_10yTo12yPattern(this, 'utxos_at_least_8y_up_to_10y_old'), - from15y: create_10yTo12yPattern(this, 'utxos_at_least_15y_old'), - upTo1h: create_10yTo12yPattern(this, 'utxos_up_to_1h_old') - }, - all: { - activity: createActivityPattern2(this, ''), - costBasis: createCostBasisPattern2(this, ''), - realized: createRealizedPattern3(this, ''), - relative: { - negUnrealizedLossRelToOwnTotalUnrealizedPnl: createMetricPattern3(this, 'neg_unrealized_loss_rel_to_own_total_unrealized_pnl'), - netUnrealizedPnlRelToOwnTotalUnrealizedPnl: createMetricPattern3(this, 'net_unrealized_pnl_rel_to_own_total_unrealized_pnl'), - supplyInLossRelToOwnSupply: createMetricPattern3(this, 'supply_in_loss_rel_to_own_supply'), - supplyInProfitRelToOwnSupply: createMetricPattern3(this, 'supply_in_profit_rel_to_own_supply'), - unrealizedLossRelToOwnTotalUnrealizedPnl: createMetricPattern3(this, 'unrealized_loss_rel_to_own_total_unrealized_pnl'), - unrealizedProfitRelToOwnTotalUnrealizedPnl: createMetricPattern3(this, 'unrealized_profit_rel_to_own_total_unrealized_pnl') - }, - supply: createSupplyPattern3(this, ''), - unrealized: createUnrealizedPattern(this, '') - }, - amountRange: { - _0sats: create_0satsPattern2(this, 'utxos_with_0sats'), - _100btcTo1kBtc: create_0satsPattern2(this, 'utxos_above_100btc_under_1k_btc'), - _100kBtcOrMore: create_0satsPattern2(this, 'utxos_above_100k_btc'), - _100kSatsTo1mSats: create_0satsPattern2(this, 'utxos_above_100k_sats_under_1m_sats'), - _100satsTo1kSats: 
create_0satsPattern2(this, 'utxos_above_100sats_under_1k_sats'), - _10btcTo100btc: create_0satsPattern2(this, 'utxos_above_10btc_under_100btc'), - _10kBtcTo100kBtc: create_0satsPattern2(this, 'utxos_above_10k_btc_under_100k_btc'), - _10kSatsTo100kSats: create_0satsPattern2(this, 'utxos_above_10k_sats_under_100k_sats'), - _10mSatsTo1btc: create_0satsPattern2(this, 'utxos_above_10m_sats_under_1btc'), - _10satsTo100sats: create_0satsPattern2(this, 'utxos_above_10sats_under_100sats'), - _1btcTo10btc: create_0satsPattern2(this, 'utxos_above_1btc_under_10btc'), - _1kBtcTo10kBtc: create_0satsPattern2(this, 'utxos_above_1k_btc_under_10k_btc'), - _1kSatsTo10kSats: create_0satsPattern2(this, 'utxos_above_1k_sats_under_10k_sats'), - _1mSatsTo10mSats: create_0satsPattern2(this, 'utxos_above_1m_sats_under_10m_sats'), - _1satTo10sats: create_0satsPattern2(this, 'utxos_above_1sat_under_10sats') - }, - epoch: { - _0: create_0satsPattern2(this, 'epoch_0'), - _1: create_0satsPattern2(this, 'epoch_1'), - _2: create_0satsPattern2(this, 'epoch_2'), - _3: create_0satsPattern2(this, 'epoch_3'), - _4: create_0satsPattern2(this, 'epoch_4') - }, - geAmount: { - _100btc: create_100btcPattern(this, 'utxos_above_100btc'), - _100kSats: create_100btcPattern(this, 'utxos_above_100k_sats'), - _100sats: create_100btcPattern(this, 'utxos_above_100sats'), - _10btc: create_100btcPattern(this, 'utxos_above_10btc'), - _10kBtc: create_100btcPattern(this, 'utxos_above_10k_btc'), - _10kSats: create_100btcPattern(this, 'utxos_above_10k_sats'), - _10mSats: create_100btcPattern(this, 'utxos_above_10m_sats'), - _10sats: create_100btcPattern(this, 'utxos_above_10sats'), - _1btc: create_100btcPattern(this, 'utxos_above_1btc'), - _1kBtc: create_100btcPattern(this, 'utxos_above_1k_btc'), - _1kSats: create_100btcPattern(this, 'utxos_above_1k_sats'), - _1mSats: create_100btcPattern(this, 'utxos_above_1m_sats'), - _1sat: create_100btcPattern(this, 'utxos_above_1sat') - }, - ltAmount: { - _100btc: 
create_100btcPattern(this, 'utxos_under_100btc'), - _100kBtc: create_100btcPattern(this, 'utxos_under_100k_btc'), - _100kSats: create_100btcPattern(this, 'utxos_under_100k_sats'), - _100sats: create_100btcPattern(this, 'utxos_under_100sats'), - _10btc: create_100btcPattern(this, 'utxos_under_10btc'), - _10kBtc: create_100btcPattern(this, 'utxos_under_10k_btc'), - _10kSats: create_100btcPattern(this, 'utxos_under_10k_sats'), - _10mSats: create_100btcPattern(this, 'utxos_under_10m_sats'), - _10sats: create_100btcPattern(this, 'utxos_under_10sats'), - _1btc: create_100btcPattern(this, 'utxos_under_1btc'), - _1kBtc: create_100btcPattern(this, 'utxos_under_1k_btc'), - _1kSats: create_100btcPattern(this, 'utxos_under_1k_sats'), - _1mSats: create_100btcPattern(this, 'utxos_under_1m_sats') - }, - maxAge: { - _10y: create_10yPattern(this, 'utxos_up_to_10y_old'), - _12y: create_10yPattern(this, 'utxos_up_to_12y_old'), - _15y: create_10yPattern(this, 'utxos_up_to_15y_old'), - _1m: create_10yPattern(this, 'utxos_up_to_1m_old'), - _1w: create_10yPattern(this, 'utxos_up_to_1w_old'), - _1y: create_10yPattern(this, 'utxos_up_to_1y_old'), - _2m: create_10yPattern(this, 'utxos_up_to_2m_old'), - _2y: create_10yPattern(this, 'utxos_up_to_2y_old'), - _3m: create_10yPattern(this, 'utxos_up_to_3m_old'), - _3y: create_10yPattern(this, 'utxos_up_to_3y_old'), - _4m: create_10yPattern(this, 'utxos_up_to_4m_old'), - _4y: create_10yPattern(this, 'utxos_up_to_4y_old'), - _5m: create_10yPattern(this, 'utxos_up_to_5m_old'), - _5y: create_10yPattern(this, 'utxos_up_to_5y_old'), - _6m: create_10yPattern(this, 'utxos_up_to_6m_old'), - _6y: create_10yPattern(this, 'utxos_up_to_6y_old'), - _7y: create_10yPattern(this, 'utxos_up_to_7y_old'), - _8y: create_10yPattern(this, 'utxos_up_to_8y_old') - }, - minAge: { - _10y: create_100btcPattern(this, 'utxos_at_least_10y_old'), - _12y: create_100btcPattern(this, 'utxos_at_least_12y_old'), - _1d: create_100btcPattern(this, 'utxos_at_least_1d_old'), - _1m: 
create_100btcPattern(this, 'utxos_at_least_1m_old'), - _1w: create_100btcPattern(this, 'utxos_at_least_1w_old'), - _1y: create_100btcPattern(this, 'utxos_at_least_1y_old'), - _2m: create_100btcPattern(this, 'utxos_at_least_2m_old'), - _2y: create_100btcPattern(this, 'utxos_at_least_2y_old'), - _3m: create_100btcPattern(this, 'utxos_at_least_3m_old'), - _3y: create_100btcPattern(this, 'utxos_at_least_3y_old'), - _4m: create_100btcPattern(this, 'utxos_at_least_4m_old'), - _4y: create_100btcPattern(this, 'utxos_at_least_4y_old'), - _5m: create_100btcPattern(this, 'utxos_at_least_5m_old'), - _5y: create_100btcPattern(this, 'utxos_at_least_5y_old'), - _6m: create_100btcPattern(this, 'utxos_at_least_6m_old'), - _6y: create_100btcPattern(this, 'utxos_at_least_6y_old'), - _7y: create_100btcPattern(this, 'utxos_at_least_7y_old'), - _8y: create_100btcPattern(this, 'utxos_at_least_8y_old') - }, - term: { - long: { - activity: createActivityPattern2(this, 'lth'), - costBasis: createCostBasisPattern2(this, 'lth'), - realized: createRealizedPattern2(this, 'lth'), - relative: createRelativePattern5(this, 'lth'), - supply: createSupplyPattern3(this, 'lth'), - unrealized: createUnrealizedPattern(this, 'lth') - }, - short: { - activity: createActivityPattern2(this, 'sth'), - costBasis: createCostBasisPattern2(this, 'sth'), - realized: createRealizedPattern3(this, 'sth'), - relative: createRelativePattern5(this, 'sth'), - supply: createSupplyPattern3(this, 'sth'), - unrealized: createUnrealizedPattern(this, 'sth') - } - }, - type: { - empty: create_0satsPattern2(this, 'empty_outputs'), - p2a: create_0satsPattern2(this, 'p2a'), - p2ms: create_0satsPattern2(this, 'p2ms'), - p2pk33: create_0satsPattern2(this, 'p2pk33'), - p2pk65: create_0satsPattern2(this, 'p2pk65'), - p2pkh: create_0satsPattern2(this, 'p2pkh'), - p2sh: create_0satsPattern2(this, 'p2sh'), - p2tr: create_0satsPattern2(this, 'p2tr'), - p2wpkh: create_0satsPattern2(this, 'p2wpkh'), - p2wsh: create_0satsPattern2(this, 
'p2wsh'), - unknown: create_0satsPattern2(this, 'unknown_outputs') - }, - year: { - _2009: create_0satsPattern2(this, 'year_2009'), - _2010: create_0satsPattern2(this, 'year_2010'), - _2011: create_0satsPattern2(this, 'year_2011'), - _2012: create_0satsPattern2(this, 'year_2012'), - _2013: create_0satsPattern2(this, 'year_2013'), - _2014: create_0satsPattern2(this, 'year_2014'), - _2015: create_0satsPattern2(this, 'year_2015'), - _2016: create_0satsPattern2(this, 'year_2016'), - _2017: create_0satsPattern2(this, 'year_2017'), - _2018: create_0satsPattern2(this, 'year_2018'), - _2019: create_0satsPattern2(this, 'year_2019'), - _2020: create_0satsPattern2(this, 'year_2020'), - _2021: create_0satsPattern2(this, 'year_2021'), - _2022: create_0satsPattern2(this, 'year_2022'), - _2023: create_0satsPattern2(this, 'year_2023'), - _2024: create_0satsPattern2(this, 'year_2024'), - _2025: create_0satsPattern2(this, 'year_2025'), - _2026: create_0satsPattern2(this, 'year_2026') - } - } + interval: createIntervalPattern(this, 'block_interval'), + mining: { + difficulty: createMetricPattern2(this, 'difficulty'), + difficultyAdjustment: createMetricPattern1(this, 'difficulty_adjustment'), + difficultyAsHash: createMetricPattern1(this, 'difficulty_as_hash'), + hashPricePhs: createMetricPattern1(this, 'hash_price_phs'), + hashPricePhsMin: createMetricPattern1(this, 'hash_price_phs_min'), + hashPriceRebound: createMetricPattern1(this, 'hash_price_rebound'), + hashPriceThs: createMetricPattern1(this, 'hash_price_ths'), + hashPriceThsMin: createMetricPattern1(this, 'hash_price_ths_min'), + hashRate: createMetricPattern1(this, 'hash_rate'), + hashRate1mSma: createMetricPattern4(this, 'hash_rate_1m_sma'), + hashRate1wSma: createMetricPattern4(this, 'hash_rate_1w_sma'), + hashRate1ySma: createMetricPattern4(this, 'hash_rate_1y_sma'), + hashRate2mSma: createMetricPattern4(this, 'hash_rate_2m_sma'), + hashValuePhs: createMetricPattern1(this, 'hash_value_phs'), + hashValuePhsMin: 
createMetricPattern1(this, 'hash_value_phs_min'), + hashValueRebound: createMetricPattern1(this, 'hash_value_rebound'), + hashValueThs: createMetricPattern1(this, 'hash_value_ths'), + hashValueThsMin: createMetricPattern1(this, 'hash_value_ths_min') }, - indexes: { - address: { - emptyoutputindex: createMetricPattern25(this, 'emptyoutputindex'), - opreturnindex: createMetricPattern28(this, 'opreturnindex'), - p2aaddressindex: createMetricPattern30(this, 'p2aaddressindex'), - p2msoutputindex: createMetricPattern31(this, 'p2msoutputindex'), - p2pk33addressindex: createMetricPattern32(this, 'p2pk33addressindex'), - p2pk65addressindex: createMetricPattern33(this, 'p2pk65addressindex'), - p2pkhaddressindex: createMetricPattern34(this, 'p2pkhaddressindex'), - p2shaddressindex: createMetricPattern35(this, 'p2shaddressindex'), - p2traddressindex: createMetricPattern36(this, 'p2traddressindex'), - p2wpkhaddressindex: createMetricPattern37(this, 'p2wpkhaddressindex'), - p2wshaddressindex: createMetricPattern38(this, 'p2wshaddressindex'), - unknownoutputindex: createMetricPattern40(this, 'unknownoutputindex') - }, - block: { - dateindex: createMetricPattern26(this, 'dateindex'), - difficultyepoch: createMetricPattern15(this, 'difficultyepoch'), - firstHeight: createMetricPattern14(this, 'first_height'), - halvingepoch: createMetricPattern16(this, 'halvingepoch'), - height: createMetricPattern26(this, 'height'), - heightCount: createMetricPattern24(this, 'height_count'), - txindexCount: createMetricPattern26(this, 'txindex_count') - }, - time: { - date: createMetricPattern22(this, 'date'), - dateindex: createMetricPattern22(this, 'dateindex'), - dateindexCount: createMetricPattern20(this, 'dateindex_count'), - decadeindex: createMetricPattern13(this, 'decadeindex'), - firstDateindex: createMetricPattern20(this, 'first_dateindex'), - firstHeight: createMetricPattern22(this, 'first_height'), - firstMonthindex: createMetricPattern9(this, 'first_monthindex'), - firstYearindex: 
createMetricPattern23(this, 'first_yearindex'), - heightCount: createMetricPattern22(this, 'height_count'), - monthindex: createMetricPattern11(this, 'monthindex'), - monthindexCount: createMetricPattern9(this, 'monthindex_count'), - quarterindex: createMetricPattern18(this, 'quarterindex'), - semesterindex: createMetricPattern19(this, 'semesterindex'), - weekindex: createMetricPattern12(this, 'weekindex'), - yearindex: createMetricPattern21(this, 'yearindex'), - yearindexCount: createMetricPattern23(this, 'yearindex_count') - }, - transaction: { - inputCount: createMetricPattern39(this, 'input_count'), - outputCount: createMetricPattern39(this, 'output_count'), - txindex: createMetricPattern39(this, 'txindex'), - txinindex: createMetricPattern27(this, 'txinindex'), - txoutindex: createMetricPattern29(this, 'txoutindex') - } + rewards: { + _24hCoinbaseSum: createMetricPattern23(this, '24h_coinbase_sum'), + _24hCoinbaseUsdSum: createMetricPattern23(this, '24h_coinbase_usd_sum'), + coinbase: createCoinbasePattern(this, 'coinbase'), + feeDominance: createMetricPattern19(this, 'fee_dominance'), + subsidy: createCoinbasePattern(this, 'subsidy'), + subsidyDominance: createMetricPattern19(this, 'subsidy_dominance'), + subsidyUsd1ySma: createMetricPattern4(this, 'subsidy_usd_1y_sma'), + unclaimedRewards: createUnclaimedRewardsPattern(this, 'unclaimed_rewards') }, - inputs: { - count: { - count: createCountPattern2(this, 'input_count') - }, - spent: { - txoutindex: createMetricPattern27(this, 'txoutindex'), - value: createMetricPattern27(this, 'value') - } + size: { + blockSize: createBlockSizePattern(this, 'block_size'), + blockVbytes: createBlockSizePattern(this, 'block_vbytes'), + vbytes: createMetricPattern23(this, 'vbytes') }, - market: { - ath: { - daysSincePriceAth: createMetricPattern5(this, 'days_since_price_ath'), - maxDaysBetweenPriceAths: createMetricPattern5(this, 'max_days_between_price_aths'), - maxYearsBetweenPriceAths: createMetricPattern5(this, 
'max_years_between_price_aths'), - priceAth: createMetricPattern3(this, 'price_ath'), - priceDrawdown: createMetricPattern3(this, 'price_drawdown'), - yearsSincePriceAth: createMetricPattern5(this, 'years_since_price_ath') - }, - dca: { - classAveragePrice: createClassAveragePricePattern(this, 'dca_class'), - classReturns: createClassAveragePricePattern(this, 'dca_class'), - classStack: { - _2015: createActiveSupplyPattern(this, 'dca_class_2015_stack'), - _2016: createActiveSupplyPattern(this, 'dca_class_2016_stack'), - _2017: createActiveSupplyPattern(this, 'dca_class_2017_stack'), - _2018: createActiveSupplyPattern(this, 'dca_class_2018_stack'), - _2019: createActiveSupplyPattern(this, 'dca_class_2019_stack'), - _2020: createActiveSupplyPattern(this, 'dca_class_2020_stack'), - _2021: createActiveSupplyPattern(this, 'dca_class_2021_stack'), - _2022: createActiveSupplyPattern(this, 'dca_class_2022_stack'), - _2023: createActiveSupplyPattern(this, 'dca_class_2023_stack'), - _2024: createActiveSupplyPattern(this, 'dca_class_2024_stack'), - _2025: createActiveSupplyPattern(this, 'dca_class_2025_stack') - }, - periodAveragePrice: createPeriodAveragePricePattern(this, 'dca_average_price'), - periodCagr: createPeriodCagrPattern(this, 'dca_cagr'), - periodLumpSumStack: createPeriodLumpSumStackPattern(this, ''), - periodReturns: createPeriodAveragePricePattern(this, 'dca_returns'), - periodStack: createPeriodLumpSumStackPattern(this, '') - }, - indicators: { - gini: createMetricPattern22(this, 'gini'), - macdHistogram: createMetricPattern22(this, 'macd_histogram'), - macdLine: createMetricPattern22(this, 'macd_line'), - macdSignal: createMetricPattern22(this, 'macd_signal'), - nvt: createMetricPattern5(this, 'nvt'), - piCycle: createMetricPattern22(this, 'pi_cycle'), - puellMultiple: createMetricPattern5(this, 'puell_multiple'), - rsi14d: createMetricPattern22(this, 'rsi_14d'), - rsi14dMax: createMetricPattern22(this, 'rsi_14d_max'), - rsi14dMin: 
createMetricPattern22(this, 'rsi_14d_min'), - rsiAverageGain14d: createMetricPattern22(this, 'rsi_average_gain_14d'), - rsiAverageLoss14d: createMetricPattern22(this, 'rsi_average_loss_14d'), - rsiGains: createMetricPattern22(this, 'rsi_gains'), - rsiLosses: createMetricPattern22(this, 'rsi_losses'), - stochD: createMetricPattern22(this, 'stoch_d'), - stochK: createMetricPattern22(this, 'stoch_k'), - stochRsi: createMetricPattern22(this, 'stoch_rsi'), - stochRsiD: createMetricPattern22(this, 'stoch_rsi_d'), - stochRsiK: createMetricPattern22(this, 'stoch_rsi_k') - }, - lookback: { - priceAgo: createPriceAgoPattern(this, 'price') - }, - movingAverage: { - price111dSma: createPrice111dSmaPattern(this, 'price_111d_sma'), - price12dEma: createPrice111dSmaPattern(this, 'price_12d_ema'), - price13dEma: createPrice111dSmaPattern(this, 'price_13d_ema'), - price13dSma: createPrice111dSmaPattern(this, 'price_13d_sma'), - price144dEma: createPrice111dSmaPattern(this, 'price_144d_ema'), - price144dSma: createPrice111dSmaPattern(this, 'price_144d_sma'), - price1mEma: createPrice111dSmaPattern(this, 'price_1m_ema'), - price1mSma: createPrice111dSmaPattern(this, 'price_1m_sma'), - price1wEma: createPrice111dSmaPattern(this, 'price_1w_ema'), - price1wSma: createPrice111dSmaPattern(this, 'price_1w_sma'), - price1yEma: createPrice111dSmaPattern(this, 'price_1y_ema'), - price1ySma: createPrice111dSmaPattern(this, 'price_1y_sma'), - price200dEma: createPrice111dSmaPattern(this, 'price_200d_ema'), - price200dSma: createPrice111dSmaPattern(this, 'price_200d_sma'), - price200dSmaX08: createMetricPattern5(this, 'price_200d_sma_x0_8'), - price200dSmaX24: createMetricPattern5(this, 'price_200d_sma_x2_4'), - price200wEma: createPrice111dSmaPattern(this, 'price_200w_ema'), - price200wSma: createPrice111dSmaPattern(this, 'price_200w_sma'), - price21dEma: createPrice111dSmaPattern(this, 'price_21d_ema'), - price21dSma: createPrice111dSmaPattern(this, 'price_21d_sma'), - price26dEma: 
createPrice111dSmaPattern(this, 'price_26d_ema'), - price2yEma: createPrice111dSmaPattern(this, 'price_2y_ema'), - price2ySma: createPrice111dSmaPattern(this, 'price_2y_sma'), - price34dEma: createPrice111dSmaPattern(this, 'price_34d_ema'), - price34dSma: createPrice111dSmaPattern(this, 'price_34d_sma'), - price350dSma: createPrice111dSmaPattern(this, 'price_350d_sma'), - price350dSmaX2: createMetricPattern5(this, 'price_350d_sma_x2'), - price4yEma: createPrice111dSmaPattern(this, 'price_4y_ema'), - price4ySma: createPrice111dSmaPattern(this, 'price_4y_sma'), - price55dEma: createPrice111dSmaPattern(this, 'price_55d_ema'), - price55dSma: createPrice111dSmaPattern(this, 'price_55d_sma'), - price89dEma: createPrice111dSmaPattern(this, 'price_89d_ema'), - price89dSma: createPrice111dSmaPattern(this, 'price_89d_sma'), - price8dEma: createPrice111dSmaPattern(this, 'price_8d_ema'), - price8dSma: createPrice111dSmaPattern(this, 'price_8d_sma') - }, - range: { - price1mMax: createMetricPattern5(this, 'price_1m_max'), - price1mMin: createMetricPattern5(this, 'price_1m_min'), - price1wMax: createMetricPattern5(this, 'price_1w_max'), - price1wMin: createMetricPattern5(this, 'price_1w_min'), - price1yMax: createMetricPattern5(this, 'price_1y_max'), - price1yMin: createMetricPattern5(this, 'price_1y_min'), - price2wChoppinessIndex: createMetricPattern5(this, 'price_2w_choppiness_index'), - price2wMax: createMetricPattern5(this, 'price_2w_max'), - price2wMin: createMetricPattern5(this, 'price_2w_min'), - priceTrueRange: createMetricPattern22(this, 'price_true_range'), - priceTrueRange2wSum: createMetricPattern22(this, 'price_true_range_2w_sum') - }, - returns: { - _1dReturns1mSd: create_1dReturns1mSdPattern(this, '1d_returns_1m_sd'), - _1dReturns1wSd: create_1dReturns1mSdPattern(this, '1d_returns_1w_sd'), - _1dReturns1ySd: create_1dReturns1mSdPattern(this, '1d_returns_1y_sd'), - cagr: createPeriodCagrPattern(this, 'cagr'), - downside1mSd: create_1dReturns1mSdPattern(this, 
'downside_1m_sd'), - downside1wSd: create_1dReturns1mSdPattern(this, 'downside_1w_sd'), - downside1ySd: create_1dReturns1mSdPattern(this, 'downside_1y_sd'), - downsideReturns: createMetricPattern22(this, 'downside_returns'), - priceReturns: createPriceAgoPattern(this, 'price_returns') - }, - volatility: { - price1mVolatility: createMetricPattern5(this, 'price_1m_volatility'), - price1wVolatility: createMetricPattern5(this, 'price_1w_volatility'), - price1yVolatility: createMetricPattern5(this, 'price_1y_volatility'), - sharpe1m: createMetricPattern22(this, 'sharpe_1m'), - sharpe1w: createMetricPattern22(this, 'sharpe_1w'), - sharpe1y: createMetricPattern22(this, 'sharpe_1y'), - sortino1m: createMetricPattern22(this, 'sortino_1m'), - sortino1w: createMetricPattern22(this, 'sortino_1w'), - sortino1y: createMetricPattern22(this, 'sortino_1y') - } + time: { + date: createMetricPattern23(this, 'date'), + dateFixed: createMetricPattern23(this, 'date_fixed'), + timestamp: createMetricPattern2(this, 'timestamp'), + timestampFixed: createMetricPattern23(this, 'timestamp_fixed') }, - outputs: { - count: { - count: createCountPattern2(this, 'output_count'), - utxoCount: createDollarsPattern(this, 'exact_utxo_count') - }, - spent: { - txinindex: createMetricPattern29(this, 'txinindex') - } + timestamp: createMetricPattern23(this, 'timestamp'), + totalSize: createMetricPattern23(this, 'total_size'), + weight: { + base: createMetricPattern23(this, 'weight'), + blockFullness: createBitcoinPattern(this, 'block_fullness'), + blockWeight: createBlockSizePattern(this, 'block_weight') + } + }, + cointime: { + activity: { + activityToVaultednessRatio: createMetricPattern1(this, 'activity_to_vaultedness_ratio'), + coinblocksCreated: createBlockCountPattern(this, 'coinblocks_created'), + coinblocksStored: createBlockCountPattern(this, 'coinblocks_stored'), + liveliness: createMetricPattern1(this, 'liveliness'), + vaultedness: createMetricPattern1(this, 'vaultedness') }, - pools: { - 
pool: createMetricPattern26(this, 'pool'), - vecs: { - aXbt: createAXbtPattern(this, 'axbt'), - aaoPool: createAXbtPattern(this, 'aaopool'), - antPool: createAXbtPattern(this, 'antpool'), - arkPool: createAXbtPattern(this, 'arkpool'), - asicMiner: createAXbtPattern(this, 'asicminer'), - batPool: createAXbtPattern(this, 'batpool'), - bcMonster: createAXbtPattern(this, 'bcmonster'), - bcpoolIo: createAXbtPattern(this, 'bcpoolio'), - binancePool: createAXbtPattern(this, 'binancepool'), - bitClub: createAXbtPattern(this, 'bitclub'), - bitFuFuPool: createAXbtPattern(this, 'bitfufupool'), - bitFury: createAXbtPattern(this, 'bitfury'), - bitMinter: createAXbtPattern(this, 'bitminter'), - bitalo: createAXbtPattern(this, 'bitalo'), - bitcoinAffiliateNetwork: createAXbtPattern(this, 'bitcoinaffiliatenetwork'), - bitcoinCom: createAXbtPattern(this, 'bitcoincom'), - bitcoinIndia: createAXbtPattern(this, 'bitcoinindia'), - bitcoinRussia: createAXbtPattern(this, 'bitcoinrussia'), - bitcoinUkraine: createAXbtPattern(this, 'bitcoinukraine'), - bitfarms: createAXbtPattern(this, 'bitfarms'), - bitparking: createAXbtPattern(this, 'bitparking'), - bitsolo: createAXbtPattern(this, 'bitsolo'), - bixin: createAXbtPattern(this, 'bixin'), - blockFills: createAXbtPattern(this, 'blockfills'), - braiinsPool: createAXbtPattern(this, 'braiinspool'), - bravoMining: createAXbtPattern(this, 'bravomining'), - btPool: createAXbtPattern(this, 'btpool'), - btcCom: createAXbtPattern(this, 'btccom'), - btcDig: createAXbtPattern(this, 'btcdig'), - btcGuild: createAXbtPattern(this, 'btcguild'), - btcLab: createAXbtPattern(this, 'btclab'), - btcMp: createAXbtPattern(this, 'btcmp'), - btcNuggets: createAXbtPattern(this, 'btcnuggets'), - btcPoolParty: createAXbtPattern(this, 'btcpoolparty'), - btcServ: createAXbtPattern(this, 'btcserv'), - btcTop: createAXbtPattern(this, 'btctop'), - btcc: createAXbtPattern(this, 'btcc'), - bwPool: createAXbtPattern(this, 'bwpool'), - bytePool: createAXbtPattern(this, 
'bytepool'), - canoe: createAXbtPattern(this, 'canoe'), - canoePool: createAXbtPattern(this, 'canoepool'), - carbonNegative: createAXbtPattern(this, 'carbonnegative'), - ckPool: createAXbtPattern(this, 'ckpool'), - cloudHashing: createAXbtPattern(this, 'cloudhashing'), - coinLab: createAXbtPattern(this, 'coinlab'), - cointerra: createAXbtPattern(this, 'cointerra'), - connectBtc: createAXbtPattern(this, 'connectbtc'), - dPool: createAXbtPattern(this, 'dpool'), - dcExploration: createAXbtPattern(this, 'dcexploration'), - dcex: createAXbtPattern(this, 'dcex'), - digitalBtc: createAXbtPattern(this, 'digitalbtc'), - digitalXMintsy: createAXbtPattern(this, 'digitalxmintsy'), - eclipseMc: createAXbtPattern(this, 'eclipsemc'), - eightBaochi: createAXbtPattern(this, 'eightbaochi'), - ekanemBtc: createAXbtPattern(this, 'ekanembtc'), - eligius: createAXbtPattern(this, 'eligius'), - emcdPool: createAXbtPattern(this, 'emcdpool'), - entrustCharityPool: createAXbtPattern(this, 'entrustcharitypool'), - eobot: createAXbtPattern(this, 'eobot'), - exxBw: createAXbtPattern(this, 'exxbw'), - f2Pool: createAXbtPattern(this, 'f2pool'), - fiftyEightCoin: createAXbtPattern(this, 'fiftyeightcoin'), - foundryUsa: createAXbtPattern(this, 'foundryusa'), - futureBitApolloSolo: createAXbtPattern(this, 'futurebitapollosolo'), - gbMiners: createAXbtPattern(this, 'gbminers'), - ghashIo: createAXbtPattern(this, 'ghashio'), - giveMeCoins: createAXbtPattern(this, 'givemecoins'), - goGreenLight: createAXbtPattern(this, 'gogreenlight'), - haoZhuZhu: createAXbtPattern(this, 'haozhuzhu'), - haominer: createAXbtPattern(this, 'haominer'), - hashBx: createAXbtPattern(this, 'hashbx'), - hashPool: createAXbtPattern(this, 'hashpool'), - helix: createAXbtPattern(this, 'helix'), - hhtt: createAXbtPattern(this, 'hhtt'), - hotPool: createAXbtPattern(this, 'hotpool'), - hummerpool: createAXbtPattern(this, 'hummerpool'), - huobiPool: createAXbtPattern(this, 'huobipool'), - innopolisTech: createAXbtPattern(this, 
'innopolistech'), - kanoPool: createAXbtPattern(this, 'kanopool'), - kncMiner: createAXbtPattern(this, 'kncminer'), - kuCoinPool: createAXbtPattern(this, 'kucoinpool'), - lubianCom: createAXbtPattern(this, 'lubiancom'), - luckyPool: createAXbtPattern(this, 'luckypool'), - luxor: createAXbtPattern(this, 'luxor'), - maraPool: createAXbtPattern(this, 'marapool'), - maxBtc: createAXbtPattern(this, 'maxbtc'), - maxiPool: createAXbtPattern(this, 'maxipool'), - megaBigPower: createAXbtPattern(this, 'megabigpower'), - minerium: createAXbtPattern(this, 'minerium'), - miningCity: createAXbtPattern(this, 'miningcity'), - miningDutch: createAXbtPattern(this, 'miningdutch'), - miningKings: createAXbtPattern(this, 'miningkings'), - miningSquared: createAXbtPattern(this, 'miningsquared'), - mmpool: createAXbtPattern(this, 'mmpool'), - mtRed: createAXbtPattern(this, 'mtred'), - multiCoinCo: createAXbtPattern(this, 'multicoinco'), - multipool: createAXbtPattern(this, 'multipool'), - myBtcCoinPool: createAXbtPattern(this, 'mybtccoinpool'), - neopool: createAXbtPattern(this, 'neopool'), - nexious: createAXbtPattern(this, 'nexious'), - niceHash: createAXbtPattern(this, 'nicehash'), - nmcBit: createAXbtPattern(this, 'nmcbit'), - novaBlock: createAXbtPattern(this, 'novablock'), - ocean: createAXbtPattern(this, 'ocean'), - okExPool: createAXbtPattern(this, 'okexpool'), - okMiner: createAXbtPattern(this, 'okminer'), - okkong: createAXbtPattern(this, 'okkong'), - okpoolTop: createAXbtPattern(this, 'okpooltop'), - oneHash: createAXbtPattern(this, 'onehash'), - oneM1x: createAXbtPattern(this, 'onem1x'), - oneThash: createAXbtPattern(this, 'onethash'), - ozCoin: createAXbtPattern(this, 'ozcoin'), - pHashIo: createAXbtPattern(this, 'phashio'), - parasite: createAXbtPattern(this, 'parasite'), - patels: createAXbtPattern(this, 'patels'), - pegaPool: createAXbtPattern(this, 'pegapool'), - phoenix: createAXbtPattern(this, 'phoenix'), - polmine: createAXbtPattern(this, 'polmine'), - pool175btc: 
createAXbtPattern(this, 'pool175btc'), - pool50btc: createAXbtPattern(this, 'pool50btc'), - poolin: createAXbtPattern(this, 'poolin'), - portlandHodl: createAXbtPattern(this, 'portlandhodl'), - publicPool: createAXbtPattern(this, 'publicpool'), - pureBtcCom: createAXbtPattern(this, 'purebtccom'), - rawpool: createAXbtPattern(this, 'rawpool'), - rigPool: createAXbtPattern(this, 'rigpool'), - sbiCrypto: createAXbtPattern(this, 'sbicrypto'), - secPool: createAXbtPattern(this, 'secpool'), - secretSuperstar: createAXbtPattern(this, 'secretsuperstar'), - sevenPool: createAXbtPattern(this, 'sevenpool'), - shawnP0wers: createAXbtPattern(this, 'shawnp0wers'), - sigmapoolCom: createAXbtPattern(this, 'sigmapoolcom'), - simplecoinUs: createAXbtPattern(this, 'simplecoinus'), - soloCk: createAXbtPattern(this, 'solock'), - spiderPool: createAXbtPattern(this, 'spiderpool'), - stMiningCorp: createAXbtPattern(this, 'stminingcorp'), - tangpool: createAXbtPattern(this, 'tangpool'), - tatmasPool: createAXbtPattern(this, 'tatmaspool'), - tbDice: createAXbtPattern(this, 'tbdice'), - telco214: createAXbtPattern(this, 'telco214'), - terraPool: createAXbtPattern(this, 'terrapool'), - tiger: createAXbtPattern(this, 'tiger'), - tigerpoolNet: createAXbtPattern(this, 'tigerpoolnet'), - titan: createAXbtPattern(this, 'titan'), - transactionCoinMining: createAXbtPattern(this, 'transactioncoinmining'), - trickysBtcPool: createAXbtPattern(this, 'trickysbtcpool'), - tripleMining: createAXbtPattern(this, 'triplemining'), - twentyOneInc: createAXbtPattern(this, 'twentyoneinc'), - ultimusPool: createAXbtPattern(this, 'ultimuspool'), - unknown: createAXbtPattern(this, 'unknown'), - unomp: createAXbtPattern(this, 'unomp'), - viaBtc: createAXbtPattern(this, 'viabtc'), - waterhole: createAXbtPattern(this, 'waterhole'), - wayiCn: createAXbtPattern(this, 'wayicn'), - whitePool: createAXbtPattern(this, 'whitepool'), - wk057: createAXbtPattern(this, 'wk057'), - yourbtcNet: createAXbtPattern(this, 
'yourbtcnet'), - zulupool: createAXbtPattern(this, 'zulupool') - } + adjusted: { + cointimeAdjInflationRate: createMetricPattern4(this, 'cointime_adj_inflation_rate'), + cointimeAdjTxBtcVelocity: createMetricPattern4(this, 'cointime_adj_tx_btc_velocity'), + cointimeAdjTxUsdVelocity: createMetricPattern4(this, 'cointime_adj_tx_usd_velocity') }, - positions: { - position: createMetricPattern17(this, 'position') + cap: { + activeCap: createMetricPattern1(this, 'active_cap'), + cointimeCap: createMetricPattern1(this, 'cointime_cap'), + investorCap: createMetricPattern1(this, 'investor_cap'), + thermoCap: createMetricPattern1(this, 'thermo_cap'), + vaultedCap: createMetricPattern1(this, 'vaulted_cap') }, - price: { - ohlc: { - ohlcInCents: createMetricPattern10(this, 'ohlc_in_cents') - }, - sats: { - priceCloseInSats: createMetricPattern1(this, 'price_close_in_sats'), - priceHighInSats: createPriceHighInSatsPattern(this, 'price_high_in_sats'), - priceLowInSats: createPriceLowInSatsPattern(this, 'price_low_in_sats'), - priceOhlcInSats: createMetricPattern1(this, 'price_ohlc_in_sats'), - priceOpenInSats: createMetricPattern1(this, 'price_open_in_sats') - }, - usd: { - priceClose: createMetricPattern1(this, 'price_close'), - priceCloseInCents: createMetricPattern10(this, 'price_close_in_cents'), - priceHigh: createPriceHighInSatsPattern(this, 'price_high'), - priceHighInCents: createMetricPattern10(this, 'price_high_in_cents'), - priceLow: createPriceLowInSatsPattern(this, 'price_low'), - priceLowInCents: createMetricPattern10(this, 'price_low_in_cents'), - priceOhlc: createMetricPattern1(this, 'price_ohlc'), - priceOpen: createMetricPattern1(this, 'price_open'), - priceOpenInCents: createMetricPattern10(this, 'price_open_in_cents') - } - }, - scripts: { - count: { - emptyoutputCount: createDollarsPattern(this, 'emptyoutput_count'), - opreturnCount: createDollarsPattern(this, 'opreturn_count'), - p2aCount: createDollarsPattern(this, 'p2a_count'), - p2msCount: 
createDollarsPattern(this, 'p2ms_count'), - p2pk33Count: createDollarsPattern(this, 'p2pk33_count'), - p2pk65Count: createDollarsPattern(this, 'p2pk65_count'), - p2pkhCount: createDollarsPattern(this, 'p2pkh_count'), - p2shCount: createDollarsPattern(this, 'p2sh_count'), - p2trCount: createDollarsPattern(this, 'p2tr_count'), - p2wpkhCount: createDollarsPattern(this, 'p2wpkh_count'), - p2wshCount: createDollarsPattern(this, 'p2wsh_count'), - segwitAdoption: createBlockCountPattern(this, 'segwit_adoption'), - segwitCount: createDollarsPattern(this, 'segwit_count'), - taprootAdoption: createBlockCountPattern(this, 'taproot_adoption'), - unknownoutputCount: createDollarsPattern(this, 'unknownoutput_count') - }, - value: { - opreturnValue: createCoinbasePattern(this, 'opreturn_value') - } + pricing: { + activePrice: createMetricPattern1(this, 'active_price'), + activePriceRatio: createActivePriceRatioPattern(this, 'active_price_ratio'), + cointimePrice: createMetricPattern1(this, 'cointime_price'), + cointimePriceRatio: createActivePriceRatioPattern(this, 'cointime_price_ratio'), + trueMarketMean: createMetricPattern1(this, 'true_market_mean'), + trueMarketMeanRatio: createActivePriceRatioPattern(this, 'true_market_mean_ratio'), + vaultedPrice: createMetricPattern1(this, 'vaulted_price'), + vaultedPriceRatio: createActivePriceRatioPattern(this, 'vaulted_price_ratio') }, supply: { - burned: { - opreturn: createUnclaimedRewardsPattern(this, 'opreturn_supply'), - unspendable: createUnclaimedRewardsPattern(this, 'unspendable_supply') + activeSupply: createActiveSupplyPattern(this, 'active_supply'), + vaultedSupply: createActiveSupplyPattern(this, 'vaulted_supply') + }, + value: { + cointimeValueCreated: createBlockCountPattern(this, 'cointime_value_created'), + cointimeValueDestroyed: createBlockCountPattern(this, 'cointime_value_destroyed'), + cointimeValueStored: createBlockCountPattern(this, 'cointime_value_stored') + } + }, + constants: { + constant0: 
createMetricPattern3(this, 'constant_0'), + constant1: createMetricPattern3(this, 'constant_1'), + constant100: createMetricPattern3(this, 'constant_100'), + constant2: createMetricPattern3(this, 'constant_2'), + constant20: createMetricPattern3(this, 'constant_20'), + constant3: createMetricPattern3(this, 'constant_3'), + constant30: createMetricPattern3(this, 'constant_30'), + constant382: createMetricPattern3(this, 'constant_38_2'), + constant4: createMetricPattern3(this, 'constant_4'), + constant50: createMetricPattern3(this, 'constant_50'), + constant600: createMetricPattern3(this, 'constant_600'), + constant618: createMetricPattern3(this, 'constant_61_8'), + constant70: createMetricPattern3(this, 'constant_70'), + constant80: createMetricPattern3(this, 'constant_80'), + constantMinus1: createMetricPattern3(this, 'constant_minus_1'), + constantMinus2: createMetricPattern3(this, 'constant_minus_2'), + constantMinus3: createMetricPattern3(this, 'constant_minus_3'), + constantMinus4: createMetricPattern3(this, 'constant_minus_4') + }, + distribution: { + addrCount: createAddrCountPattern(this, 'addr_count'), + addressCohorts: { + amountRange: { + _0sats: create_0satsPattern(this, 'addrs_with_0sats'), + _100btcTo1kBtc: create_0satsPattern(this, 'addrs_above_100btc_under_1k_btc'), + _100kBtcOrMore: create_0satsPattern(this, 'addrs_above_100k_btc'), + _100kSatsTo1mSats: create_0satsPattern(this, 'addrs_above_100k_sats_under_1m_sats'), + _100satsTo1kSats: create_0satsPattern(this, 'addrs_above_100sats_under_1k_sats'), + _10btcTo100btc: create_0satsPattern(this, 'addrs_above_10btc_under_100btc'), + _10kBtcTo100kBtc: create_0satsPattern(this, 'addrs_above_10k_btc_under_100k_btc'), + _10kSatsTo100kSats: create_0satsPattern(this, 'addrs_above_10k_sats_under_100k_sats'), + _10mSatsTo1btc: create_0satsPattern(this, 'addrs_above_10m_sats_under_1btc'), + _10satsTo100sats: create_0satsPattern(this, 'addrs_above_10sats_under_100sats'), + _1btcTo10btc: create_0satsPattern(this, 
'addrs_above_1btc_under_10btc'), + _1kBtcTo10kBtc: create_0satsPattern(this, 'addrs_above_1k_btc_under_10k_btc'), + _1kSatsTo10kSats: create_0satsPattern(this, 'addrs_above_1k_sats_under_10k_sats'), + _1mSatsTo10mSats: create_0satsPattern(this, 'addrs_above_1m_sats_under_10m_sats'), + _1satTo10sats: create_0satsPattern(this, 'addrs_above_1sat_under_10sats') }, - circulating: { - btc: createMetricPattern26(this, 'circulating_btc'), - indexes: createActiveSupplyPattern(this, 'circulating'), - sats: createMetricPattern26(this, 'circulating_sats'), - usd: createMetricPattern26(this, 'circulating_usd') + geAmount: { + _100btc: create_0satsPattern(this, 'addrs_above_100btc'), + _100kSats: create_0satsPattern(this, 'addrs_above_100k_sats'), + _100sats: create_0satsPattern(this, 'addrs_above_100sats'), + _10btc: create_0satsPattern(this, 'addrs_above_10btc'), + _10kBtc: create_0satsPattern(this, 'addrs_above_10k_btc'), + _10kSats: create_0satsPattern(this, 'addrs_above_10k_sats'), + _10mSats: create_0satsPattern(this, 'addrs_above_10m_sats'), + _10sats: create_0satsPattern(this, 'addrs_above_10sats'), + _1btc: create_0satsPattern(this, 'addrs_above_1btc'), + _1kBtc: create_0satsPattern(this, 'addrs_above_1k_btc'), + _1kSats: create_0satsPattern(this, 'addrs_above_1k_sats'), + _1mSats: create_0satsPattern(this, 'addrs_above_1m_sats'), + _1sat: create_0satsPattern(this, 'addrs_above_1sat') }, - inflation: { - indexes: createIndexesPattern2(this, 'inflation_rate') - }, - marketCap: { - height: createMetricPattern26(this, 'market_cap'), - indexes: createMetricPattern5(this, 'market_cap') - }, - velocity: { - btc: createIndexesPattern2(this, 'btc_velocity'), - usd: createIndexesPattern2(this, 'usd_velocity') + ltAmount: { + _100btc: create_0satsPattern(this, 'addrs_under_100btc'), + _100kBtc: create_0satsPattern(this, 'addrs_under_100k_btc'), + _100kSats: create_0satsPattern(this, 'addrs_under_100k_sats'), + _100sats: create_0satsPattern(this, 'addrs_under_100sats'), + _10btc: 
create_0satsPattern(this, 'addrs_under_10btc'), + _10kBtc: create_0satsPattern(this, 'addrs_under_10k_btc'), + _10kSats: create_0satsPattern(this, 'addrs_under_10k_sats'), + _10mSats: create_0satsPattern(this, 'addrs_under_10m_sats'), + _10sats: create_0satsPattern(this, 'addrs_under_10sats'), + _1btc: create_0satsPattern(this, 'addrs_under_1btc'), + _1kBtc: create_0satsPattern(this, 'addrs_under_1k_btc'), + _1kSats: create_0satsPattern(this, 'addrs_under_1k_sats'), + _1mSats: create_0satsPattern(this, 'addrs_under_1m_sats') } }, - transactions: { - count: { - isCoinbase: createMetricPattern39(this, 'is_coinbase'), - txCount: createDollarsPattern(this, 'tx_count') + addressesData: { + empty: createMetricPattern39(this, 'emptyaddressdata'), + loaded: createMetricPattern38(this, 'loadedaddressdata') + }, + anyAddressIndexes: { + p2a: createMetricPattern27(this, 'anyaddressindex'), + p2pk33: createMetricPattern29(this, 'anyaddressindex'), + p2pk65: createMetricPattern30(this, 'anyaddressindex'), + p2pkh: createMetricPattern31(this, 'anyaddressindex'), + p2sh: createMetricPattern32(this, 'anyaddressindex'), + p2tr: createMetricPattern33(this, 'anyaddressindex'), + p2wpkh: createMetricPattern34(this, 'anyaddressindex'), + p2wsh: createMetricPattern35(this, 'anyaddressindex') + }, + chainState: createMetricPattern23(this, 'chain'), + emptyAddrCount: createAddrCountPattern(this, 'empty_addr_count'), + emptyaddressindex: createMetricPattern39(this, 'emptyaddressindex'), + loadedaddressindex: createMetricPattern38(this, 'loadedaddressindex'), + utxoCohorts: { + ageRange: { + _10yTo12y: create_10yTo12yPattern(this, 'utxos_at_least_10y_up_to_12y_old'), + _12yTo15y: create_10yTo12yPattern(this, 'utxos_at_least_12y_up_to_15y_old'), + _1dTo1w: create_10yTo12yPattern(this, 'utxos_at_least_1d_up_to_1w_old'), + _1hTo1d: create_10yTo12yPattern(this, 'utxos_at_least_1h_up_to_1d_old'), + _1mTo2m: create_10yTo12yPattern(this, 'utxos_at_least_1m_up_to_2m_old'), + _1wTo1m: 
create_10yTo12yPattern(this, 'utxos_at_least_1w_up_to_1m_old'), + _1yTo2y: create_10yTo12yPattern(this, 'utxos_at_least_1y_up_to_2y_old'), + _2mTo3m: create_10yTo12yPattern(this, 'utxos_at_least_2m_up_to_3m_old'), + _2yTo3y: create_10yTo12yPattern(this, 'utxos_at_least_2y_up_to_3y_old'), + _3mTo4m: create_10yTo12yPattern(this, 'utxos_at_least_3m_up_to_4m_old'), + _3yTo4y: create_10yTo12yPattern(this, 'utxos_at_least_3y_up_to_4y_old'), + _4mTo5m: create_10yTo12yPattern(this, 'utxos_at_least_4m_up_to_5m_old'), + _4yTo5y: create_10yTo12yPattern(this, 'utxos_at_least_4y_up_to_5y_old'), + _5mTo6m: create_10yTo12yPattern(this, 'utxos_at_least_5m_up_to_6m_old'), + _5yTo6y: create_10yTo12yPattern(this, 'utxos_at_least_5y_up_to_6y_old'), + _6mTo1y: create_10yTo12yPattern(this, 'utxos_at_least_6m_up_to_1y_old'), + _6yTo7y: create_10yTo12yPattern(this, 'utxos_at_least_6y_up_to_7y_old'), + _7yTo8y: create_10yTo12yPattern(this, 'utxos_at_least_7y_up_to_8y_old'), + _8yTo10y: create_10yTo12yPattern(this, 'utxos_at_least_8y_up_to_10y_old'), + from15y: create_10yTo12yPattern(this, 'utxos_at_least_15y_old'), + upTo1h: create_10yTo12yPattern(this, 'utxos_up_to_1h_old') }, - fees: { - fee: { - base: createMetricPattern39(this, 'fee'), - bitcoin: { - average: createMetricPattern1(this, 'fee_btc_average'), - cumulative: createMetricPattern1(this, 'fee_btc_cum'), - max: createMetricPattern1(this, 'fee_btc_max'), - min: createMetricPattern1(this, 'fee_btc_min'), - sum: createMetricPattern1(this, 'fee_btc_sum') - }, - dollars: createCountPattern2(this, 'fee_usd'), - sats: createCountPattern2(this, 'fee') + all: { + activity: createActivityPattern2(this, ''), + costBasis: createCostBasisPattern2(this, ''), + realized: createRealizedPattern3(this, ''), + relative: { + negUnrealizedLossRelToOwnTotalUnrealizedPnl: createMetricPattern3(this, 'neg_unrealized_loss_rel_to_own_total_unrealized_pnl'), + netUnrealizedPnlRelToOwnTotalUnrealizedPnl: createMetricPattern3(this, 
'net_unrealized_pnl_rel_to_own_total_unrealized_pnl'), + supplyInLossRelToOwnSupply: createMetricPattern3(this, 'supply_in_loss_rel_to_own_supply'), + supplyInProfitRelToOwnSupply: createMetricPattern3(this, 'supply_in_profit_rel_to_own_supply'), + unrealizedLossRelToOwnTotalUnrealizedPnl: createMetricPattern3(this, 'unrealized_loss_rel_to_own_total_unrealized_pnl'), + unrealizedProfitRelToOwnTotalUnrealizedPnl: createMetricPattern3(this, 'unrealized_profit_rel_to_own_total_unrealized_pnl') }, - feeRate: { - average: createMetricPattern1(this, 'fee_rate_average'), - base: createMetricPattern39(this, 'fee_rate'), - max: createMetricPattern1(this, 'fee_rate_max'), - min: createMetricPattern1(this, 'fee_rate_min'), - percentiles: createPercentilesPattern(this, 'fee_rate') + supply: createSupplyPattern2(this, ''), + unrealized: createUnrealizedPattern(this, '') + }, + amountRange: { + _0sats: create_0satsPattern2(this, 'utxos_with_0sats'), + _100btcTo1kBtc: create_0satsPattern2(this, 'utxos_above_100btc_under_1k_btc'), + _100kBtcOrMore: create_0satsPattern2(this, 'utxos_above_100k_btc'), + _100kSatsTo1mSats: create_0satsPattern2(this, 'utxos_above_100k_sats_under_1m_sats'), + _100satsTo1kSats: create_0satsPattern2(this, 'utxos_above_100sats_under_1k_sats'), + _10btcTo100btc: create_0satsPattern2(this, 'utxos_above_10btc_under_100btc'), + _10kBtcTo100kBtc: create_0satsPattern2(this, 'utxos_above_10k_btc_under_100k_btc'), + _10kSatsTo100kSats: create_0satsPattern2(this, 'utxos_above_10k_sats_under_100k_sats'), + _10mSatsTo1btc: create_0satsPattern2(this, 'utxos_above_10m_sats_under_1btc'), + _10satsTo100sats: create_0satsPattern2(this, 'utxos_above_10sats_under_100sats'), + _1btcTo10btc: create_0satsPattern2(this, 'utxos_above_1btc_under_10btc'), + _1kBtcTo10kBtc: create_0satsPattern2(this, 'utxos_above_1k_btc_under_10k_btc'), + _1kSatsTo10kSats: create_0satsPattern2(this, 'utxos_above_1k_sats_under_10k_sats'), + _1mSatsTo10mSats: create_0satsPattern2(this, 
'utxos_above_1m_sats_under_10m_sats'), + _1satTo10sats: create_0satsPattern2(this, 'utxos_above_1sat_under_10sats') + }, + epoch: { + _0: create_0satsPattern2(this, 'epoch_0'), + _1: create_0satsPattern2(this, 'epoch_1'), + _2: create_0satsPattern2(this, 'epoch_2'), + _3: create_0satsPattern2(this, 'epoch_3'), + _4: create_0satsPattern2(this, 'epoch_4') + }, + geAmount: { + _100btc: create_100btcPattern(this, 'utxos_above_100btc'), + _100kSats: create_100btcPattern(this, 'utxos_above_100k_sats'), + _100sats: create_100btcPattern(this, 'utxos_above_100sats'), + _10btc: create_100btcPattern(this, 'utxos_above_10btc'), + _10kBtc: create_100btcPattern(this, 'utxos_above_10k_btc'), + _10kSats: create_100btcPattern(this, 'utxos_above_10k_sats'), + _10mSats: create_100btcPattern(this, 'utxos_above_10m_sats'), + _10sats: create_100btcPattern(this, 'utxos_above_10sats'), + _1btc: create_100btcPattern(this, 'utxos_above_1btc'), + _1kBtc: create_100btcPattern(this, 'utxos_above_1k_btc'), + _1kSats: create_100btcPattern(this, 'utxos_above_1k_sats'), + _1mSats: create_100btcPattern(this, 'utxos_above_1m_sats'), + _1sat: create_100btcPattern(this, 'utxos_above_1sat') + }, + ltAmount: { + _100btc: create_100btcPattern(this, 'utxos_under_100btc'), + _100kBtc: create_100btcPattern(this, 'utxos_under_100k_btc'), + _100kSats: create_100btcPattern(this, 'utxos_under_100k_sats'), + _100sats: create_100btcPattern(this, 'utxos_under_100sats'), + _10btc: create_100btcPattern(this, 'utxos_under_10btc'), + _10kBtc: create_100btcPattern(this, 'utxos_under_10k_btc'), + _10kSats: create_100btcPattern(this, 'utxos_under_10k_sats'), + _10mSats: create_100btcPattern(this, 'utxos_under_10m_sats'), + _10sats: create_100btcPattern(this, 'utxos_under_10sats'), + _1btc: create_100btcPattern(this, 'utxos_under_1btc'), + _1kBtc: create_100btcPattern(this, 'utxos_under_1k_btc'), + _1kSats: create_100btcPattern(this, 'utxos_under_1k_sats'), + _1mSats: create_100btcPattern(this, 'utxos_under_1m_sats') + }, 
+ maxAge: { + _10y: create_10yPattern(this, 'utxos_up_to_10y_old'), + _12y: create_10yPattern(this, 'utxos_up_to_12y_old'), + _15y: create_10yPattern(this, 'utxos_up_to_15y_old'), + _1m: create_10yPattern(this, 'utxos_up_to_1m_old'), + _1w: create_10yPattern(this, 'utxos_up_to_1w_old'), + _1y: create_10yPattern(this, 'utxos_up_to_1y_old'), + _2m: create_10yPattern(this, 'utxos_up_to_2m_old'), + _2y: create_10yPattern(this, 'utxos_up_to_2y_old'), + _3m: create_10yPattern(this, 'utxos_up_to_3m_old'), + _3y: create_10yPattern(this, 'utxos_up_to_3y_old'), + _4m: create_10yPattern(this, 'utxos_up_to_4m_old'), + _4y: create_10yPattern(this, 'utxos_up_to_4y_old'), + _5m: create_10yPattern(this, 'utxos_up_to_5m_old'), + _5y: create_10yPattern(this, 'utxos_up_to_5y_old'), + _6m: create_10yPattern(this, 'utxos_up_to_6m_old'), + _6y: create_10yPattern(this, 'utxos_up_to_6y_old'), + _7y: create_10yPattern(this, 'utxos_up_to_7y_old'), + _8y: create_10yPattern(this, 'utxos_up_to_8y_old') + }, + minAge: { + _10y: create_100btcPattern(this, 'utxos_at_least_10y_old'), + _12y: create_100btcPattern(this, 'utxos_at_least_12y_old'), + _1d: create_100btcPattern(this, 'utxos_at_least_1d_old'), + _1m: create_100btcPattern(this, 'utxos_at_least_1m_old'), + _1w: create_100btcPattern(this, 'utxos_at_least_1w_old'), + _1y: create_100btcPattern(this, 'utxos_at_least_1y_old'), + _2m: create_100btcPattern(this, 'utxos_at_least_2m_old'), + _2y: create_100btcPattern(this, 'utxos_at_least_2y_old'), + _3m: create_100btcPattern(this, 'utxos_at_least_3m_old'), + _3y: create_100btcPattern(this, 'utxos_at_least_3y_old'), + _4m: create_100btcPattern(this, 'utxos_at_least_4m_old'), + _4y: create_100btcPattern(this, 'utxos_at_least_4y_old'), + _5m: create_100btcPattern(this, 'utxos_at_least_5m_old'), + _5y: create_100btcPattern(this, 'utxos_at_least_5y_old'), + _6m: create_100btcPattern(this, 'utxos_at_least_6m_old'), + _6y: create_100btcPattern(this, 'utxos_at_least_6y_old'), + _7y: 
create_100btcPattern(this, 'utxos_at_least_7y_old'), + _8y: create_100btcPattern(this, 'utxos_at_least_8y_old') + }, + term: { + long: { + activity: createActivityPattern2(this, 'lth'), + costBasis: createCostBasisPattern2(this, 'lth'), + realized: createRealizedPattern2(this, 'lth'), + relative: createRelativePattern5(this, 'lth'), + supply: createSupplyPattern2(this, 'lth'), + unrealized: createUnrealizedPattern(this, 'lth') }, - inputValue: createMetricPattern39(this, 'input_value'), - outputValue: createMetricPattern39(this, 'output_value') + short: { + activity: createActivityPattern2(this, 'sth'), + costBasis: createCostBasisPattern2(this, 'sth'), + realized: createRealizedPattern3(this, 'sth'), + relative: createRelativePattern5(this, 'sth'), + supply: createSupplyPattern2(this, 'sth'), + unrealized: createUnrealizedPattern(this, 'sth') + } }, - size: { - txVsize: createBlockIntervalPattern(this, 'tx_vsize'), - txWeight: createBlockIntervalPattern(this, 'tx_weight'), - vsize: createMetricPattern39(this, 'vsize'), - weight: createMetricPattern39(this, 'weight') + type: { + empty: create_0satsPattern2(this, 'empty_outputs'), + p2a: create_0satsPattern2(this, 'p2a'), + p2ms: create_0satsPattern2(this, 'p2ms'), + p2pk33: create_0satsPattern2(this, 'p2pk33'), + p2pk65: create_0satsPattern2(this, 'p2pk65'), + p2pkh: create_0satsPattern2(this, 'p2pkh'), + p2sh: create_0satsPattern2(this, 'p2sh'), + p2tr: create_0satsPattern2(this, 'p2tr'), + p2wpkh: create_0satsPattern2(this, 'p2wpkh'), + p2wsh: create_0satsPattern2(this, 'p2wsh'), + unknown: create_0satsPattern2(this, 'unknown_outputs') }, - versions: { - txV1: createBlockCountPattern(this, 'tx_v1'), - txV2: createBlockCountPattern(this, 'tx_v2'), - txV3: createBlockCountPattern(this, 'tx_v3') - }, - volume: { - annualizedVolume: createMetricPattern5(this, 'annualized_volume'), - annualizedVolumeBtc: createMetricPattern5(this, 'annualized_volume_btc'), - annualizedVolumeUsd: createMetricPattern5(this, 
'annualized_volume_usd'), - inputsPerSec: createMetricPattern5(this, 'inputs_per_sec'), - outputsPerSec: createMetricPattern5(this, 'outputs_per_sec'), - sentSum: { - bitcoin: createMetricPattern1(this, 'sent_sum_btc'), - dollars: createDifficultyAdjustmentPattern(this, 'sent_sum_usd'), - sats: createDifficultyAdjustmentPattern(this, 'sent_sum') - }, - txPerSec: createMetricPattern5(this, 'tx_per_sec') + year: { + _2009: create_0satsPattern2(this, 'year_2009'), + _2010: create_0satsPattern2(this, 'year_2010'), + _2011: create_0satsPattern2(this, 'year_2011'), + _2012: create_0satsPattern2(this, 'year_2012'), + _2013: create_0satsPattern2(this, 'year_2013'), + _2014: create_0satsPattern2(this, 'year_2014'), + _2015: create_0satsPattern2(this, 'year_2015'), + _2016: create_0satsPattern2(this, 'year_2016'), + _2017: create_0satsPattern2(this, 'year_2017'), + _2018: create_0satsPattern2(this, 'year_2018'), + _2019: create_0satsPattern2(this, 'year_2019'), + _2020: create_0satsPattern2(this, 'year_2020'), + _2021: create_0satsPattern2(this, 'year_2021'), + _2022: create_0satsPattern2(this, 'year_2022'), + _2023: create_0satsPattern2(this, 'year_2023'), + _2024: create_0satsPattern2(this, 'year_2024'), + _2025: create_0satsPattern2(this, 'year_2025'), + _2026: create_0satsPattern2(this, 'year_2026') } } }, - indexed: { + indexes: { address: { - firstP2aaddressindex: createMetricPattern26(this, 'first_p2aaddressindex'), - firstP2pk33addressindex: createMetricPattern26(this, 'first_p2pk33addressindex'), - firstP2pk65addressindex: createMetricPattern26(this, 'first_p2pk65addressindex'), - firstP2pkhaddressindex: createMetricPattern26(this, 'first_p2pkhaddressindex'), - firstP2shaddressindex: createMetricPattern26(this, 'first_p2shaddressindex'), - firstP2traddressindex: createMetricPattern26(this, 'first_p2traddressindex'), - firstP2wpkhaddressindex: createMetricPattern26(this, 'first_p2wpkhaddressindex'), - firstP2wshaddressindex: createMetricPattern26(this, 
'first_p2wshaddressindex'), - p2abytes: createMetricPattern30(this, 'p2abytes'), - p2pk33bytes: createMetricPattern32(this, 'p2pk33bytes'), - p2pk65bytes: createMetricPattern33(this, 'p2pk65bytes'), - p2pkhbytes: createMetricPattern34(this, 'p2pkhbytes'), - p2shbytes: createMetricPattern35(this, 'p2shbytes'), - p2trbytes: createMetricPattern36(this, 'p2trbytes'), - p2wpkhbytes: createMetricPattern37(this, 'p2wpkhbytes'), - p2wshbytes: createMetricPattern38(this, 'p2wshbytes') + emptyoutputindex: createMetricPattern22(this, 'emptyoutputindex'), + opreturnindex: createMetricPattern25(this, 'opreturnindex'), + p2aaddressindex: createMetricPattern27(this, 'p2aaddressindex'), + p2msoutputindex: createMetricPattern28(this, 'p2msoutputindex'), + p2pk33addressindex: createMetricPattern29(this, 'p2pk33addressindex'), + p2pk65addressindex: createMetricPattern30(this, 'p2pk65addressindex'), + p2pkhaddressindex: createMetricPattern31(this, 'p2pkhaddressindex'), + p2shaddressindex: createMetricPattern32(this, 'p2shaddressindex'), + p2traddressindex: createMetricPattern33(this, 'p2traddressindex'), + p2wpkhaddressindex: createMetricPattern34(this, 'p2wpkhaddressindex'), + p2wshaddressindex: createMetricPattern35(this, 'p2wshaddressindex'), + unknownoutputindex: createMetricPattern37(this, 'unknownoutputindex') }, block: { - blockhash: createMetricPattern26(this, 'blockhash'), - difficulty: createMetricPattern26(this, 'difficulty'), - timestamp: createMetricPattern26(this, 'timestamp'), - totalSize: createMetricPattern26(this, 'total_size'), - weight: createMetricPattern26(this, 'weight') + dateindex: createMetricPattern23(this, 'dateindex'), + difficultyepoch: createMetricPattern12(this, 'difficultyepoch'), + firstHeight: createMetricPattern11(this, 'first_height'), + halvingepoch: createMetricPattern13(this, 'halvingepoch'), + height: createMetricPattern23(this, 'height'), + heightCount: createMetricPattern21(this, 'height_count'), + txindexCount: createMetricPattern23(this, 
'txindex_count') }, - output: { - firstEmptyoutputindex: createMetricPattern26(this, 'first_emptyoutputindex'), - firstOpreturnindex: createMetricPattern26(this, 'first_opreturnindex'), - firstP2msoutputindex: createMetricPattern26(this, 'first_p2msoutputindex'), - firstUnknownoutputindex: createMetricPattern26(this, 'first_unknownoutputindex'), - txindex: createMetricPattern8(this, 'txindex') + time: { + date: createMetricPattern19(this, 'date'), + dateindex: createMetricPattern19(this, 'dateindex'), + dateindexCount: createMetricPattern17(this, 'dateindex_count'), + decadeindex: createMetricPattern10(this, 'decadeindex'), + firstDateindex: createMetricPattern17(this, 'first_dateindex'), + firstHeight: createMetricPattern19(this, 'first_height'), + firstMonthindex: createMetricPattern6(this, 'first_monthindex'), + firstYearindex: createMetricPattern20(this, 'first_yearindex'), + heightCount: createMetricPattern19(this, 'height_count'), + monthindex: createMetricPattern8(this, 'monthindex'), + monthindexCount: createMetricPattern6(this, 'monthindex_count'), + quarterindex: createMetricPattern15(this, 'quarterindex'), + semesterindex: createMetricPattern16(this, 'semesterindex'), + weekindex: createMetricPattern9(this, 'weekindex'), + yearindex: createMetricPattern18(this, 'yearindex'), + yearindexCount: createMetricPattern20(this, 'yearindex_count') }, - tx: { - baseSize: createMetricPattern39(this, 'base_size'), - firstTxindex: createMetricPattern26(this, 'first_txindex'), - firstTxinindex: createMetricPattern39(this, 'first_txinindex'), - firstTxoutindex: createMetricPattern39(this, 'first_txoutindex'), - height: createMetricPattern39(this, 'height'), - isExplicitlyRbf: createMetricPattern39(this, 'is_explicitly_rbf'), - rawlocktime: createMetricPattern39(this, 'rawlocktime'), - totalSize: createMetricPattern39(this, 'total_size'), - txid: createMetricPattern39(this, 'txid'), - txversion: createMetricPattern39(this, 'txversion') + transaction: { + inputCount: 
createMetricPattern36(this, 'input_count'), + outputCount: createMetricPattern36(this, 'output_count'), + txindex: createMetricPattern36(this, 'txindex'), + txinindex: createMetricPattern24(this, 'txinindex'), + txoutindex: createMetricPattern26(this, 'txoutindex') + } + }, + inputs: { + count: { + count: createBlockSizePattern(this, 'input_count') }, - txin: { - firstTxinindex: createMetricPattern26(this, 'first_txinindex'), - outpoint: createMetricPattern27(this, 'outpoint'), - outputtype: createMetricPattern27(this, 'outputtype'), - txindex: createMetricPattern27(this, 'txindex'), - typeindex: createMetricPattern27(this, 'typeindex') + firstTxinindex: createMetricPattern23(this, 'first_txinindex'), + outpoint: createMetricPattern24(this, 'outpoint'), + outputtype: createMetricPattern24(this, 'outputtype'), + spent: { + txoutindex: createMetricPattern24(this, 'txoutindex'), + value: createMetricPattern24(this, 'value') }, - txout: { - firstTxoutindex: createMetricPattern26(this, 'first_txoutindex'), - outputtype: createMetricPattern29(this, 'outputtype'), - txindex: createMetricPattern29(this, 'txindex'), - typeindex: createMetricPattern29(this, 'typeindex'), - value: createMetricPattern29(this, 'value') + txindex: createMetricPattern24(this, 'txindex'), + typeindex: createMetricPattern24(this, 'typeindex') + }, + market: { + ath: { + daysSincePriceAth: createMetricPattern4(this, 'days_since_price_ath'), + maxDaysBetweenPriceAths: createMetricPattern4(this, 'max_days_between_price_aths'), + maxYearsBetweenPriceAths: createMetricPattern4(this, 'max_years_between_price_aths'), + priceAth: createMetricPattern3(this, 'price_ath'), + priceDrawdown: createMetricPattern3(this, 'price_drawdown'), + yearsSincePriceAth: createMetricPattern4(this, 'years_since_price_ath') + }, + dca: { + classAveragePrice: createClassAveragePricePattern(this, 'dca_class'), + classReturns: createClassAveragePricePattern(this, 'dca_class'), + classStack: { + _2015: 
createActiveSupplyPattern(this, 'dca_class_2015_stack'), + _2016: createActiveSupplyPattern(this, 'dca_class_2016_stack'), + _2017: createActiveSupplyPattern(this, 'dca_class_2017_stack'), + _2018: createActiveSupplyPattern(this, 'dca_class_2018_stack'), + _2019: createActiveSupplyPattern(this, 'dca_class_2019_stack'), + _2020: createActiveSupplyPattern(this, 'dca_class_2020_stack'), + _2021: createActiveSupplyPattern(this, 'dca_class_2021_stack'), + _2022: createActiveSupplyPattern(this, 'dca_class_2022_stack'), + _2023: createActiveSupplyPattern(this, 'dca_class_2023_stack'), + _2024: createActiveSupplyPattern(this, 'dca_class_2024_stack'), + _2025: createActiveSupplyPattern(this, 'dca_class_2025_stack') + }, + periodAveragePrice: createPeriodAveragePricePattern(this, 'dca_average_price'), + periodCagr: createPeriodCagrPattern(this, 'dca_cagr'), + periodLumpSumStack: createPeriodLumpSumStackPattern(this, ''), + periodReturns: createPeriodAveragePricePattern(this, 'dca_returns'), + periodStack: createPeriodLumpSumStackPattern(this, '') + }, + indicators: { + gini: createMetricPattern19(this, 'gini'), + macdHistogram: createMetricPattern19(this, 'macd_histogram'), + macdLine: createMetricPattern19(this, 'macd_line'), + macdSignal: createMetricPattern19(this, 'macd_signal'), + nvt: createMetricPattern4(this, 'nvt'), + piCycle: createMetricPattern19(this, 'pi_cycle'), + puellMultiple: createMetricPattern4(this, 'puell_multiple'), + rsi14d: createMetricPattern19(this, 'rsi_14d'), + rsi14dMax: createMetricPattern19(this, 'rsi_14d_max'), + rsi14dMin: createMetricPattern19(this, 'rsi_14d_min'), + rsiAverageGain14d: createMetricPattern19(this, 'rsi_average_gain_14d'), + rsiAverageLoss14d: createMetricPattern19(this, 'rsi_average_loss_14d'), + rsiGains: createMetricPattern19(this, 'rsi_gains'), + rsiLosses: createMetricPattern19(this, 'rsi_losses'), + stochD: createMetricPattern19(this, 'stoch_d'), + stochK: createMetricPattern19(this, 'stoch_k'), + stochRsi: 
createMetricPattern19(this, 'stoch_rsi'), + stochRsiD: createMetricPattern19(this, 'stoch_rsi_d'), + stochRsiK: createMetricPattern19(this, 'stoch_rsi_k') + }, + lookback: { + priceAgo: createPriceAgoPattern(this, 'price') + }, + movingAverage: { + price111dSma: createPrice111dSmaPattern(this, 'price_111d_sma'), + price12dEma: createPrice111dSmaPattern(this, 'price_12d_ema'), + price13dEma: createPrice111dSmaPattern(this, 'price_13d_ema'), + price13dSma: createPrice111dSmaPattern(this, 'price_13d_sma'), + price144dEma: createPrice111dSmaPattern(this, 'price_144d_ema'), + price144dSma: createPrice111dSmaPattern(this, 'price_144d_sma'), + price1mEma: createPrice111dSmaPattern(this, 'price_1m_ema'), + price1mSma: createPrice111dSmaPattern(this, 'price_1m_sma'), + price1wEma: createPrice111dSmaPattern(this, 'price_1w_ema'), + price1wSma: createPrice111dSmaPattern(this, 'price_1w_sma'), + price1yEma: createPrice111dSmaPattern(this, 'price_1y_ema'), + price1ySma: createPrice111dSmaPattern(this, 'price_1y_sma'), + price200dEma: createPrice111dSmaPattern(this, 'price_200d_ema'), + price200dSma: createPrice111dSmaPattern(this, 'price_200d_sma'), + price200dSmaX08: createMetricPattern4(this, 'price_200d_sma_x0_8'), + price200dSmaX24: createMetricPattern4(this, 'price_200d_sma_x2_4'), + price200wEma: createPrice111dSmaPattern(this, 'price_200w_ema'), + price200wSma: createPrice111dSmaPattern(this, 'price_200w_sma'), + price21dEma: createPrice111dSmaPattern(this, 'price_21d_ema'), + price21dSma: createPrice111dSmaPattern(this, 'price_21d_sma'), + price26dEma: createPrice111dSmaPattern(this, 'price_26d_ema'), + price2yEma: createPrice111dSmaPattern(this, 'price_2y_ema'), + price2ySma: createPrice111dSmaPattern(this, 'price_2y_sma'), + price34dEma: createPrice111dSmaPattern(this, 'price_34d_ema'), + price34dSma: createPrice111dSmaPattern(this, 'price_34d_sma'), + price350dSma: createPrice111dSmaPattern(this, 'price_350d_sma'), + price350dSmaX2: createMetricPattern4(this, 
'price_350d_sma_x2'), + price4yEma: createPrice111dSmaPattern(this, 'price_4y_ema'), + price4ySma: createPrice111dSmaPattern(this, 'price_4y_sma'), + price55dEma: createPrice111dSmaPattern(this, 'price_55d_ema'), + price55dSma: createPrice111dSmaPattern(this, 'price_55d_sma'), + price89dEma: createPrice111dSmaPattern(this, 'price_89d_ema'), + price89dSma: createPrice111dSmaPattern(this, 'price_89d_sma'), + price8dEma: createPrice111dSmaPattern(this, 'price_8d_ema'), + price8dSma: createPrice111dSmaPattern(this, 'price_8d_sma') + }, + range: { + price1mMax: createMetricPattern4(this, 'price_1m_max'), + price1mMin: createMetricPattern4(this, 'price_1m_min'), + price1wMax: createMetricPattern4(this, 'price_1w_max'), + price1wMin: createMetricPattern4(this, 'price_1w_min'), + price1yMax: createMetricPattern4(this, 'price_1y_max'), + price1yMin: createMetricPattern4(this, 'price_1y_min'), + price2wChoppinessIndex: createMetricPattern4(this, 'price_2w_choppiness_index'), + price2wMax: createMetricPattern4(this, 'price_2w_max'), + price2wMin: createMetricPattern4(this, 'price_2w_min'), + priceTrueRange: createMetricPattern19(this, 'price_true_range'), + priceTrueRange2wSum: createMetricPattern19(this, 'price_true_range_2w_sum') + }, + returns: { + _1dReturns1mSd: create_1dReturns1mSdPattern(this, '1d_returns_1m_sd'), + _1dReturns1wSd: create_1dReturns1mSdPattern(this, '1d_returns_1w_sd'), + _1dReturns1ySd: create_1dReturns1mSdPattern(this, '1d_returns_1y_sd'), + cagr: createPeriodCagrPattern(this, 'cagr'), + downside1mSd: create_1dReturns1mSdPattern(this, 'downside_1m_sd'), + downside1wSd: create_1dReturns1mSdPattern(this, 'downside_1w_sd'), + downside1ySd: create_1dReturns1mSdPattern(this, 'downside_1y_sd'), + downsideReturns: createMetricPattern19(this, 'downside_returns'), + priceReturns: createPriceAgoPattern(this, 'price_returns') + }, + volatility: { + price1mVolatility: createMetricPattern4(this, 'price_1m_volatility'), + price1wVolatility: 
createMetricPattern4(this, 'price_1w_volatility'), + price1yVolatility: createMetricPattern4(this, 'price_1y_volatility'), + sharpe1m: createMetricPattern19(this, 'sharpe_1m'), + sharpe1w: createMetricPattern19(this, 'sharpe_1w'), + sharpe1y: createMetricPattern19(this, 'sharpe_1y'), + sortino1m: createMetricPattern19(this, 'sortino_1m'), + sortino1w: createMetricPattern19(this, 'sortino_1w'), + sortino1y: createMetricPattern19(this, 'sortino_1y') + } + }, + outputs: { + count: { + count: createBlockSizePattern(this, 'output_count'), + utxoCount: createBitcoinPattern(this, 'exact_utxo_count') + }, + firstTxoutindex: createMetricPattern23(this, 'first_txoutindex'), + outputtype: createMetricPattern26(this, 'outputtype'), + spent: { + txinindex: createMetricPattern26(this, 'txinindex') + }, + txindex: createMetricPattern26(this, 'txindex'), + typeindex: createMetricPattern26(this, 'typeindex'), + value: createMetricPattern26(this, 'value') + }, + pools: { + pool: createMetricPattern23(this, 'pool'), + vecs: { + aaopool: createAaopoolPattern(this, 'aaopool'), + antpool: createAaopoolPattern(this, 'antpool'), + arkpool: createAaopoolPattern(this, 'arkpool'), + asicminer: createAaopoolPattern(this, 'asicminer'), + axbt: createAaopoolPattern(this, 'axbt'), + batpool: createAaopoolPattern(this, 'batpool'), + bcmonster: createAaopoolPattern(this, 'bcmonster'), + bcpoolio: createAaopoolPattern(this, 'bcpoolio'), + binancepool: createAaopoolPattern(this, 'binancepool'), + bitalo: createAaopoolPattern(this, 'bitalo'), + bitclub: createAaopoolPattern(this, 'bitclub'), + bitcoinaffiliatenetwork: createAaopoolPattern(this, 'bitcoinaffiliatenetwork'), + bitcoincom: createAaopoolPattern(this, 'bitcoincom'), + bitcoinindia: createAaopoolPattern(this, 'bitcoinindia'), + bitcoinrussia: createAaopoolPattern(this, 'bitcoinrussia'), + bitcoinukraine: createAaopoolPattern(this, 'bitcoinukraine'), + bitfarms: createAaopoolPattern(this, 'bitfarms'), + bitfufupool: 
createAaopoolPattern(this, 'bitfufupool'), + bitfury: createAaopoolPattern(this, 'bitfury'), + bitminter: createAaopoolPattern(this, 'bitminter'), + bitparking: createAaopoolPattern(this, 'bitparking'), + bitsolo: createAaopoolPattern(this, 'bitsolo'), + bixin: createAaopoolPattern(this, 'bixin'), + blockfills: createAaopoolPattern(this, 'blockfills'), + braiinspool: createAaopoolPattern(this, 'braiinspool'), + bravomining: createAaopoolPattern(this, 'bravomining'), + btcc: createAaopoolPattern(this, 'btcc'), + btccom: createAaopoolPattern(this, 'btccom'), + btcdig: createAaopoolPattern(this, 'btcdig'), + btcguild: createAaopoolPattern(this, 'btcguild'), + btclab: createAaopoolPattern(this, 'btclab'), + btcmp: createAaopoolPattern(this, 'btcmp'), + btcnuggets: createAaopoolPattern(this, 'btcnuggets'), + btcpoolparty: createAaopoolPattern(this, 'btcpoolparty'), + btcserv: createAaopoolPattern(this, 'btcserv'), + btctop: createAaopoolPattern(this, 'btctop'), + btpool: createAaopoolPattern(this, 'btpool'), + bwpool: createAaopoolPattern(this, 'bwpool'), + bytepool: createAaopoolPattern(this, 'bytepool'), + canoe: createAaopoolPattern(this, 'canoe'), + canoepool: createAaopoolPattern(this, 'canoepool'), + carbonnegative: createAaopoolPattern(this, 'carbonnegative'), + ckpool: createAaopoolPattern(this, 'ckpool'), + cloudhashing: createAaopoolPattern(this, 'cloudhashing'), + coinlab: createAaopoolPattern(this, 'coinlab'), + cointerra: createAaopoolPattern(this, 'cointerra'), + connectbtc: createAaopoolPattern(this, 'connectbtc'), + dcex: createAaopoolPattern(this, 'dcex'), + dcexploration: createAaopoolPattern(this, 'dcexploration'), + digitalbtc: createAaopoolPattern(this, 'digitalbtc'), + digitalxmintsy: createAaopoolPattern(this, 'digitalxmintsy'), + dpool: createAaopoolPattern(this, 'dpool'), + eclipsemc: createAaopoolPattern(this, 'eclipsemc'), + eightbaochi: createAaopoolPattern(this, 'eightbaochi'), + ekanembtc: createAaopoolPattern(this, 'ekanembtc'), + eligius: 
createAaopoolPattern(this, 'eligius'), + emcdpool: createAaopoolPattern(this, 'emcdpool'), + entrustcharitypool: createAaopoolPattern(this, 'entrustcharitypool'), + eobot: createAaopoolPattern(this, 'eobot'), + exxbw: createAaopoolPattern(this, 'exxbw'), + f2pool: createAaopoolPattern(this, 'f2pool'), + fiftyeightcoin: createAaopoolPattern(this, 'fiftyeightcoin'), + foundryusa: createAaopoolPattern(this, 'foundryusa'), + futurebitapollosolo: createAaopoolPattern(this, 'futurebitapollosolo'), + gbminers: createAaopoolPattern(this, 'gbminers'), + ghashio: createAaopoolPattern(this, 'ghashio'), + givemecoins: createAaopoolPattern(this, 'givemecoins'), + gogreenlight: createAaopoolPattern(this, 'gogreenlight'), + haominer: createAaopoolPattern(this, 'haominer'), + haozhuzhu: createAaopoolPattern(this, 'haozhuzhu'), + hashbx: createAaopoolPattern(this, 'hashbx'), + hashpool: createAaopoolPattern(this, 'hashpool'), + helix: createAaopoolPattern(this, 'helix'), + hhtt: createAaopoolPattern(this, 'hhtt'), + hotpool: createAaopoolPattern(this, 'hotpool'), + hummerpool: createAaopoolPattern(this, 'hummerpool'), + huobipool: createAaopoolPattern(this, 'huobipool'), + innopolistech: createAaopoolPattern(this, 'innopolistech'), + kanopool: createAaopoolPattern(this, 'kanopool'), + kncminer: createAaopoolPattern(this, 'kncminer'), + kucoinpool: createAaopoolPattern(this, 'kucoinpool'), + lubiancom: createAaopoolPattern(this, 'lubiancom'), + luckypool: createAaopoolPattern(this, 'luckypool'), + luxor: createAaopoolPattern(this, 'luxor'), + marapool: createAaopoolPattern(this, 'marapool'), + maxbtc: createAaopoolPattern(this, 'maxbtc'), + maxipool: createAaopoolPattern(this, 'maxipool'), + megabigpower: createAaopoolPattern(this, 'megabigpower'), + minerium: createAaopoolPattern(this, 'minerium'), + miningcity: createAaopoolPattern(this, 'miningcity'), + miningdutch: createAaopoolPattern(this, 'miningdutch'), + miningkings: createAaopoolPattern(this, 'miningkings'), + 
miningsquared: createAaopoolPattern(this, 'miningsquared'), + mmpool: createAaopoolPattern(this, 'mmpool'), + mtred: createAaopoolPattern(this, 'mtred'), + multicoinco: createAaopoolPattern(this, 'multicoinco'), + multipool: createAaopoolPattern(this, 'multipool'), + mybtccoinpool: createAaopoolPattern(this, 'mybtccoinpool'), + neopool: createAaopoolPattern(this, 'neopool'), + nexious: createAaopoolPattern(this, 'nexious'), + nicehash: createAaopoolPattern(this, 'nicehash'), + nmcbit: createAaopoolPattern(this, 'nmcbit'), + novablock: createAaopoolPattern(this, 'novablock'), + ocean: createAaopoolPattern(this, 'ocean'), + okexpool: createAaopoolPattern(this, 'okexpool'), + okkong: createAaopoolPattern(this, 'okkong'), + okminer: createAaopoolPattern(this, 'okminer'), + okpooltop: createAaopoolPattern(this, 'okpooltop'), + onehash: createAaopoolPattern(this, 'onehash'), + onem1x: createAaopoolPattern(this, 'onem1x'), + onethash: createAaopoolPattern(this, 'onethash'), + ozcoin: createAaopoolPattern(this, 'ozcoin'), + parasite: createAaopoolPattern(this, 'parasite'), + patels: createAaopoolPattern(this, 'patels'), + pegapool: createAaopoolPattern(this, 'pegapool'), + phashio: createAaopoolPattern(this, 'phashio'), + phoenix: createAaopoolPattern(this, 'phoenix'), + polmine: createAaopoolPattern(this, 'polmine'), + pool175btc: createAaopoolPattern(this, 'pool175btc'), + pool50btc: createAaopoolPattern(this, 'pool50btc'), + poolin: createAaopoolPattern(this, 'poolin'), + portlandhodl: createAaopoolPattern(this, 'portlandhodl'), + publicpool: createAaopoolPattern(this, 'publicpool'), + purebtccom: createAaopoolPattern(this, 'purebtccom'), + rawpool: createAaopoolPattern(this, 'rawpool'), + rigpool: createAaopoolPattern(this, 'rigpool'), + sbicrypto: createAaopoolPattern(this, 'sbicrypto'), + secpool: createAaopoolPattern(this, 'secpool'), + secretsuperstar: createAaopoolPattern(this, 'secretsuperstar'), + sevenpool: createAaopoolPattern(this, 'sevenpool'), + 
shawnp0wers: createAaopoolPattern(this, 'shawnp0wers'), + sigmapoolcom: createAaopoolPattern(this, 'sigmapoolcom'), + simplecoinus: createAaopoolPattern(this, 'simplecoinus'), + solock: createAaopoolPattern(this, 'solock'), + spiderpool: createAaopoolPattern(this, 'spiderpool'), + stminingcorp: createAaopoolPattern(this, 'stminingcorp'), + tangpool: createAaopoolPattern(this, 'tangpool'), + tatmaspool: createAaopoolPattern(this, 'tatmaspool'), + tbdice: createAaopoolPattern(this, 'tbdice'), + telco214: createAaopoolPattern(this, 'telco214'), + terrapool: createAaopoolPattern(this, 'terrapool'), + tiger: createAaopoolPattern(this, 'tiger'), + tigerpoolnet: createAaopoolPattern(this, 'tigerpoolnet'), + titan: createAaopoolPattern(this, 'titan'), + transactioncoinmining: createAaopoolPattern(this, 'transactioncoinmining'), + trickysbtcpool: createAaopoolPattern(this, 'trickysbtcpool'), + triplemining: createAaopoolPattern(this, 'triplemining'), + twentyoneinc: createAaopoolPattern(this, 'twentyoneinc'), + ultimuspool: createAaopoolPattern(this, 'ultimuspool'), + unknown: createAaopoolPattern(this, 'unknown'), + unomp: createAaopoolPattern(this, 'unomp'), + viabtc: createAaopoolPattern(this, 'viabtc'), + waterhole: createAaopoolPattern(this, 'waterhole'), + wayicn: createAaopoolPattern(this, 'wayicn'), + whitepool: createAaopoolPattern(this, 'whitepool'), + wk057: createAaopoolPattern(this, 'wk057'), + yourbtcnet: createAaopoolPattern(this, 'yourbtcnet'), + zulupool: createAaopoolPattern(this, 'zulupool') + } + }, + positions: { + position: createMetricPattern14(this, 'position') + }, + price: { + cents: { + ohlc: createMetricPattern7(this, 'ohlc_cents') + }, + sats: { + priceCloseSats: createMetricPattern1(this, 'price_close_sats'), + priceHighSats: createPriceHighSatsPattern(this, 'price_high_sats'), + priceLowSats: createPriceHighSatsPattern(this, 'price_low_sats'), + priceOhlcSats: createMetricPattern1(this, 'price_ohlc_sats'), + priceOpenSats: 
createMetricPattern1(this, 'price_open_sats') + }, + usd: { + priceClose: createMetricPattern1(this, 'price_close'), + priceCloseCents: createMetricPattern7(this, 'price_close_cents'), + priceHigh: createPriceHighSatsPattern(this, 'price_high'), + priceHighCents: createMetricPattern7(this, 'price_high_cents'), + priceLow: createPriceHighSatsPattern(this, 'price_low'), + priceLowCents: createMetricPattern7(this, 'price_low_cents'), + priceOhlc: createMetricPattern1(this, 'price_ohlc'), + priceOpen: createMetricPattern1(this, 'price_open'), + priceOpenCents: createMetricPattern7(this, 'price_open_cents') + } + }, + scripts: { + count: { + emptyoutput: createBitcoinPattern(this, 'emptyoutput_count'), + opreturn: createBitcoinPattern(this, 'opreturn_count'), + p2a: createBitcoinPattern(this, 'p2a_count'), + p2ms: createBitcoinPattern(this, 'p2ms_count'), + p2pk33: createBitcoinPattern(this, 'p2pk33_count'), + p2pk65: createBitcoinPattern(this, 'p2pk65_count'), + p2pkh: createBitcoinPattern(this, 'p2pkh_count'), + p2sh: createBitcoinPattern(this, 'p2sh_count'), + p2tr: createBitcoinPattern(this, 'p2tr_count'), + p2wpkh: createBitcoinPattern(this, 'p2wpkh_count'), + p2wsh: createBitcoinPattern(this, 'p2wsh_count'), + segwit: createBitcoinPattern(this, 'segwit_count'), + segwitAdoption: createSatsPattern(this, 'segwit_adoption'), + taprootAdoption: createSatsPattern(this, 'taproot_adoption'), + unknownoutput: createBitcoinPattern(this, 'unknownoutput_count') + }, + emptyToTxindex: createMetricPattern22(this, 'txindex'), + firstEmptyoutputindex: createMetricPattern23(this, 'first_emptyoutputindex'), + firstOpreturnindex: createMetricPattern23(this, 'first_opreturnindex'), + firstP2msoutputindex: createMetricPattern23(this, 'first_p2msoutputindex'), + firstUnknownoutputindex: createMetricPattern23(this, 'first_unknownoutputindex'), + opreturnToTxindex: createMetricPattern25(this, 'txindex'), + p2msToTxindex: createMetricPattern28(this, 'txindex'), + unknownToTxindex: 
createMetricPattern37(this, 'txindex'), + value: { + opreturn: createCoinbasePattern(this, 'opreturn_value') + } + }, + supply: { + burned: { + opreturn: createUnclaimedRewardsPattern(this, 'opreturn_supply'), + unspendable: createUnclaimedRewardsPattern(this, 'unspendable_supply') + }, + circulating: createActiveSupplyPattern(this, 'circulating'), + inflation: createMetricPattern4(this, 'inflation_rate'), + marketCap: createMetricPattern3(this, 'market_cap'), + velocity: { + btc: createMetricPattern4(this, 'btc_velocity'), + usd: createMetricPattern4(this, 'usd_velocity') + } + }, + transactions: { + baseSize: createMetricPattern36(this, 'base_size'), + count: { + isCoinbase: createMetricPattern36(this, 'is_coinbase'), + txCount: createBitcoinPattern(this, 'tx_count') + }, + fees: { + fee: { + bitcoin: createBlockSizePattern(this, 'fee_btc'), + dollars: createBlockSizePattern(this, 'fee_usd'), + sats: createBitcoinPattern(this, 'fee') + }, + feeRate: createIntervalPattern(this, 'fee_rate'), + inputValue: createMetricPattern36(this, 'input_value'), + outputValue: createMetricPattern36(this, 'output_value') + }, + firstTxindex: createMetricPattern23(this, 'first_txindex'), + firstTxinindex: createMetricPattern36(this, 'first_txinindex'), + firstTxoutindex: createMetricPattern36(this, 'first_txoutindex'), + height: createMetricPattern36(this, 'height'), + isExplicitlyRbf: createMetricPattern36(this, 'is_explicitly_rbf'), + rawlocktime: createMetricPattern36(this, 'rawlocktime'), + size: { + txVsize: createTxVsizePattern(this, 'tx_vsize'), + txWeight: createTxVsizePattern(this, 'tx_weight'), + vsize: createMetricPattern36(this, 'vsize'), + weight: createMetricPattern36(this, 'weight') + }, + totalSize: createMetricPattern36(this, 'total_size'), + txid: createMetricPattern36(this, 'txid'), + txversion: createMetricPattern36(this, 'txversion'), + versions: { + txV1: createBlockCountPattern(this, 'tx_v1'), + txV2: createBlockCountPattern(this, 'tx_v2'), + txV3: 
createBlockCountPattern(this, 'tx_v3') + }, + volume: { + annualizedVolume: createMetricPattern4(this, 'annualized_volume'), + annualizedVolumeBtc: createMetricPattern4(this, 'annualized_volume_btc'), + annualizedVolumeUsd: createMetricPattern4(this, 'annualized_volume_usd'), + inputsPerSec: createMetricPattern4(this, 'inputs_per_sec'), + outputsPerSec: createMetricPattern4(this, 'outputs_per_sec'), + sentSum: createActiveSupplyPattern(this, 'sent_sum'), + txPerSec: createMetricPattern4(this, 'tx_per_sec') } } }; @@ -6873,20 +6521,20 @@ class BrkClient extends BrkClientBase { /** * Get metric data * @description Fetch data for a specific metric at the given index. Use query parameters to filter by date range and format (json/csv). - * @param {Metric} metric Metric name * @param {Index} index Aggregation index - * @param {*=} [from] Inclusive starting index, if negative counts from end - * @param {*=} [to] Exclusive ending index, if negative counts from end + * @param {Metric} metric Metric name * @param {*=} [count] Number of values to return (ignored if `to` is set) * @param {Format=} [format] Format of the output + * @param {*=} [from] Inclusive starting index, if negative counts from end + * @param {*=} [to] Exclusive ending index, if negative counts from end * @returns {Promise} */ - async getMetricByIndex(metric, index, from, to, count, format) { + async getMetricByIndex(index, metric, count, format, from, to) { const params = new URLSearchParams(); - if (from !== undefined) params.set('from', String(from)); - if (to !== undefined) params.set('to', String(to)); if (count !== undefined) params.set('count', String(count)); if (format !== undefined) params.set('format', String(format)); + if (from !== undefined) params.set('from', String(from)); + if (to !== undefined) params.set('to', String(to)); const query = params.toString(); return this.get(`/api/metric/${metric}/${index}${query ? '?' 
+ query : ''}`); } @@ -6894,22 +6542,22 @@ class BrkClient extends BrkClientBase { /** * Bulk metric data * @description Fetch multiple metrics in a single request. Supports filtering by index and date range. Returns an array of MetricData objects. - * @param {Metrics} [metrics] Requested metrics - * @param {Index} [index] Index to query - * @param {*=} [from] Inclusive starting index, if negative counts from end - * @param {*=} [to] Exclusive ending index, if negative counts from end * @param {*=} [count] Number of values to return (ignored if `to` is set) * @param {Format=} [format] Format of the output + * @param {*=} [from] Inclusive starting index, if negative counts from end + * @param {Index} [index] Index to query + * @param {Metrics} [metrics] Requested metrics + * @param {*=} [to] Exclusive ending index, if negative counts from end * @returns {Promise} */ - async getMetricsBulk(metrics, index, from, to, count, format) { + async getMetricsBulk(count, format, from, index, metrics, to) { const params = new URLSearchParams(); - params.set('metrics', String(metrics)); - params.set('index', String(index)); - if (from !== undefined) params.set('from', String(from)); - if (to !== undefined) params.set('to', String(to)); if (count !== undefined) params.set('count', String(count)); if (format !== undefined) params.set('format', String(format)); + if (from !== undefined) params.set('from', String(from)); + params.set('index', String(index)); + params.set('metrics', String(metrics)); + if (to !== undefined) params.set('to', String(to)); const query = params.toString(); return this.get(`/api/metrics/bulk${query ? '?' + query : ''}`); } @@ -7191,51 +6839,6 @@ class BrkClient extends BrkClientBase { return this.get(`/version`); } - - /** - * Merge multiple MetricPatterns into a single pattern. - * Throws if any two patterns have overlapping indexes. 
- * @template T - * @param {...MetricPattern} patterns - The patterns to merge - * @returns {MetricPattern} A new merged pattern - */ - mergeMetricPatterns(...patterns) { - if (patterns.length === 0) { - throw new BrkError('mergeMetricPatterns requires at least one pattern'); - } - if (patterns.length === 1) { - return patterns[0]; - } - - const seenIndexes = /** @type {Map} */ (new Map()); - const mergedBy = /** @type {Partial>>} */ ({}); - - for (const pattern of patterns) { - for (const index of pattern.indexes()) { - const existing = seenIndexes.get(index); - if (existing !== undefined) { - throw new BrkError(`Index '${index}' exists in both '${existing}' and '${pattern.name}'`); - } - seenIndexes.set(index, pattern.name); - Object.defineProperty(mergedBy, index, { - get() { return pattern.get(index); }, - enumerable: true, - configurable: true, - }); - } - } - - const allIndexes = /** @type {Index[]} */ ([...seenIndexes.keys()]); - const firstName = patterns[0].name; - - return { - name: firstName, - by: mergedBy, - indexes() { return allIndexes; }, - get(index) { return mergedBy[index]; }, - }; - } - } export { BrkClient, BrkError }; diff --git a/modules/brk-client/package.json b/modules/brk-client/package.json index 462ba3bbc..f93ced113 100644 --- a/modules/brk-client/package.json +++ b/modules/brk-client/package.json @@ -1,9 +1,11 @@ { - "name": "@bitcoinresearchkit/client", - "version": "0.1.0-alpha.2", + "bugs": { + "url": "https://github.com/bitcoinresearchkit/brk/issues" + }, "description": "BRK JavaScript client", - "type": "module", - "main": "index.js", + "engines": { + "node": ">=18" + }, "exports": { ".": "./index.js" }, @@ -11,6 +13,7 @@ "index.js", "generated" ], + "homepage": "https://github.com/bitcoinresearchkit/brk/tree/main/modules/brk-client", "keywords": [ "brk", "bitcoin", @@ -18,16 +21,13 @@ "research" ], "license": "MIT", + "main": "index.js", + "name": "@bitcoinresearchkit/client", "repository": { + "directory": "modules/brk-client", 
"type": "git", - "url": "git+https://github.com/bitcoinresearchkit/brk.git", - "directory": "modules/brk-client" + "url": "git+https://github.com/bitcoinresearchkit/brk.git" }, - "homepage": "https://github.com/bitcoinresearchkit/brk/tree/main/modules/brk-client", - "bugs": { - "url": "https://github.com/bitcoinresearchkit/brk/issues" - }, - "engines": { - "node": ">=18" - } + "type": "module", + "version": "0.1.0-alpha.2" } diff --git a/modules/lean-qr/2.6.0/index.d.ts b/modules/lean-qr/2.6.0/index.d.ts deleted file mode 100644 index 9764296d1..000000000 --- a/modules/lean-qr/2.6.0/index.d.ts +++ /dev/null @@ -1,656 +0,0 @@ -declare module 'lean-qr' { - interface ImageDataLike { - readonly data: Uint8ClampedArray; - } - - interface Context2DLike { - createImageData(width: number, height: number): DataT; - putImageData(data: DataT, x: number, y: number): void; - } - - interface CanvasLike { - width: number; - height: number; - getContext(type: '2d'): Context2DLike | null; - } - - /** - * A colour in `[red, green, blue, alpha]` format (all values from 0 to 255). - * If alpha is omitted, it is assumed to be 255 (opaque). - */ - export type RGBA = readonly [number, number, number, number?]; - - export interface Bitmap1D { - /** - * Appends a sequence of bits. - * - * @param value an integer containing the bits to append (big endian). - * @param bits the number of bits to read from `value`. Must be between 1 and 24. 
- */ - push(value: number, bits: number): void; - } - - export interface StringOptions { - /** the text to use for modules which are 'on' (typically black) */ - on?: string; - - /** the text to use for modules which are 'off' (typically white) */ - off?: string; - - /** the text to use for linefeeds between rows */ - lf?: string; - - /** the padding to apply around the output (populated with 'off' modules) */ - pad?: number; - - /** - * the padding to apply on the left and right of the output (populated with 'off' modules) - * @deprecated use `pad` instead - */ - padX?: number; - - /** - * the padding to apply on the top and bottom of the output (populated with 'off' modules) - * @deprecated use `pad` instead - */ - padY?: number; - } - - export interface ImageDataOptions { - /** the colour to use for modules which are 'on' (typically black) */ - on?: RGBA; - - /** the colour to use for modules which are 'off' (typically white) */ - off?: RGBA; - - /** the padding to apply around the output (filled with 'off') */ - pad?: number; - - /** - * the padding to apply on the left and right of the output (filled with 'off') - * @deprecated use `pad` instead - */ - padX?: number; - - /** - * the padding to apply on the top and bottom of the output (filled with 'off') - * @deprecated use `pad` instead - */ - padY?: number; - } - - export interface Bitmap2D { - /** the width / height of the QR code in modules (excluding any padding) */ - readonly size: number; - - /** - * Read the state of a module from the QR code. - * - * @param x the x coordinate to read. Can be negative / out of bounds. - * @param y the y coordinate to read. Can be negative / out of bounds. - * @returns true if the requested module is set (i.e. typically black) - */ - get(x: number, y: number): boolean; - - /** - * Generate a string containing the QR code, suitable for displaying in a - * terminal environment. 
Generally, you should customise on and off to use - * the ANSI escapes of your target terminal for better rendering. - * - * @param options optional configuration for the display. - */ - toString(options?: Readonly): string; - - /** - * Generate image data containing the QR code, at a scale of 1 pixel per - * module. Use this if you need more control than toCanvas allows. - * - * @param context a context to use for creating the image data. - * @param options optional configuration for the display. - */ - toImageData( - context: Context2DLike, - options?: Readonly, - ): DataT; - - /** - * Generate a `data:image/*` URL for the QR code. - * - * @param options optional configuration for the output. - * @returns a string suitable for use as the `src` of an `img` tag. - */ - toDataURL( - options?: Readonly< - ImageDataOptions & { - type?: `image/${string}`; - scale?: number; - } - >, - ): string; - - /** - * Populate a given canvas with the QR code, at a scale of 1 pixel per - * module. Set image-rendering: pixelated and scale the canvas using CSS - * for a large image. Automatically resizes the canvas to fit the QR code - * if necessary. - * - * @param canvas the canvas to populate. - * @param options optional configuration for the display. 
- */ - toCanvas( - canvas: CanvasLike, - options?: Readonly, - ): void; - } - - export type Mask = 0 | 1 | 2 | 3 | 4 | 5 | 6 | 7; - export type Mode = (data: Bitmap1D, version: number) => void; - export interface ModeFactory { - (value: string): Mode; - /** a function which returns true when given a character which the current mode can represent */ - test(string: string): boolean; - /** a function which returns an estimate of the number of bits required to encode a given value */ - est(value: string, version: number): number; - /** an optional ECI which must be active for this mode to be interpreted correctly by a reader */ - eci?: number; - } - - interface ModeAutoOptions { - /** a list of modes which can be considered when encoding a message */ - modes?: ReadonlyArray; - } - - export const mode: Readonly<{ - /** automatically picks the most optimal combination of modes for the requested message */ - auto(value: string, options?: Readonly): Mode; - /** concatenates multiple modes together */ - multi(...modes: ReadonlyArray): Mode; - /** sets the Extended Channel Interpretation for the message from this point onwards */ - eci(id: number): Mode; - /** supports `0-9` and stores 3 characters per 10 bits */ - numeric: ModeFactory; - /** supports `0-9A-Z $%*+-./:` and stores 2 characters per 11 bits */ - alphaNumeric: ModeFactory; - /** arbitrary byte data, typically combined with `eci` */ - bytes(data: Uint8Array | ReadonlyArray): Mode; - /** supports 7-bit ASCII and stores 1 character per 8 bits with no ECI */ - ascii: ModeFactory; - /** supports 8-bit ISO-8859-1 and stores 1 character per 8 bits with ECI 3 */ - iso8859_1: ModeFactory; - /** supports double-byte Shift-JIS characters stores 1 character per 13 bits */ - shift_jis: ModeFactory; - /** supports variable length UTF-8 with ECI 26 */ - utf8: ModeFactory; - }>; - - export type Correction = number & { readonly _: unique symbol }; - export const correction: Readonly<{ - /** - * minimum possible correction level 
(same as L) - * @deprecated use correction.L - */ - min: Correction; - /** ~7.5% error tolerance, ~25% data overhead */ - L: Correction; - /** ~15% error tolerance, ~60% data overhead */ - M: Correction; - /** ~22.5% error tolerance, ~120% data overhead */ - Q: Correction; - /** ~30% error tolerance, ~190% data overhead */ - H: Correction; - /** - * maximum possible correction level (same as H) - * @deprecated use correction.H - */ - max: Correction; - }>; - - export interface GenerateOptions extends ModeAutoOptions { - /** the minimum correction level to use (higher levels may still be used if the chosen version has space) */ - minCorrectionLevel?: Correction; - /** the maximum correction level to use */ - maxCorrectionLevel?: Correction; - /** the minimum version (size) of code to generate (must be between 1 and 40) */ - minVersion?: number; - /** the maximum version (size) of code to generate (must be between 1 and 40) */ - maxVersion?: number; - /** a mask to use on the QR code (should be left as `null` for ISO compliance but may be changed for artistic effect) */ - mask?: null | Mask; - /** padding bits to use for extra space in the QR code (should be left as the default for ISO compliance but may be changed for artistic effect) */ - trailer?: number; - } - - /** - * Generate a QR code. - * - * @param data either a string, or a pre-encoded mode. - * @param options optional configuration for the QR code. - * @returns the requested QR code. - */ - export type GenerateFn = ( - data: Mode | string, - options?: Readonly, - ) => Bitmap2D; - interface Generate extends GenerateFn { - /** - * Creates a scoped `generate` function which considers additional modes - * when using auto encoding. - * - * @param modes the modes to add. - * @returns a `generate` function which will additionally consider the - * given modes when using auto encoding. - * - * @deprecated this will be removed in version 3. Prefer passing an explicit list of modes when calling `generate`. 
- */ - with(...modes: ReadonlyArray): GenerateFn; - } - export const generate: Generate; -} - -declare module 'lean-qr/nano' { - import type { - Correction, - Bitmap2D as FullBitmap2D, - GenerateOptions as FullGenerateOptions, - } from 'lean-qr'; - import { correction as fullCorrection } from 'lean-qr'; - - export type { Correction }; - - export const correction: Pick; - - export type Bitmap2D = Pick; - - export type GenerateOptions = Pick< - FullGenerateOptions, - 'minCorrectionLevel' | 'minVersion' - >; - - /** - * Generate a QR code. - * - * @param data either a string, or a pre-encoded mode. - * @param options optional configuration for the QR code. - * @returns the requested QR code. - */ - export function generate( - data: string, - options?: Readonly, - ): Bitmap2D; -} - -declare module 'lean-qr/extras/svg' { - import type { Bitmap2D as FullBitmap2D } from 'lean-qr'; - - type Bitmap2D = Pick; - - export interface SVGOptions { - /** the colour to use for modules which are 'on' (typically black) */ - on?: string; - /** the colour to use for modules which are 'off' (typically white) */ - off?: string; - /** the padding to apply around the output (filled with 'off') */ - pad?: number; - /** - * the padding to apply on the left and right of the output (filled with 'off') - * @deprecated use `pad` instead - */ - padX?: number; - /** - * the padding to apply on the top and bottom of the output (filled with 'off') - * @deprecated use `pad` instead - */ - padY?: number; - /** a width to apply to the resulting image (overrides `scale`) */ - width?: number | null; - /** a height to apply to the resulting image (overrides `scale`) */ - height?: number | null; - /** a scale to apply to the resulting image (`scale` pixels = 1 module) */ - scale?: number; - } - - /** - * Generate the raw outline of the QR code for use in an existing SVG. - * - * @param code the QR code to convert. - * @returns a string suitable for passing to the `d` attribute of a `path`. 
- */ - export function toSvgPath(code: Bitmap2D): string; - - /** - * Generate an SVG element which can be added to the DOM. - * - * @param code the QR code to convert. - * @param options optional configuration for the display. - * @returns an SVG element. - */ - export function toSvg( - code: Bitmap2D, - target: Document | SVGElement, - options?: Readonly, - ): SVGElement; - - /** - * Generate an SVG document which can be exported to a file or served from a - * web server. - * - * @param code the QR code to convert. - * @param options optional configuration for the display. - * @returns an SVG document. - */ - export function toSvgSource( - code: Bitmap2D, - options?: Readonly< - SVGOptions & { - /** `true` to include an XML declaration at the start of the source (for standalone documents which will not be embedded inside another document) */ - xmlDeclaration?: boolean; - } - >, - ): string; - - /** - * Generate a `data:image/svg+xml` URL. - * - * @param code the QR code to convert. - * @param options optional configuration for the display. - * @returns a string suitable for use as the `src` of an `img` tag. 
- */ - export function toSvgDataURL( - code: Bitmap2D, - options?: Readonly, - ): string; -} - -declare module 'lean-qr/extras/node_export' { - import type { RGBA, Bitmap2D as FullBitmap2D } from 'lean-qr'; - - type Bitmap2D = Pick; - - export interface PNGOptions { - /** the colour to use for modules which are 'on' (typically black) */ - on?: RGBA; - /** the colour to use for modules which are 'off' (typically white) */ - off?: RGBA; - /** the padding to apply around the output (filled with 'off') */ - pad?: number; - /** - * the padding to apply on the left and right of the output (filled with 'off') - * @deprecated use `pad` instead - */ - padX?: number; - /** - * the padding to apply on the top and bottom of the output (filled with 'off') - * @deprecated use `pad` instead - */ - padY?: number; - /** a scale to apply to the resulting image (`scale` pixels = 1 module) */ - scale?: number; - } - - /** - * Generate a PNG document which can be exported to a file or served from a - * web server. - * - * @param code the QR code to convert. - * @param options optional configuration for the display. - * @returns a PNG document. - */ - export function toPngBuffer( - code: Bitmap2D, - options?: Readonly, - ): Uint8Array; - - /** - * Generate a `data:image/png` URL. - * - * @param code the QR code to convert. - * @param options optional configuration for the display. - * @returns a string suitable for use as the `src` of an `img` tag. 
- */ - export function toPngDataURL( - code: Bitmap2D, - options?: Readonly, - ): string; -} - -declare module 'lean-qr/extras/react' { - import type { - Bitmap2D as FullBitmap2D, - GenerateOptions, - ImageDataOptions, - } from 'lean-qr'; - import type { - SVGOptions, - toSvgDataURL as toSvgDataURLFn, - } from 'lean-qr/extras/svg'; - - export interface AsyncFramework { - createElement: ( - type: 'canvas', - props: { - ref: any; - style: { imageRendering: 'pixelated' }; - className: string; - }, - ) => T; - useRef(initialValue: T | null): { readonly current: T | null }; - useEffect(fn: () => void | (() => void), deps: unknown[]): void; - } - - interface QRComponentProps { - content: string; - className?: string; - } - - export interface AsyncQRComponentProps - extends ImageDataOptions, - GenerateOptions, - QRComponentProps {} - - export type AsyncQRComponent = ( - props: Readonly, - ) => T; - - /** - * Generate an asynchronous QR component (rendering to a `canvas`). - * You should call this just once, in the global scope. - * - * ```js - * import * as React from 'react'; - * import { generate } from 'lean-qr'; - * import { makeAsyncComponent } from 'lean-qr/extras/react'; - * const QR = makeAsyncComponent(React, generate); - * ``` - * - * This is not suitable for server-side rendering (use `makeSyncComponent` - * instead). - * - * @param framework the framework to use (e.g. `React`). - * @param generate the `generate` function to use - * (from `lean-qr` or `lean-qr/nano`). - * @param defaultProps optional default properties to apply when the - * component is used (overridden by properties set on use). - * @returns a component which can be rendered elsewhere. 
- */ - export function makeAsyncComponent( - framework: Readonly>, - generate: ( - data: string, - options?: Readonly, - ) => Pick, - defaultProps?: Readonly>, - ): AsyncQRComponent; - - export interface SyncFramework { - createElement: ( - type: 'img', - props: { - src: string; - style: { imageRendering: 'pixelated' }; - className: string; - }, - ) => T; - useMemo(fn: () => T, deps: unknown[]): T; - } - - export interface SyncQRComponentProps - extends SVGOptions, - GenerateOptions, - QRComponentProps {} - - export type SyncQRComponent = (props: Readonly) => T; - - /** - * Generate a synchronous QR component (rendering to an SVG). - * You should call this just once, in the global scope. - * - * ```js - * import * as React from 'react'; - * import { generate } from 'lean-qr'; - * import { toSvgDataURL } from 'lean-qr/extras/svg'; - * import { makeSyncComponent } from 'lean-qr/extras/react'; - * const QR = makeSyncComponent(React, generate, toSvgDataURL); - * ``` - * - * This is best suited for server-side rendering (prefer - * `makeAsyncComponent` if you only need client-side rendering). - * - * @param framework the framework to use (e.g. `React`). - * @param generate the `generate` function to use - * (from `lean-qr` or `lean-qr/nano`). - * @param toSvgDataURL the `toSvgDataURL` function to use - * (from `lean-qr/extras/svg`). - * @param defaultProps optional default properties to apply when the - * component is used (overridden by properties set on use). - * @returns a component which can be rendered elsewhere. 
- */ - export function makeSyncComponent( - framework: Readonly>, - generate: ( - data: string, - options?: Readonly, - ) => Pick, - toSvgDataURL: typeof toSvgDataURLFn, - defaultProps?: Readonly>, - ): SyncQRComponent; -} - -declare module 'lean-qr/extras/vue' { - import type { - Bitmap2D as FullBitmap2D, - GenerateOptions, - ImageDataOptions, - } from 'lean-qr'; - import type { - SVGOptions, - toSvgDataURL as toSvgDataURLFn, - } from 'lean-qr/extras/svg'; - - export interface Framework { - h: - | ((type: 'canvas', props: { ref: string; style: string }) => T) - | ((type: 'img', props: { src: string; style: string }) => T); - } - - interface QRComponentProps { - content: string; - } - - export interface VueCanvasComponentProps - extends ImageDataOptions, - GenerateOptions, - QRComponentProps {} - - type VueComponentDefinition = { - props: { - [k in keyof Props]: { - type: { - (): Props[k]; - required: undefined extends Props[k] ? false : true; - }; - }; - }; - render: () => Node; - } & ThisType; - - /** - * Generate a QR component which renders to a `canvas`. - * You should call this just once, in the global scope. - * - * ```js - * import { h, defineComponent } from 'vue'; - * import { generate } from 'lean-qr'; - * import { makeVueCanvasComponent } from 'lean-qr/extras/vue'; - * export const QR = defineComponent(makeVueCanvasComponent({ h }, generate)); - * ``` - * - * This is not suitable for server-side rendering (use `makeSyncComponent` - * instead). - * - * @param framework the framework to use (e.g. `{ h }`). - * @param generate the `generate` function to use - * (from `lean-qr` or `lean-qr/nano`). - * @param defaultProps optional default properties to apply when the - * component is used (overridden by properties set on use). - * @returns a component which can be rendered elsewhere. 
- */ - export function makeVueCanvasComponent( - framework: Readonly>, - generate: ( - data: string, - options?: Readonly, - ) => Pick, - defaultProps?: Readonly>, - ): VueComponentDefinition, T>; - - export interface VueSVGComponentProps - extends SVGOptions, - GenerateOptions, - QRComponentProps {} - - /** - * Generate a QR component which renders to an SVG. - * You should call this just once, in the global scope: - * - * ```js - * import { h, defineComponent } from 'vue'; - * import { generate } from 'lean-qr'; - * import { toSvgDataURL } from 'lean-qr/extras/svg'; - * import { makeVueSvgComponent } from 'lean-qr/extras/vue'; - * export const QR = defineComponent(makeVueSvgComponent({ h }, generate, toSvgDataURL)); - * ``` - * - * This is best suited for server-side rendering (prefer - * `makeAsyncComponent` if you only need client-side rendering). - * - * @param framework the framework to use (e.g. `{ h }`). - * @param generate the `generate` function to use - * (from `lean-qr` or `lean-qr/nano`). - * @param toSvgDataURL the `toSvgDataURL` function to use - * (from `lean-qr/extras/svg`). - * @param defaultProps optional default properties to apply when the - * component is used (overridden by properties set on use). - * @returns a component which can be rendered elsewhere. - */ - export function makeVueSvgComponent( - framework: Readonly>, - generate: ( - data: string, - options?: Readonly, - ) => Pick, - toSvgDataURL: typeof toSvgDataURLFn, - defaultProps?: Readonly>, - ): VueComponentDefinition, T>; -} - -declare module 'lean-qr/extras/errors' { - /** - * Convert an error into a human-readable message. This is intended for use - * with Lean QR errors, but will return somewhat meaningful messages for - * other errors too. - * - * @param error the error to convert. - * @returns a human-readable message explaining the error. 
- */ - export function readError(error: unknown): string; -} diff --git a/modules/lean-qr/2.6.0/index.mjs b/modules/lean-qr/2.6.0/index.mjs deleted file mode 100644 index 163dd46b9..000000000 --- a/modules/lean-qr/2.6.0/index.mjs +++ /dev/null @@ -1,2 +0,0 @@ -// @ts-nocheck -const t=[.2,3/8,5/9,2/3],o=(o,e)=>r=>{const n=4*o+r-4,s="*-04-39?2$%%$%%'$%''%'''%')(%'))%(++'(++'(+.'+-.',/3',33)-/5)-43).36)058*18<+37<+4:<,4:E,5C/8@F/:EH/8?s:1,c=e/f|0,i=e%f,l=f-i,a=n>8?c*t[r]+(o>5)&-2:s,_=c-a;return{t:l*_+i*_+i,o:[[l,_],[i,_+1]],i:a}},e={min:0,L:0,M:1,Q:2,H:3,max:3},r=t=>new Uint8Array(t),n=t=>{const o=new Error(`lean-qr error ${t}`);throw o.code=t,o},s=t=>"0123456789ABCDEFGHIJKLMNOPQRSTUVWXYZ $%*+-./:".indexOf(t),f=t=>t.charCodeAt(0),c=(...t)=>(o,e)=>t.forEach(t=>t(o,e)),i=t=>o=>{o.eci!==t&&(o.push(7,4),o.push(t,8),o.eci=t)},l=t=>(o,e)=>{o.push(4,4),o.push(t.length,8+8*(e>9)),t.forEach(t=>o.push(t,8))},a=(t,o,e,r,n=(t,o)=>e(t.length,o),s=(r?o=>c(i(r),t(o)):t))=>(s.test=o,s.l=e,s.est=n,s.eci=r&&[r],s),_=a(t=>(o,e)=>{o.push(1,4),o.push(t.length,10+2*(e>26)+2*(e>9));let r=0;for(;r/[0-9]/.test(t),(t,o)=>14+2*(o>26)+2*(o>9)+10*t/3),u=a(t=>(o,e)=>{o.push(2,4),o.push(t.length,9+2*(e>26)+2*(e>9));let r=0;for(;rs(t)>=0,(t,o)=>13+2*(o>26)+2*(o>9)+5.5*t),d=a(t=>l([...t].map(f)),t=>f(t)<128,(t,o)=>12+8*(o>9)+8*t);d._=!0,d.u=!0;const p=a(d,t=>f(t)<256,d.l,3);p._=!0;const m=new TextEncoder,h=a(t=>l(m.encode(t)),()=>1,0,26,(t,o)=>12+8*(o>9)+8*m.encode(t).length);h._=!0;let w=()=>{const t=new Map,o=new TextDecoder("sjis"),e=r(2);for(let r=0;r<7973;++r)e[0]=r/192+129+64*(r>5951),e[1]=r%192+64,t.set(o.decode(e),r);return t.delete("\ufffd"),w=()=>t,t};const y=a(t=>(o,e)=>{o.push(8,4),o.push(t.length,8+2*(e>26)+2*(e>9));for(const e of t)o.push(w().get(e),13)},t=>w().has(t),(t,o)=>12+2*(o>26)+2*(o>9)+13*t);y._=!0;const g=[_,u,d,p,y,h],b={auto:(t,{modes:o=g}={})=>(e,r)=>{const s=o.map((o,e)=>{const n=new 
Map,s=(t,o)=>(n.has(t)||n.set(t,o(t,r)),n.get(t));return{m:o,h:1<s(e-t,o.l):(e,r)=>s(t.slice(e,r),o.est)}});let f=[{v:0}],c=0,i=0,l=-1;for(const o of[...t,""]){let t=0;if(o)for(const e of s)e.m.test(o)&&(t|=e.h);if(!o||t!==l){if(-1!==l){const t=new Set(f.map(t=>t.D)),o=[];for(const{m:e,C:r,S:n,h:a}of s)if(l&a){const s=n(c,i);for(const l of e.eci??t)if(!e.u||!l){let t;for(const o of f)if(o.D===l||e.eci){const f=o.m===e&&o.D===l,a=f?o.V:o,_=e._&&f?o.v+s-r:a.v+12*(a.D!==l)+(f?n(f?o.$:c,i):s);(!t||_o.vt(e,r))},multi:c,eci:i,bytes:l,numeric:_,alphaNumeric:u,ascii:d,iso8859_1:p,shift_jis:y,utf8:h},C=()=>({F:r(2956),I:0,push(t,o){for(let e=o,r=8-(7&this.I);e>0;e-=r,r=8)this.F[this.I>>3]|=t<>e,this.I+=e({size:t,K:e,get:(o,r)=>o>=0&&o1&(t^o),(t,o)=>1&o,t=>t%3,(t,o)=>(t+o)%3,(t,o)=>1&(t/3^o>>1),(t,o)=>(t&o&1)+t*o%3,(t,o)=>(t&o)+t*o%3&1,(t,o)=>(t^o)+t*o%3&1],E=r(511);for(let t=0,o=1;t<255;o=2*o^285*(o>127))E[E[o+255]=t++]=o;const M=t=>E[t%255],S=t=>E[t+255],v=(t,o)=>{const e=r(t.length+o.length-1);for(let r=0;r{const e=r(t.length+o.length-1);e.set(t,0);for(let r=0;r{const e=[[],[]];let n=0,s=0;for(const[r,f]of o.o)for(let c=0;c{let r=t<<=e;for(let t=134217728;t>>=1;)r&t&&(r^=o*(t>>e));return r|t},A=({size:t,K:o},e)=>{const r=(e,r,n,s)=>{for(;n-- >0;e+=t)o.fill(s,e,e+r)},n=(o,e,n)=>{for(let s=0;s++<3;n-=2)r(e*t+o-(n>>1)*(t+1),n,n,2|s)},s=2*((t-13)/(1+(e/7|0))/2+.75|0);if(e>1)for(let o=t-7;o>8;o-=s){for(let t=o;t>8;t-=s)n(o,t,5);o6)for(let r=$(e,7973,12),n=1;n<7;++n)for(let e=12;e-- >9;r>>=1)o[n*t-e]=2|1&r;r(7,2,9,2),r(t-8,8,9,2);for(let e=0;e{const e=[];for(let r=t-2,n=t,s=-1;r>=0;r-=2){for(5===r&&(r=4);n+=s,-1!==n&&n!==t;){const s=n*t+r;o[s+1]||e.push(s+1),o[s]||e.push(s)}s*=-1}return e},H=({K:t},o,e)=>o.forEach((o,r)=>t[o]=e[r>>3]>>(7&~r)&1),I=({size:t,K:o},e,r,n)=>{for(let r=0;r>=1)o[(e-(e<7))*t+8]=1&s,o[9*t-e]=1&s;for(let e=8;--e,s;s>>=1)o[8*t+e-(e<7)]=1&s,o[(t-e)*t+8]=1&s},K=({size:t,K:o},e=0,r=0)=>{for(let 
n=0;n>1|2098176)&(3047517^a-1),2049&i&&(e+=40),a!==f&&(l=0),f=a,e+=5===++l?3:l>5}if(n)for(let r=t+n,s=5*o[n-1]^o[n];r{r>3);if(8*f(e).t=e;--t){const o=f(t);if(8*o.t{const n=x(e.size,e.K);return I(n,o,c??r,t),n.s=K(n),n}).reduce((t,o)=>o.s(o,e)=>P(o,{modes:[...g,...t],...e});export{e as correction,P as generate,b as mode}; diff --git a/packages/brk_client/brk_client/__init__.py b/packages/brk_client/brk_client/__init__.py index 68e73280c..bd31cc685 100644 --- a/packages/brk_client/brk_client/__init__.py +++ b/packages/brk_client/brk_client/__init__.py @@ -41,13 +41,13 @@ class AddressTxidsParam(TypedDict): limit: int class AddressValidation(TypedDict): - isvalid: bool address: Optional[str] - scriptPubKey: Optional[str] isscript: Optional[bool] + isvalid: bool iswitness: Optional[bool] - witness_version: Optional[int] + scriptPubKey: Optional[str] witness_program: Optional[str] + witness_version: Optional[int] AnyAddressIndex = TypeIndex Bitcoin = float @@ -58,9 +58,9 @@ class BlockCountParam(TypedDict): Height = int Timestamp = int class BlockFeesEntry(TypedDict): + avgFees: Sats avgHeight: Height timestamp: Timestamp - avgFees: Sats BlockHash = str class BlockHashParam(TypedDict): @@ -77,102 +77,102 @@ class BlockHashTxIndex(TypedDict): Weight = int class BlockInfo(TypedDict): - id: BlockHash - height: Height - tx_count: int - size: int - weight: Weight - timestamp: Timestamp difficulty: float + height: Height + id: BlockHash + size: int + timestamp: Timestamp + tx_count: int + weight: Weight class BlockRewardsEntry(TypedDict): avgHeight: int - timestamp: int avgRewards: int + timestamp: int class BlockSizeEntry(TypedDict): avgHeight: int - timestamp: int avgSize: int + timestamp: int class BlockWeightEntry(TypedDict): avgHeight: int - timestamp: int avgWeight: int + timestamp: int class BlockSizesWeights(TypedDict): sizes: List[BlockSizeEntry] weights: List[BlockWeightEntry] class BlockStatus(TypedDict): - in_best_chain: bool height: Union[Height, None] + 
in_best_chain: bool next_best: Union[BlockHash, None] class BlockTimestamp(TypedDict): - height: Height hash: BlockHash + height: Height timestamp: str Cents = int Close = Cents Format = Literal["json", "csv"] class DataRangeFormat(TypedDict): - from_: Optional[int] - to: Optional[int] count: Optional[int] format: Format + from_: Optional[int] + to: Optional[int] Date = int DateIndex = int DecadeIndex = int class DifficultyAdjustment(TypedDict): - progressPercent: float + adjustedTimeAvg: int difficultyChange: float estimatedRetargetDate: int + nextRetargetHeight: Height + previousRetarget: float + progressPercent: float remainingBlocks: int remainingTime: int - previousRetarget: float - nextRetargetHeight: Height timeAvg: int - adjustedTimeAvg: int timeOffset: int class DifficultyAdjustmentEntry(TypedDict): - timestamp: Timestamp - height: Height - difficulty: float change_percent: float + difficulty: float + height: Height + timestamp: Timestamp class DifficultyEntry(TypedDict): - timestamp: Timestamp difficulty: float height: Height + timestamp: Timestamp DifficultyEpoch = int Dollars = float class EmptyAddressData(TypedDict): - tx_count: int funded_txo_count: int transfered: Sats + tx_count: int EmptyAddressIndex = TypeIndex EmptyOutputIndex = TypeIndex FeeRate = float HalvingEpoch = int class HashrateEntry(TypedDict): - timestamp: Timestamp avgHashrate: int + timestamp: Timestamp class HashrateSummary(TypedDict): - hashrates: List[HashrateEntry] - difficulty: List[DifficultyEntry] - currentHashrate: int currentDifficulty: float + currentHashrate: int + difficulty: List[DifficultyEntry] + hashrates: List[HashrateEntry] class Health(TypedDict): - status: str service: str + status: str timestamp: str class HeightParam(TypedDict): @@ -181,87 +181,87 @@ class HeightParam(TypedDict): Hex = str High = Cents class IndexInfo(TypedDict): - index: Index aliases: List[str] + index: Index Limit = int class LimitParam(TypedDict): limit: Limit class 
LoadedAddressData(TypedDict): - tx_count: int funded_txo_count: int - spent_txo_count: int + realized_cap: Dollars received: Sats sent: Sats - realized_cap: Dollars + spent_txo_count: int + tx_count: int LoadedAddressIndex = TypeIndex Low = Cents class MempoolBlock(TypedDict): blockSize: int blockVSize: float + feeRange: List[FeeRate] + medianFee: FeeRate nTx: int totalFees: Sats - medianFee: FeeRate - feeRange: List[FeeRate] VSize = int class MempoolInfo(TypedDict): count: int - vsize: VSize total_fee: Sats + vsize: VSize Metric = str class MetricCount(TypedDict): distinct_metrics: int - total_endpoints: int lazy_endpoints: int stored_endpoints: int + total_endpoints: int class MetricParam(TypedDict): metric: Metric Metrics = str class MetricSelection(TypedDict): - metrics: Metrics - index: Index - from_: Optional[int] - to: Optional[int] count: Optional[int] format: Format + from_: Optional[int] + index: Index + metrics: Metrics + to: Optional[int] class MetricSelectionLegacy(TypedDict): - index: Index - ids: Metrics - from_: Optional[int] - to: Optional[int] count: Optional[int] format: Format + from_: Optional[int] + ids: Metrics + index: Index + to: Optional[int] class MetricWithIndex(TypedDict): - metric: Metric index: Index + metric: Metric MonthIndex = int Open = Cents class OHLCCents(TypedDict): - open: Open + close: Close high: High low: Low - close: Close + open: Open class OHLCDollars(TypedDict): - open: Open + close: Close high: High low: Low - close: Close + open: Open class OHLCSats(TypedDict): - open: Open + close: Close high: High low: Low - close: Close + open: Open OpReturnIndex = TypeIndex OutPoint = int @@ -297,29 +297,29 @@ class Pagination(TypedDict): page: Optional[int] class PoolBlockCounts(TypedDict): - all: int - _24h: int _1w: int + _24h: int + all: int class PoolBlockShares(TypedDict): - all: float - _24h: float _1w: float + _24h: float + all: float PoolSlug = Literal["unknown", "blockfills", "ultimuspool", "terrapool", "luxor", 
"onethash", "btccom", "bitfarms", "huobipool", "wayicn", "canoepool", "btctop", "bitcoincom", "pool175btc", "gbminers", "axbt", "asicminer", "bitminter", "bitcoinrussia", "btcserv", "simplecoinus", "btcguild", "eligius", "ozcoin", "eclipsemc", "maxbtc", "triplemining", "coinlab", "pool50btc", "ghashio", "stminingcorp", "bitparking", "mmpool", "polmine", "kncminer", "bitalo", "f2pool", "hhtt", "megabigpower", "mtred", "nmcbit", "yourbtcnet", "givemecoins", "braiinspool", "antpool", "multicoinco", "bcpoolio", "cointerra", "kanopool", "solock", "ckpool", "nicehash", "bitclub", "bitcoinaffiliatenetwork", "btcc", "bwpool", "exxbw", "bitsolo", "bitfury", "twentyoneinc", "digitalbtc", "eightbaochi", "mybtccoinpool", "tbdice", "hashpool", "nexious", "bravomining", "hotpool", "okexpool", "bcmonster", "onehash", "bixin", "tatmaspool", "viabtc", "connectbtc", "batpool", "waterhole", "dcexploration", "dcex", "btpool", "fiftyeightcoin", "bitcoinindia", "shawnp0wers", "phashio", "rigpool", "haozhuzhu", "sevenpool", "miningkings", "hashbx", "dpool", "rawpool", "haominer", "helix", "bitcoinukraine", "poolin", "secretsuperstar", "tigerpoolnet", "sigmapoolcom", "okpooltop", "hummerpool", "tangpool", "bytepool", "spiderpool", "novablock", "miningcity", "binancepool", "minerium", "lubiancom", "okkong", "aaopool", "emcdpool", "foundryusa", "sbicrypto", "arkpool", "purebtccom", "marapool", "kucoinpool", "entrustcharitypool", "okminer", "titan", "pegapool", "btcnuggets", "cloudhashing", "digitalxmintsy", "telco214", "btcpoolparty", "multipool", "transactioncoinmining", "btcdig", "trickysbtcpool", "btcmp", "eobot", "unomp", "patels", "gogreenlight", "ekanembtc", "canoe", "tiger", "onem1x", "zulupool", "secpool", "ocean", "whitepool", "wk057", "futurebitapollosolo", "carbonnegative", "portlandhodl", "phoenix", "neopool", "maxipool", "bitfufupool", "luckypool", "miningdutch", "publicpool", "miningsquared", "innopolistech", "btclab", "parasite"] class PoolDetailInfo(TypedDict): - id: int - 
name: str - link: str addresses: List[str] + id: int + link: str + name: str regexes: List[str] slug: PoolSlug class PoolDetail(TypedDict): - pool: PoolDetailInfo blockCount: PoolBlockCounts blockShare: PoolBlockShares estimatedHashrate: int + pool: PoolDetailInfo reportedHashrate: Optional[int] class PoolInfo(TypedDict): @@ -331,34 +331,34 @@ class PoolSlugParam(TypedDict): slug: PoolSlug class PoolStats(TypedDict): - poolId: int - name: str - link: str blockCount: int - rank: int emptyBlocks: int - slug: PoolSlug + link: str + name: str + poolId: int + rank: int share: float + slug: PoolSlug class PoolsSummary(TypedDict): - pools: List[PoolStats] blockCount: int lastEstimatedHashrate: int + pools: List[PoolStats] QuarterIndex = int RawLockTime = int class RecommendedFees(TypedDict): + economyFee: FeeRate fastestFee: FeeRate halfHourFee: FeeRate hourFee: FeeRate - economyFee: FeeRate minimumFee: FeeRate class RewardStats(TypedDict): - startBlock: Height endBlock: Height - totalReward: Sats + startBlock: Height totalFee: Sats + totalReward: Sats totalTx: int SemesterIndex = int @@ -386,43 +386,43 @@ class TxOut(TypedDict): Vout = int class TxIn(TypedDict): - txid: Txid - vout: Vout + inner_redeemscript_asm: Optional[str] + is_coinbase: bool prevout: Union[TxOut, None] scriptsig: str scriptsig_asm: str - is_coinbase: bool sequence: int - inner_redeemscript_asm: Optional[str] + txid: Txid + vout: Vout class TxStatus(TypedDict): - confirmed: bool - block_height: Union[Height, None] block_hash: Union[BlockHash, None] + block_height: Union[Height, None] block_time: Union[Timestamp, None] + confirmed: bool TxVersion = int class Transaction(TypedDict): + fee: Sats index: Union[TxIndex, None] + locktime: RawLockTime + sigops: int + size: int + status: TxStatus txid: Txid version: TxVersion - locktime: RawLockTime - size: int - weight: Weight - sigops: int - fee: Sats vin: List[TxIn] vout: List[TxOut] - status: TxStatus + weight: Weight TxInIndex = int TxOutIndex = int Vin 
= int class TxOutspend(TypedDict): spent: bool + status: Union[TxStatus, None] txid: Union[Txid, None] vin: Union[Vin, None] - status: Union[TxStatus, None] class TxidParam(TypedDict): txid: Txid @@ -433,10 +433,10 @@ class TxidVout(TypedDict): UnknownOutputIndex = TypeIndex class Utxo(TypedDict): - txid: Txid - vout: Vout status: TxStatus + txid: Txid value: Sats + vout: Vout class ValidateAddressParam(TypedDict): address: str @@ -445,9 +445,10 @@ WeekIndex = int YearIndex = int Index = Literal["dateindex", "decadeindex", "difficultyepoch", "emptyoutputindex", "halvingepoch", "height", "txinindex", "monthindex", "opreturnindex", "txoutindex", "p2aaddressindex", "p2msoutputindex", "p2pk33addressindex", "p2pk65addressindex", "p2pkhaddressindex", "p2shaddressindex", "p2traddressindex", "p2wpkhaddressindex", "p2wshaddressindex", "quarterindex", "semesterindex", "txindex", "unknownoutputindex", "weekindex", "yearindex", "loadedaddressindex", "emptyaddressindex"] class MetricLeafWithSchema(TypedDict): - name: str - value_type: str indexes: List[Index] + kind: str + name: str + type: str TreeNode = Union[dict[str, "TreeNode"], MetricLeafWithSchema] @@ -560,7 +561,7 @@ class MetricPattern(Protocol[T]): class _MetricPattern1By(Generic[T]): """Index endpoint methods container.""" - + def __init__(self, client: BrkClientBase, name: str): self._client = client self._name = name @@ -594,7 +595,7 @@ class _MetricPattern1By(Generic[T]): class MetricPattern1(Generic[T]): """Index accessor for metrics with 9 indexes.""" - + def __init__(self, client: BrkClientBase, name: str): self._client = client self._name = name @@ -624,7 +625,7 @@ class MetricPattern1(Generic[T]): class _MetricPattern2By(Generic[T]): """Index endpoint methods container.""" - + def __init__(self, client: BrkClientBase, name: str): self._client = client self._name = name @@ -655,7 +656,7 @@ class _MetricPattern2By(Generic[T]): class MetricPattern2(Generic[T]): """Index accessor for metrics with 8 indexes.""" - 
+ def __init__(self, client: BrkClientBase, name: str): self._client = client self._name = name @@ -684,7 +685,7 @@ class MetricPattern2(Generic[T]): class _MetricPattern3By(Generic[T]): """Index endpoint methods container.""" - + def __init__(self, client: BrkClientBase, name: str): self._client = client self._name = name @@ -715,7 +716,7 @@ class _MetricPattern3By(Generic[T]): class MetricPattern3(Generic[T]): """Index accessor for metrics with 8 indexes.""" - + def __init__(self, client: BrkClientBase, name: str): self._client = client self._name = name @@ -744,67 +745,7 @@ class MetricPattern3(Generic[T]): class _MetricPattern4By(Generic[T]): """Index endpoint methods container.""" - - def __init__(self, client: BrkClientBase, name: str): - self._client = client - self._name = name - def by_decadeindex(self) -> MetricEndpoint[T]: - return MetricEndpoint(self._client, self._name, 'decadeindex') - - def by_difficultyepoch(self) -> MetricEndpoint[T]: - return MetricEndpoint(self._client, self._name, 'difficultyepoch') - - def by_height(self) -> MetricEndpoint[T]: - return MetricEndpoint(self._client, self._name, 'height') - - def by_monthindex(self) -> MetricEndpoint[T]: - return MetricEndpoint(self._client, self._name, 'monthindex') - - def by_quarterindex(self) -> MetricEndpoint[T]: - return MetricEndpoint(self._client, self._name, 'quarterindex') - - def by_semesterindex(self) -> MetricEndpoint[T]: - return MetricEndpoint(self._client, self._name, 'semesterindex') - - def by_weekindex(self) -> MetricEndpoint[T]: - return MetricEndpoint(self._client, self._name, 'weekindex') - - def by_yearindex(self) -> MetricEndpoint[T]: - return MetricEndpoint(self._client, self._name, 'yearindex') - -class MetricPattern4(Generic[T]): - """Index accessor for metrics with 8 indexes.""" - - def __init__(self, client: BrkClientBase, name: str): - self._client = client - self._name = name - self.by: _MetricPattern4By[T] = _MetricPattern4By(client, name) - - @property - def 
name(self) -> str: - """Get the metric name.""" - return self._name - - def indexes(self) -> List[str]: - """Get the list of available indexes.""" - return ['decadeindex', 'difficultyepoch', 'height', 'monthindex', 'quarterindex', 'semesterindex', 'weekindex', 'yearindex'] - - def get(self, index: str) -> Optional[MetricEndpoint[T]]: - """Get an endpoint for a specific index, if supported.""" - if index == 'decadeindex': return self.by.by_decadeindex() - elif index == 'difficultyepoch': return self.by.by_difficultyepoch() - elif index == 'height': return self.by.by_height() - elif index == 'monthindex': return self.by.by_monthindex() - elif index == 'quarterindex': return self.by.by_quarterindex() - elif index == 'semesterindex': return self.by.by_semesterindex() - elif index == 'weekindex': return self.by.by_weekindex() - elif index == 'yearindex': return self.by.by_yearindex() - return None - -class _MetricPattern5By(Generic[T]): - """Index endpoint methods container.""" - def __init__(self, client: BrkClientBase, name: str): self._client = client self._name = name @@ -830,13 +771,13 @@ class _MetricPattern5By(Generic[T]): def by_yearindex(self) -> MetricEndpoint[T]: return MetricEndpoint(self._client, self._name, 'yearindex') -class MetricPattern5(Generic[T]): +class MetricPattern4(Generic[T]): """Index accessor for metrics with 7 indexes.""" - + def __init__(self, client: BrkClientBase, name: str): self._client = client self._name = name - self.by: _MetricPattern5By[T] = _MetricPattern5By(client, name) + self.by: _MetricPattern4By[T] = _MetricPattern4By(client, name) @property def name(self) -> str: @@ -858,65 +799,9 @@ class MetricPattern5(Generic[T]): elif index == 'yearindex': return self.by.by_yearindex() return None -class _MetricPattern6By(Generic[T]): +class _MetricPattern5By(Generic[T]): """Index endpoint methods container.""" - - def __init__(self, client: BrkClientBase, name: str): - self._client = client - self._name = name - def by_decadeindex(self) 
-> MetricEndpoint[T]: - return MetricEndpoint(self._client, self._name, 'decadeindex') - - def by_difficultyepoch(self) -> MetricEndpoint[T]: - return MetricEndpoint(self._client, self._name, 'difficultyepoch') - - def by_monthindex(self) -> MetricEndpoint[T]: - return MetricEndpoint(self._client, self._name, 'monthindex') - - def by_quarterindex(self) -> MetricEndpoint[T]: - return MetricEndpoint(self._client, self._name, 'quarterindex') - - def by_semesterindex(self) -> MetricEndpoint[T]: - return MetricEndpoint(self._client, self._name, 'semesterindex') - - def by_weekindex(self) -> MetricEndpoint[T]: - return MetricEndpoint(self._client, self._name, 'weekindex') - - def by_yearindex(self) -> MetricEndpoint[T]: - return MetricEndpoint(self._client, self._name, 'yearindex') - -class MetricPattern6(Generic[T]): - """Index accessor for metrics with 7 indexes.""" - - def __init__(self, client: BrkClientBase, name: str): - self._client = client - self._name = name - self.by: _MetricPattern6By[T] = _MetricPattern6By(client, name) - - @property - def name(self) -> str: - """Get the metric name.""" - return self._name - - def indexes(self) -> List[str]: - """Get the list of available indexes.""" - return ['decadeindex', 'difficultyepoch', 'monthindex', 'quarterindex', 'semesterindex', 'weekindex', 'yearindex'] - - def get(self, index: str) -> Optional[MetricEndpoint[T]]: - """Get an endpoint for a specific index, if supported.""" - if index == 'decadeindex': return self.by.by_decadeindex() - elif index == 'difficultyepoch': return self.by.by_difficultyepoch() - elif index == 'monthindex': return self.by.by_monthindex() - elif index == 'quarterindex': return self.by.by_quarterindex() - elif index == 'semesterindex': return self.by.by_semesterindex() - elif index == 'weekindex': return self.by.by_weekindex() - elif index == 'yearindex': return self.by.by_yearindex() - return None - -class _MetricPattern7By(Generic[T]): - """Index endpoint methods container.""" - def 
__init__(self, client: BrkClientBase, name: str): self._client = client self._name = name @@ -939,13 +824,13 @@ class _MetricPattern7By(Generic[T]): def by_yearindex(self) -> MetricEndpoint[T]: return MetricEndpoint(self._client, self._name, 'yearindex') -class MetricPattern7(Generic[T]): +class MetricPattern5(Generic[T]): """Index accessor for metrics with 6 indexes.""" - + def __init__(self, client: BrkClientBase, name: str): self._client = client self._name = name - self.by: _MetricPattern7By[T] = _MetricPattern7By(client, name) + self.by: _MetricPattern5By[T] = _MetricPattern5By(client, name) @property def name(self) -> str: @@ -966,53 +851,9 @@ class MetricPattern7(Generic[T]): elif index == 'yearindex': return self.by.by_yearindex() return None -class _MetricPattern8By(Generic[T]): +class _MetricPattern6By(Generic[T]): """Index endpoint methods container.""" - - def __init__(self, client: BrkClientBase, name: str): - self._client = client - self._name = name - def by_emptyoutputindex(self) -> MetricEndpoint[T]: - return MetricEndpoint(self._client, self._name, 'emptyoutputindex') - - def by_opreturnindex(self) -> MetricEndpoint[T]: - return MetricEndpoint(self._client, self._name, 'opreturnindex') - - def by_p2msoutputindex(self) -> MetricEndpoint[T]: - return MetricEndpoint(self._client, self._name, 'p2msoutputindex') - - def by_unknownoutputindex(self) -> MetricEndpoint[T]: - return MetricEndpoint(self._client, self._name, 'unknownoutputindex') - -class MetricPattern8(Generic[T]): - """Index accessor for metrics with 4 indexes.""" - - def __init__(self, client: BrkClientBase, name: str): - self._client = client - self._name = name - self.by: _MetricPattern8By[T] = _MetricPattern8By(client, name) - - @property - def name(self) -> str: - """Get the metric name.""" - return self._name - - def indexes(self) -> List[str]: - """Get the list of available indexes.""" - return ['emptyoutputindex', 'opreturnindex', 'p2msoutputindex', 'unknownoutputindex'] - - def 
get(self, index: str) -> Optional[MetricEndpoint[T]]: - """Get an endpoint for a specific index, if supported.""" - if index == 'emptyoutputindex': return self.by.by_emptyoutputindex() - elif index == 'opreturnindex': return self.by.by_opreturnindex() - elif index == 'p2msoutputindex': return self.by.by_p2msoutputindex() - elif index == 'unknownoutputindex': return self.by.by_unknownoutputindex() - return None - -class _MetricPattern9By(Generic[T]): - """Index endpoint methods container.""" - def __init__(self, client: BrkClientBase, name: str): self._client = client self._name = name @@ -1026,13 +867,13 @@ class _MetricPattern9By(Generic[T]): def by_yearindex(self) -> MetricEndpoint[T]: return MetricEndpoint(self._client, self._name, 'yearindex') -class MetricPattern9(Generic[T]): +class MetricPattern6(Generic[T]): """Index accessor for metrics with 3 indexes.""" - + def __init__(self, client: BrkClientBase, name: str): self._client = client self._name = name - self.by: _MetricPattern9By[T] = _MetricPattern9By(client, name) + self.by: _MetricPattern6By[T] = _MetricPattern6By(client, name) @property def name(self) -> str: @@ -1050,9 +891,9 @@ class MetricPattern9(Generic[T]): elif index == 'yearindex': return self.by.by_yearindex() return None -class _MetricPattern10By(Generic[T]): +class _MetricPattern7By(Generic[T]): """Index endpoint methods container.""" - + def __init__(self, client: BrkClientBase, name: str): self._client = client self._name = name @@ -1063,13 +904,13 @@ class _MetricPattern10By(Generic[T]): def by_height(self) -> MetricEndpoint[T]: return MetricEndpoint(self._client, self._name, 'height') -class MetricPattern10(Generic[T]): +class MetricPattern7(Generic[T]): """Index accessor for metrics with 2 indexes.""" - + def __init__(self, client: BrkClientBase, name: str): self._client = client self._name = name - self.by: _MetricPattern10By[T] = _MetricPattern10By(client, name) + self.by: _MetricPattern7By[T] = _MetricPattern7By(client, name) 
@property def name(self) -> str: @@ -1086,9 +927,9 @@ class MetricPattern10(Generic[T]): elif index == 'height': return self.by.by_height() return None -class _MetricPattern11By(Generic[T]): +class _MetricPattern8By(Generic[T]): """Index endpoint methods container.""" - + def __init__(self, client: BrkClientBase, name: str): self._client = client self._name = name @@ -1099,13 +940,13 @@ class _MetricPattern11By(Generic[T]): def by_monthindex(self) -> MetricEndpoint[T]: return MetricEndpoint(self._client, self._name, 'monthindex') -class MetricPattern11(Generic[T]): +class MetricPattern8(Generic[T]): """Index accessor for metrics with 2 indexes.""" - + def __init__(self, client: BrkClientBase, name: str): self._client = client self._name = name - self.by: _MetricPattern11By[T] = _MetricPattern11By(client, name) + self.by: _MetricPattern8By[T] = _MetricPattern8By(client, name) @property def name(self) -> str: @@ -1122,9 +963,9 @@ class MetricPattern11(Generic[T]): elif index == 'monthindex': return self.by.by_monthindex() return None -class _MetricPattern12By(Generic[T]): +class _MetricPattern9By(Generic[T]): """Index endpoint methods container.""" - + def __init__(self, client: BrkClientBase, name: str): self._client = client self._name = name @@ -1135,13 +976,13 @@ class _MetricPattern12By(Generic[T]): def by_weekindex(self) -> MetricEndpoint[T]: return MetricEndpoint(self._client, self._name, 'weekindex') -class MetricPattern12(Generic[T]): +class MetricPattern9(Generic[T]): """Index accessor for metrics with 2 indexes.""" - + def __init__(self, client: BrkClientBase, name: str): self._client = client self._name = name - self.by: _MetricPattern12By[T] = _MetricPattern12By(client, name) + self.by: _MetricPattern9By[T] = _MetricPattern9By(client, name) @property def name(self) -> str: @@ -1158,9 +999,9 @@ class MetricPattern12(Generic[T]): elif index == 'weekindex': return self.by.by_weekindex() return None -class _MetricPattern13By(Generic[T]): +class 
_MetricPattern10By(Generic[T]): """Index endpoint methods container.""" - + def __init__(self, client: BrkClientBase, name: str): self._client = client self._name = name @@ -1171,13 +1012,13 @@ class _MetricPattern13By(Generic[T]): def by_yearindex(self) -> MetricEndpoint[T]: return MetricEndpoint(self._client, self._name, 'yearindex') -class MetricPattern13(Generic[T]): +class MetricPattern10(Generic[T]): """Index accessor for metrics with 2 indexes.""" - + def __init__(self, client: BrkClientBase, name: str): self._client = client self._name = name - self.by: _MetricPattern13By[T] = _MetricPattern13By(client, name) + self.by: _MetricPattern10By[T] = _MetricPattern10By(client, name) @property def name(self) -> str: @@ -1194,9 +1035,9 @@ class MetricPattern13(Generic[T]): elif index == 'yearindex': return self.by.by_yearindex() return None -class _MetricPattern14By(Generic[T]): +class _MetricPattern11By(Generic[T]): """Index endpoint methods container.""" - + def __init__(self, client: BrkClientBase, name: str): self._client = client self._name = name @@ -1207,13 +1048,13 @@ class _MetricPattern14By(Generic[T]): def by_halvingepoch(self) -> MetricEndpoint[T]: return MetricEndpoint(self._client, self._name, 'halvingepoch') -class MetricPattern14(Generic[T]): +class MetricPattern11(Generic[T]): """Index accessor for metrics with 2 indexes.""" - + def __init__(self, client: BrkClientBase, name: str): self._client = client self._name = name - self.by: _MetricPattern14By[T] = _MetricPattern14By(client, name) + self.by: _MetricPattern11By[T] = _MetricPattern11By(client, name) @property def name(self) -> str: @@ -1230,9 +1071,9 @@ class MetricPattern14(Generic[T]): elif index == 'halvingepoch': return self.by.by_halvingepoch() return None -class _MetricPattern15By(Generic[T]): +class _MetricPattern12By(Generic[T]): """Index endpoint methods container.""" - + def __init__(self, client: BrkClientBase, name: str): self._client = client self._name = name @@ -1243,13 +1084,13 
@@ class _MetricPattern15By(Generic[T]): def by_height(self) -> MetricEndpoint[T]: return MetricEndpoint(self._client, self._name, 'height') -class MetricPattern15(Generic[T]): +class MetricPattern12(Generic[T]): """Index accessor for metrics with 2 indexes.""" - + def __init__(self, client: BrkClientBase, name: str): self._client = client self._name = name - self.by: _MetricPattern15By[T] = _MetricPattern15By(client, name) + self.by: _MetricPattern12By[T] = _MetricPattern12By(client, name) @property def name(self) -> str: @@ -1266,9 +1107,9 @@ class MetricPattern15(Generic[T]): elif index == 'height': return self.by.by_height() return None -class _MetricPattern16By(Generic[T]): +class _MetricPattern13By(Generic[T]): """Index endpoint methods container.""" - + def __init__(self, client: BrkClientBase, name: str): self._client = client self._name = name @@ -1279,13 +1120,13 @@ class _MetricPattern16By(Generic[T]): def by_height(self) -> MetricEndpoint[T]: return MetricEndpoint(self._client, self._name, 'height') -class MetricPattern16(Generic[T]): +class MetricPattern13(Generic[T]): """Index accessor for metrics with 2 indexes.""" - + def __init__(self, client: BrkClientBase, name: str): self._client = client self._name = name - self.by: _MetricPattern16By[T] = _MetricPattern16By(client, name) + self.by: _MetricPattern13By[T] = _MetricPattern13By(client, name) @property def name(self) -> str: @@ -1302,9 +1143,9 @@ class MetricPattern16(Generic[T]): elif index == 'height': return self.by.by_height() return None -class _MetricPattern17By(Generic[T]): +class _MetricPattern14By(Generic[T]): """Index endpoint methods container.""" - + def __init__(self, client: BrkClientBase, name: str): self._client = client self._name = name @@ -1315,13 +1156,13 @@ class _MetricPattern17By(Generic[T]): def by_txindex(self) -> MetricEndpoint[T]: return MetricEndpoint(self._client, self._name, 'txindex') -class MetricPattern17(Generic[T]): +class MetricPattern14(Generic[T]): """Index 
accessor for metrics with 2 indexes.""" - + def __init__(self, client: BrkClientBase, name: str): self._client = client self._name = name - self.by: _MetricPattern17By[T] = _MetricPattern17By(client, name) + self.by: _MetricPattern14By[T] = _MetricPattern14By(client, name) @property def name(self) -> str: @@ -1338,9 +1179,9 @@ class MetricPattern17(Generic[T]): elif index == 'txindex': return self.by.by_txindex() return None -class _MetricPattern18By(Generic[T]): +class _MetricPattern15By(Generic[T]): """Index endpoint methods container.""" - + def __init__(self, client: BrkClientBase, name: str): self._client = client self._name = name @@ -1351,13 +1192,13 @@ class _MetricPattern18By(Generic[T]): def by_quarterindex(self) -> MetricEndpoint[T]: return MetricEndpoint(self._client, self._name, 'quarterindex') -class MetricPattern18(Generic[T]): +class MetricPattern15(Generic[T]): """Index accessor for metrics with 2 indexes.""" - + def __init__(self, client: BrkClientBase, name: str): self._client = client self._name = name - self.by: _MetricPattern18By[T] = _MetricPattern18By(client, name) + self.by: _MetricPattern15By[T] = _MetricPattern15By(client, name) @property def name(self) -> str: @@ -1374,9 +1215,9 @@ class MetricPattern18(Generic[T]): elif index == 'quarterindex': return self.by.by_quarterindex() return None -class _MetricPattern19By(Generic[T]): +class _MetricPattern16By(Generic[T]): """Index endpoint methods container.""" - + def __init__(self, client: BrkClientBase, name: str): self._client = client self._name = name @@ -1387,13 +1228,13 @@ class _MetricPattern19By(Generic[T]): def by_semesterindex(self) -> MetricEndpoint[T]: return MetricEndpoint(self._client, self._name, 'semesterindex') -class MetricPattern19(Generic[T]): +class MetricPattern16(Generic[T]): """Index accessor for metrics with 2 indexes.""" - + def __init__(self, client: BrkClientBase, name: str): self._client = client self._name = name - self.by: _MetricPattern19By[T] = 
_MetricPattern19By(client, name) + self.by: _MetricPattern16By[T] = _MetricPattern16By(client, name) @property def name(self) -> str: @@ -1410,9 +1251,9 @@ class MetricPattern19(Generic[T]): elif index == 'semesterindex': return self.by.by_semesterindex() return None -class _MetricPattern20By(Generic[T]): +class _MetricPattern17By(Generic[T]): """Index endpoint methods container.""" - + def __init__(self, client: BrkClientBase, name: str): self._client = client self._name = name @@ -1423,13 +1264,13 @@ class _MetricPattern20By(Generic[T]): def by_weekindex(self) -> MetricEndpoint[T]: return MetricEndpoint(self._client, self._name, 'weekindex') -class MetricPattern20(Generic[T]): +class MetricPattern17(Generic[T]): """Index accessor for metrics with 2 indexes.""" - + def __init__(self, client: BrkClientBase, name: str): self._client = client self._name = name - self.by: _MetricPattern20By[T] = _MetricPattern20By(client, name) + self.by: _MetricPattern17By[T] = _MetricPattern17By(client, name) @property def name(self) -> str: @@ -1446,9 +1287,9 @@ class MetricPattern20(Generic[T]): elif index == 'weekindex': return self.by.by_weekindex() return None -class _MetricPattern21By(Generic[T]): +class _MetricPattern18By(Generic[T]): """Index endpoint methods container.""" - + def __init__(self, client: BrkClientBase, name: str): self._client = client self._name = name @@ -1459,13 +1300,13 @@ class _MetricPattern21By(Generic[T]): def by_yearindex(self) -> MetricEndpoint[T]: return MetricEndpoint(self._client, self._name, 'yearindex') -class MetricPattern21(Generic[T]): +class MetricPattern18(Generic[T]): """Index accessor for metrics with 2 indexes.""" - + def __init__(self, client: BrkClientBase, name: str): self._client = client self._name = name - self.by: _MetricPattern21By[T] = _MetricPattern21By(client, name) + self.by: _MetricPattern18By[T] = _MetricPattern18By(client, name) @property def name(self) -> str: @@ -1482,9 +1323,9 @@ class MetricPattern21(Generic[T]): elif 
index == 'yearindex': return self.by.by_yearindex() return None -class _MetricPattern22By(Generic[T]): +class _MetricPattern19By(Generic[T]): """Index endpoint methods container.""" - + def __init__(self, client: BrkClientBase, name: str): self._client = client self._name = name @@ -1492,13 +1333,13 @@ class _MetricPattern22By(Generic[T]): def by_dateindex(self) -> MetricEndpoint[T]: return MetricEndpoint(self._client, self._name, 'dateindex') -class MetricPattern22(Generic[T]): +class MetricPattern19(Generic[T]): """Index accessor for metrics with 1 indexes.""" - + def __init__(self, client: BrkClientBase, name: str): self._client = client self._name = name - self.by: _MetricPattern22By[T] = _MetricPattern22By(client, name) + self.by: _MetricPattern19By[T] = _MetricPattern19By(client, name) @property def name(self) -> str: @@ -1514,9 +1355,9 @@ class MetricPattern22(Generic[T]): if index == 'dateindex': return self.by.by_dateindex() return None -class _MetricPattern23By(Generic[T]): +class _MetricPattern20By(Generic[T]): """Index endpoint methods container.""" - + def __init__(self, client: BrkClientBase, name: str): self._client = client self._name = name @@ -1524,13 +1365,13 @@ class _MetricPattern23By(Generic[T]): def by_decadeindex(self) -> MetricEndpoint[T]: return MetricEndpoint(self._client, self._name, 'decadeindex') -class MetricPattern23(Generic[T]): +class MetricPattern20(Generic[T]): """Index accessor for metrics with 1 indexes.""" - + def __init__(self, client: BrkClientBase, name: str): self._client = client self._name = name - self.by: _MetricPattern23By[T] = _MetricPattern23By(client, name) + self.by: _MetricPattern20By[T] = _MetricPattern20By(client, name) @property def name(self) -> str: @@ -1546,9 +1387,9 @@ class MetricPattern23(Generic[T]): if index == 'decadeindex': return self.by.by_decadeindex() return None -class _MetricPattern24By(Generic[T]): +class _MetricPattern21By(Generic[T]): """Index endpoint methods container.""" - + def 
__init__(self, client: BrkClientBase, name: str): self._client = client self._name = name @@ -1556,13 +1397,13 @@ class _MetricPattern24By(Generic[T]): def by_difficultyepoch(self) -> MetricEndpoint[T]: return MetricEndpoint(self._client, self._name, 'difficultyepoch') -class MetricPattern24(Generic[T]): +class MetricPattern21(Generic[T]): """Index accessor for metrics with 1 indexes.""" - + def __init__(self, client: BrkClientBase, name: str): self._client = client self._name = name - self.by: _MetricPattern24By[T] = _MetricPattern24By(client, name) + self.by: _MetricPattern21By[T] = _MetricPattern21By(client, name) @property def name(self) -> str: @@ -1578,9 +1419,9 @@ class MetricPattern24(Generic[T]): if index == 'difficultyepoch': return self.by.by_difficultyepoch() return None -class _MetricPattern25By(Generic[T]): +class _MetricPattern22By(Generic[T]): """Index endpoint methods container.""" - + def __init__(self, client: BrkClientBase, name: str): self._client = client self._name = name @@ -1588,13 +1429,13 @@ class _MetricPattern25By(Generic[T]): def by_emptyoutputindex(self) -> MetricEndpoint[T]: return MetricEndpoint(self._client, self._name, 'emptyoutputindex') -class MetricPattern25(Generic[T]): +class MetricPattern22(Generic[T]): """Index accessor for metrics with 1 indexes.""" - + def __init__(self, client: BrkClientBase, name: str): self._client = client self._name = name - self.by: _MetricPattern25By[T] = _MetricPattern25By(client, name) + self.by: _MetricPattern22By[T] = _MetricPattern22By(client, name) @property def name(self) -> str: @@ -1610,9 +1451,9 @@ class MetricPattern25(Generic[T]): if index == 'emptyoutputindex': return self.by.by_emptyoutputindex() return None -class _MetricPattern26By(Generic[T]): +class _MetricPattern23By(Generic[T]): """Index endpoint methods container.""" - + def __init__(self, client: BrkClientBase, name: str): self._client = client self._name = name @@ -1620,13 +1461,13 @@ class _MetricPattern26By(Generic[T]): def 
by_height(self) -> MetricEndpoint[T]: return MetricEndpoint(self._client, self._name, 'height') -class MetricPattern26(Generic[T]): +class MetricPattern23(Generic[T]): """Index accessor for metrics with 1 indexes.""" - + def __init__(self, client: BrkClientBase, name: str): self._client = client self._name = name - self.by: _MetricPattern26By[T] = _MetricPattern26By(client, name) + self.by: _MetricPattern23By[T] = _MetricPattern23By(client, name) @property def name(self) -> str: @@ -1642,9 +1483,9 @@ class MetricPattern26(Generic[T]): if index == 'height': return self.by.by_height() return None -class _MetricPattern27By(Generic[T]): +class _MetricPattern24By(Generic[T]): """Index endpoint methods container.""" - + def __init__(self, client: BrkClientBase, name: str): self._client = client self._name = name @@ -1652,13 +1493,13 @@ class _MetricPattern27By(Generic[T]): def by_txinindex(self) -> MetricEndpoint[T]: return MetricEndpoint(self._client, self._name, 'txinindex') -class MetricPattern27(Generic[T]): +class MetricPattern24(Generic[T]): """Index accessor for metrics with 1 indexes.""" - + def __init__(self, client: BrkClientBase, name: str): self._client = client self._name = name - self.by: _MetricPattern27By[T] = _MetricPattern27By(client, name) + self.by: _MetricPattern24By[T] = _MetricPattern24By(client, name) @property def name(self) -> str: @@ -1674,9 +1515,9 @@ class MetricPattern27(Generic[T]): if index == 'txinindex': return self.by.by_txinindex() return None -class _MetricPattern28By(Generic[T]): +class _MetricPattern25By(Generic[T]): """Index endpoint methods container.""" - + def __init__(self, client: BrkClientBase, name: str): self._client = client self._name = name @@ -1684,13 +1525,13 @@ class _MetricPattern28By(Generic[T]): def by_opreturnindex(self) -> MetricEndpoint[T]: return MetricEndpoint(self._client, self._name, 'opreturnindex') -class MetricPattern28(Generic[T]): +class MetricPattern25(Generic[T]): """Index accessor for metrics with 1 
indexes.""" - + def __init__(self, client: BrkClientBase, name: str): self._client = client self._name = name - self.by: _MetricPattern28By[T] = _MetricPattern28By(client, name) + self.by: _MetricPattern25By[T] = _MetricPattern25By(client, name) @property def name(self) -> str: @@ -1706,9 +1547,9 @@ class MetricPattern28(Generic[T]): if index == 'opreturnindex': return self.by.by_opreturnindex() return None -class _MetricPattern29By(Generic[T]): +class _MetricPattern26By(Generic[T]): """Index endpoint methods container.""" - + def __init__(self, client: BrkClientBase, name: str): self._client = client self._name = name @@ -1716,13 +1557,13 @@ class _MetricPattern29By(Generic[T]): def by_txoutindex(self) -> MetricEndpoint[T]: return MetricEndpoint(self._client, self._name, 'txoutindex') -class MetricPattern29(Generic[T]): +class MetricPattern26(Generic[T]): """Index accessor for metrics with 1 indexes.""" - + def __init__(self, client: BrkClientBase, name: str): self._client = client self._name = name - self.by: _MetricPattern29By[T] = _MetricPattern29By(client, name) + self.by: _MetricPattern26By[T] = _MetricPattern26By(client, name) @property def name(self) -> str: @@ -1738,9 +1579,9 @@ class MetricPattern29(Generic[T]): if index == 'txoutindex': return self.by.by_txoutindex() return None -class _MetricPattern30By(Generic[T]): +class _MetricPattern27By(Generic[T]): """Index endpoint methods container.""" - + def __init__(self, client: BrkClientBase, name: str): self._client = client self._name = name @@ -1748,13 +1589,13 @@ class _MetricPattern30By(Generic[T]): def by_p2aaddressindex(self) -> MetricEndpoint[T]: return MetricEndpoint(self._client, self._name, 'p2aaddressindex') -class MetricPattern30(Generic[T]): +class MetricPattern27(Generic[T]): """Index accessor for metrics with 1 indexes.""" - + def __init__(self, client: BrkClientBase, name: str): self._client = client self._name = name - self.by: _MetricPattern30By[T] = _MetricPattern30By(client, name) + 
self.by: _MetricPattern27By[T] = _MetricPattern27By(client, name) @property def name(self) -> str: @@ -1770,9 +1611,9 @@ class MetricPattern30(Generic[T]): if index == 'p2aaddressindex': return self.by.by_p2aaddressindex() return None -class _MetricPattern31By(Generic[T]): +class _MetricPattern28By(Generic[T]): """Index endpoint methods container.""" - + def __init__(self, client: BrkClientBase, name: str): self._client = client self._name = name @@ -1780,13 +1621,13 @@ class _MetricPattern31By(Generic[T]): def by_p2msoutputindex(self) -> MetricEndpoint[T]: return MetricEndpoint(self._client, self._name, 'p2msoutputindex') -class MetricPattern31(Generic[T]): +class MetricPattern28(Generic[T]): """Index accessor for metrics with 1 indexes.""" - + def __init__(self, client: BrkClientBase, name: str): self._client = client self._name = name - self.by: _MetricPattern31By[T] = _MetricPattern31By(client, name) + self.by: _MetricPattern28By[T] = _MetricPattern28By(client, name) @property def name(self) -> str: @@ -1802,9 +1643,9 @@ class MetricPattern31(Generic[T]): if index == 'p2msoutputindex': return self.by.by_p2msoutputindex() return None -class _MetricPattern32By(Generic[T]): +class _MetricPattern29By(Generic[T]): """Index endpoint methods container.""" - + def __init__(self, client: BrkClientBase, name: str): self._client = client self._name = name @@ -1812,13 +1653,13 @@ class _MetricPattern32By(Generic[T]): def by_p2pk33addressindex(self) -> MetricEndpoint[T]: return MetricEndpoint(self._client, self._name, 'p2pk33addressindex') -class MetricPattern32(Generic[T]): +class MetricPattern29(Generic[T]): """Index accessor for metrics with 1 indexes.""" - + def __init__(self, client: BrkClientBase, name: str): self._client = client self._name = name - self.by: _MetricPattern32By[T] = _MetricPattern32By(client, name) + self.by: _MetricPattern29By[T] = _MetricPattern29By(client, name) @property def name(self) -> str: @@ -1834,9 +1675,9 @@ class 
MetricPattern32(Generic[T]): if index == 'p2pk33addressindex': return self.by.by_p2pk33addressindex() return None -class _MetricPattern33By(Generic[T]): +class _MetricPattern30By(Generic[T]): """Index endpoint methods container.""" - + def __init__(self, client: BrkClientBase, name: str): self._client = client self._name = name @@ -1844,13 +1685,13 @@ class _MetricPattern33By(Generic[T]): def by_p2pk65addressindex(self) -> MetricEndpoint[T]: return MetricEndpoint(self._client, self._name, 'p2pk65addressindex') -class MetricPattern33(Generic[T]): +class MetricPattern30(Generic[T]): """Index accessor for metrics with 1 indexes.""" - + def __init__(self, client: BrkClientBase, name: str): self._client = client self._name = name - self.by: _MetricPattern33By[T] = _MetricPattern33By(client, name) + self.by: _MetricPattern30By[T] = _MetricPattern30By(client, name) @property def name(self) -> str: @@ -1866,9 +1707,9 @@ class MetricPattern33(Generic[T]): if index == 'p2pk65addressindex': return self.by.by_p2pk65addressindex() return None -class _MetricPattern34By(Generic[T]): +class _MetricPattern31By(Generic[T]): """Index endpoint methods container.""" - + def __init__(self, client: BrkClientBase, name: str): self._client = client self._name = name @@ -1876,13 +1717,13 @@ class _MetricPattern34By(Generic[T]): def by_p2pkhaddressindex(self) -> MetricEndpoint[T]: return MetricEndpoint(self._client, self._name, 'p2pkhaddressindex') -class MetricPattern34(Generic[T]): +class MetricPattern31(Generic[T]): """Index accessor for metrics with 1 indexes.""" - + def __init__(self, client: BrkClientBase, name: str): self._client = client self._name = name - self.by: _MetricPattern34By[T] = _MetricPattern34By(client, name) + self.by: _MetricPattern31By[T] = _MetricPattern31By(client, name) @property def name(self) -> str: @@ -1898,9 +1739,9 @@ class MetricPattern34(Generic[T]): if index == 'p2pkhaddressindex': return self.by.by_p2pkhaddressindex() return None -class 
_MetricPattern35By(Generic[T]): +class _MetricPattern32By(Generic[T]): """Index endpoint methods container.""" - + def __init__(self, client: BrkClientBase, name: str): self._client = client self._name = name @@ -1908,13 +1749,13 @@ class _MetricPattern35By(Generic[T]): def by_p2shaddressindex(self) -> MetricEndpoint[T]: return MetricEndpoint(self._client, self._name, 'p2shaddressindex') -class MetricPattern35(Generic[T]): +class MetricPattern32(Generic[T]): """Index accessor for metrics with 1 indexes.""" - + def __init__(self, client: BrkClientBase, name: str): self._client = client self._name = name - self.by: _MetricPattern35By[T] = _MetricPattern35By(client, name) + self.by: _MetricPattern32By[T] = _MetricPattern32By(client, name) @property def name(self) -> str: @@ -1930,9 +1771,9 @@ class MetricPattern35(Generic[T]): if index == 'p2shaddressindex': return self.by.by_p2shaddressindex() return None -class _MetricPattern36By(Generic[T]): +class _MetricPattern33By(Generic[T]): """Index endpoint methods container.""" - + def __init__(self, client: BrkClientBase, name: str): self._client = client self._name = name @@ -1940,13 +1781,13 @@ class _MetricPattern36By(Generic[T]): def by_p2traddressindex(self) -> MetricEndpoint[T]: return MetricEndpoint(self._client, self._name, 'p2traddressindex') -class MetricPattern36(Generic[T]): +class MetricPattern33(Generic[T]): """Index accessor for metrics with 1 indexes.""" - + def __init__(self, client: BrkClientBase, name: str): self._client = client self._name = name - self.by: _MetricPattern36By[T] = _MetricPattern36By(client, name) + self.by: _MetricPattern33By[T] = _MetricPattern33By(client, name) @property def name(self) -> str: @@ -1962,9 +1803,9 @@ class MetricPattern36(Generic[T]): if index == 'p2traddressindex': return self.by.by_p2traddressindex() return None -class _MetricPattern37By(Generic[T]): +class _MetricPattern34By(Generic[T]): """Index endpoint methods container.""" - + def __init__(self, client: 
BrkClientBase, name: str): self._client = client self._name = name @@ -1972,13 +1813,13 @@ class _MetricPattern37By(Generic[T]): def by_p2wpkhaddressindex(self) -> MetricEndpoint[T]: return MetricEndpoint(self._client, self._name, 'p2wpkhaddressindex') -class MetricPattern37(Generic[T]): +class MetricPattern34(Generic[T]): """Index accessor for metrics with 1 indexes.""" - + def __init__(self, client: BrkClientBase, name: str): self._client = client self._name = name - self.by: _MetricPattern37By[T] = _MetricPattern37By(client, name) + self.by: _MetricPattern34By[T] = _MetricPattern34By(client, name) @property def name(self) -> str: @@ -1994,9 +1835,9 @@ class MetricPattern37(Generic[T]): if index == 'p2wpkhaddressindex': return self.by.by_p2wpkhaddressindex() return None -class _MetricPattern38By(Generic[T]): +class _MetricPattern35By(Generic[T]): """Index endpoint methods container.""" - + def __init__(self, client: BrkClientBase, name: str): self._client = client self._name = name @@ -2004,13 +1845,13 @@ class _MetricPattern38By(Generic[T]): def by_p2wshaddressindex(self) -> MetricEndpoint[T]: return MetricEndpoint(self._client, self._name, 'p2wshaddressindex') -class MetricPattern38(Generic[T]): +class MetricPattern35(Generic[T]): """Index accessor for metrics with 1 indexes.""" - + def __init__(self, client: BrkClientBase, name: str): self._client = client self._name = name - self.by: _MetricPattern38By[T] = _MetricPattern38By(client, name) + self.by: _MetricPattern35By[T] = _MetricPattern35By(client, name) @property def name(self) -> str: @@ -2026,9 +1867,9 @@ class MetricPattern38(Generic[T]): if index == 'p2wshaddressindex': return self.by.by_p2wshaddressindex() return None -class _MetricPattern39By(Generic[T]): +class _MetricPattern36By(Generic[T]): """Index endpoint methods container.""" - + def __init__(self, client: BrkClientBase, name: str): self._client = client self._name = name @@ -2036,13 +1877,13 @@ class _MetricPattern39By(Generic[T]): def 
by_txindex(self) -> MetricEndpoint[T]: return MetricEndpoint(self._client, self._name, 'txindex') -class MetricPattern39(Generic[T]): +class MetricPattern36(Generic[T]): """Index accessor for metrics with 1 indexes.""" - + def __init__(self, client: BrkClientBase, name: str): self._client = client self._name = name - self.by: _MetricPattern39By[T] = _MetricPattern39By(client, name) + self.by: _MetricPattern36By[T] = _MetricPattern36By(client, name) @property def name(self) -> str: @@ -2058,9 +1899,9 @@ class MetricPattern39(Generic[T]): if index == 'txindex': return self.by.by_txindex() return None -class _MetricPattern40By(Generic[T]): +class _MetricPattern37By(Generic[T]): """Index endpoint methods container.""" - + def __init__(self, client: BrkClientBase, name: str): self._client = client self._name = name @@ -2068,13 +1909,13 @@ class _MetricPattern40By(Generic[T]): def by_unknownoutputindex(self) -> MetricEndpoint[T]: return MetricEndpoint(self._client, self._name, 'unknownoutputindex') -class MetricPattern40(Generic[T]): +class MetricPattern37(Generic[T]): """Index accessor for metrics with 1 indexes.""" - + def __init__(self, client: BrkClientBase, name: str): self._client = client self._name = name - self.by: _MetricPattern40By[T] = _MetricPattern40By(client, name) + self.by: _MetricPattern37By[T] = _MetricPattern37By(client, name) @property def name(self) -> str: @@ -2090,9 +1931,9 @@ class MetricPattern40(Generic[T]): if index == 'unknownoutputindex': return self.by.by_unknownoutputindex() return None -class _MetricPattern41By(Generic[T]): +class _MetricPattern38By(Generic[T]): """Index endpoint methods container.""" - + def __init__(self, client: BrkClientBase, name: str): self._client = client self._name = name @@ -2100,13 +1941,13 @@ class _MetricPattern41By(Generic[T]): def by_loadedaddressindex(self) -> MetricEndpoint[T]: return MetricEndpoint(self._client, self._name, 'loadedaddressindex') -class MetricPattern41(Generic[T]): +class 
MetricPattern38(Generic[T]): """Index accessor for metrics with 1 indexes.""" - + def __init__(self, client: BrkClientBase, name: str): self._client = client self._name = name - self.by: _MetricPattern41By[T] = _MetricPattern41By(client, name) + self.by: _MetricPattern38By[T] = _MetricPattern38By(client, name) @property def name(self) -> str: @@ -2122,9 +1963,9 @@ class MetricPattern41(Generic[T]): if index == 'loadedaddressindex': return self.by.by_loadedaddressindex() return None -class _MetricPattern42By(Generic[T]): +class _MetricPattern39By(Generic[T]): """Index endpoint methods container.""" - + def __init__(self, client: BrkClientBase, name: str): self._client = client self._name = name @@ -2132,13 +1973,13 @@ class _MetricPattern42By(Generic[T]): def by_emptyaddressindex(self) -> MetricEndpoint[T]: return MetricEndpoint(self._client, self._name, 'emptyaddressindex') -class MetricPattern42(Generic[T]): +class MetricPattern39(Generic[T]): """Index accessor for metrics with 1 indexes.""" - + def __init__(self, client: BrkClientBase, name: str): self._client = client self._name = name - self.by: _MetricPattern42By[T] = _MetricPattern42By(client, name) + self.by: _MetricPattern39By[T] = _MetricPattern39By(client, name) @property def name(self) -> str: @@ -2158,23 +1999,23 @@ class MetricPattern42(Generic[T]): class RealizedPattern3: """Pattern struct for repeated tree structure.""" - + def __init__(self, client: BrkClientBase, acc: str): """Create pattern node with accumulated metric name.""" - self.adjusted_sopr: MetricPattern22[StoredF64] = MetricPattern22(client, _m(acc, 'adjusted_sopr')) - self.adjusted_sopr_30d_ema: MetricPattern22[StoredF64] = MetricPattern22(client, _m(acc, 'adjusted_sopr_30d_ema')) - self.adjusted_sopr_7d_ema: MetricPattern22[StoredF64] = MetricPattern22(client, _m(acc, 'adjusted_sopr_7d_ema')) + self.adjusted_sopr: MetricPattern19[StoredF64] = MetricPattern19(client, _m(acc, 'adjusted_sopr')) + self.adjusted_sopr_30d_ema: 
MetricPattern19[StoredF64] = MetricPattern19(client, _m(acc, 'adjusted_sopr_30d_ema')) + self.adjusted_sopr_7d_ema: MetricPattern19[StoredF64] = MetricPattern19(client, _m(acc, 'adjusted_sopr_7d_ema')) self.adjusted_value_created: MetricPattern1[Dollars] = MetricPattern1(client, _m(acc, 'adjusted_value_created')) self.adjusted_value_destroyed: MetricPattern1[Dollars] = MetricPattern1(client, _m(acc, 'adjusted_value_destroyed')) - self.mvrv: MetricPattern5[StoredF32] = MetricPattern5(client, _m(acc, 'mvrv')) + self.mvrv: MetricPattern4[StoredF32] = MetricPattern4(client, _m(acc, 'mvrv')) self.neg_realized_loss: BlockCountPattern[Dollars] = BlockCountPattern(client, _m(acc, 'neg_realized_loss')) self.net_realized_pnl: BlockCountPattern[Dollars] = BlockCountPattern(client, _m(acc, 'net_realized_pnl')) - self.net_realized_pnl_cumulative_30d_delta: MetricPattern5[Dollars] = MetricPattern5(client, _m(acc, 'net_realized_pnl_cumulative_30d_delta')) - self.net_realized_pnl_cumulative_30d_delta_rel_to_market_cap: MetricPattern5[StoredF32] = MetricPattern5(client, _m(acc, 'net_realized_pnl_cumulative_30d_delta_rel_to_market_cap')) - self.net_realized_pnl_cumulative_30d_delta_rel_to_realized_cap: MetricPattern5[StoredF32] = MetricPattern5(client, _m(acc, 'net_realized_pnl_cumulative_30d_delta_rel_to_realized_cap')) + self.net_realized_pnl_cumulative_30d_delta: MetricPattern4[Dollars] = MetricPattern4(client, _m(acc, 'net_realized_pnl_cumulative_30d_delta')) + self.net_realized_pnl_cumulative_30d_delta_rel_to_market_cap: MetricPattern4[StoredF32] = MetricPattern4(client, _m(acc, 'net_realized_pnl_cumulative_30d_delta_rel_to_market_cap')) + self.net_realized_pnl_cumulative_30d_delta_rel_to_realized_cap: MetricPattern4[StoredF32] = MetricPattern4(client, _m(acc, 'net_realized_pnl_cumulative_30d_delta_rel_to_realized_cap')) self.net_realized_pnl_rel_to_realized_cap: BlockCountPattern[StoredF32] = BlockCountPattern(client, _m(acc, 'net_realized_pnl_rel_to_realized_cap')) 
self.realized_cap: MetricPattern1[Dollars] = MetricPattern1(client, _m(acc, 'realized_cap')) - self.realized_cap_30d_delta: MetricPattern5[Dollars] = MetricPattern5(client, _m(acc, 'realized_cap_30d_delta')) + self.realized_cap_30d_delta: MetricPattern4[Dollars] = MetricPattern4(client, _m(acc, 'realized_cap_30d_delta')) self.realized_cap_rel_to_own_market_cap: MetricPattern1[StoredF32] = MetricPattern1(client, _m(acc, 'realized_cap_rel_to_own_market_cap')) self.realized_loss: BlockCountPattern[Dollars] = BlockCountPattern(client, _m(acc, 'realized_loss')) self.realized_loss_rel_to_realized_cap: BlockCountPattern[StoredF32] = BlockCountPattern(client, _m(acc, 'realized_loss_rel_to_realized_cap')) @@ -2182,72 +2023,102 @@ class RealizedPattern3: self.realized_price_extra: ActivePriceRatioPattern = ActivePriceRatioPattern(client, _m(acc, 'realized_price_ratio')) self.realized_profit: BlockCountPattern[Dollars] = BlockCountPattern(client, _m(acc, 'realized_profit')) self.realized_profit_rel_to_realized_cap: BlockCountPattern[StoredF32] = BlockCountPattern(client, _m(acc, 'realized_profit_rel_to_realized_cap')) - self.realized_profit_to_loss_ratio: MetricPattern22[StoredF64] = MetricPattern22(client, _m(acc, 'realized_profit_to_loss_ratio')) - self.realized_value: DifficultyAdjustmentPattern[Dollars] = DifficultyAdjustmentPattern(client, _m(acc, 'realized_value')) - self.sell_side_risk_ratio: MetricPattern22[StoredF32] = MetricPattern22(client, _m(acc, 'sell_side_risk_ratio')) - self.sell_side_risk_ratio_30d_ema: MetricPattern22[StoredF32] = MetricPattern22(client, _m(acc, 'sell_side_risk_ratio_30d_ema')) - self.sell_side_risk_ratio_7d_ema: MetricPattern22[StoredF32] = MetricPattern22(client, _m(acc, 'sell_side_risk_ratio_7d_ema')) - self.sopr: MetricPattern22[StoredF64] = MetricPattern22(client, _m(acc, 'sopr')) - self.sopr_30d_ema: MetricPattern22[StoredF64] = MetricPattern22(client, _m(acc, 'sopr_30d_ema')) - self.sopr_7d_ema: MetricPattern22[StoredF64] = 
MetricPattern22(client, _m(acc, 'sopr_7d_ema')) + self.realized_profit_to_loss_ratio: MetricPattern19[StoredF64] = MetricPattern19(client, _m(acc, 'realized_profit_to_loss_ratio')) + self.realized_value: MetricPattern1[Dollars] = MetricPattern1(client, _m(acc, 'realized_value')) + self.sell_side_risk_ratio: MetricPattern19[StoredF32] = MetricPattern19(client, _m(acc, 'sell_side_risk_ratio')) + self.sell_side_risk_ratio_30d_ema: MetricPattern19[StoredF32] = MetricPattern19(client, _m(acc, 'sell_side_risk_ratio_30d_ema')) + self.sell_side_risk_ratio_7d_ema: MetricPattern19[StoredF32] = MetricPattern19(client, _m(acc, 'sell_side_risk_ratio_7d_ema')) + self.sopr: MetricPattern19[StoredF64] = MetricPattern19(client, _m(acc, 'sopr')) + self.sopr_30d_ema: MetricPattern19[StoredF64] = MetricPattern19(client, _m(acc, 'sopr_30d_ema')) + self.sopr_7d_ema: MetricPattern19[StoredF64] = MetricPattern19(client, _m(acc, 'sopr_7d_ema')) self.total_realized_pnl: MetricPattern1[Dollars] = MetricPattern1(client, _m(acc, 'total_realized_pnl')) - self.value_created: MetricPattern26[Dollars] = MetricPattern26(client, _m(acc, 'value_created')) - self.value_created_sum: MetricPattern2[Dollars] = MetricPattern2(client, _m(acc, 'value_created_sum')) - self.value_destroyed: MetricPattern26[Dollars] = MetricPattern26(client, _m(acc, 'value_destroyed')) - self.value_destroyed_sum: MetricPattern2[Dollars] = MetricPattern2(client, _m(acc, 'value_destroyed_sum')) + self.value_created: MetricPattern1[Dollars] = MetricPattern1(client, _m(acc, 'value_created')) + self.value_destroyed: MetricPattern1[Dollars] = MetricPattern1(client, _m(acc, 'value_destroyed')) class RealizedPattern4: """Pattern struct for repeated tree structure.""" - + def __init__(self, client: BrkClientBase, acc: str): """Create pattern node with accumulated metric name.""" - self.adjusted_sopr: MetricPattern22[StoredF64] = MetricPattern22(client, _m(acc, 'adjusted_sopr')) - self.adjusted_sopr_30d_ema: MetricPattern22[StoredF64] = 
MetricPattern22(client, _m(acc, 'adjusted_sopr_30d_ema')) - self.adjusted_sopr_7d_ema: MetricPattern22[StoredF64] = MetricPattern22(client, _m(acc, 'adjusted_sopr_7d_ema')) + self.adjusted_sopr: MetricPattern19[StoredF64] = MetricPattern19(client, _m(acc, 'adjusted_sopr')) + self.adjusted_sopr_30d_ema: MetricPattern19[StoredF64] = MetricPattern19(client, _m(acc, 'adjusted_sopr_30d_ema')) + self.adjusted_sopr_7d_ema: MetricPattern19[StoredF64] = MetricPattern19(client, _m(acc, 'adjusted_sopr_7d_ema')) self.adjusted_value_created: MetricPattern1[Dollars] = MetricPattern1(client, _m(acc, 'adjusted_value_created')) self.adjusted_value_destroyed: MetricPattern1[Dollars] = MetricPattern1(client, _m(acc, 'adjusted_value_destroyed')) - self.mvrv: MetricPattern5[StoredF32] = MetricPattern5(client, _m(acc, 'mvrv')) + self.mvrv: MetricPattern4[StoredF32] = MetricPattern4(client, _m(acc, 'mvrv')) self.neg_realized_loss: BlockCountPattern[Dollars] = BlockCountPattern(client, _m(acc, 'neg_realized_loss')) self.net_realized_pnl: BlockCountPattern[Dollars] = BlockCountPattern(client, _m(acc, 'net_realized_pnl')) - self.net_realized_pnl_cumulative_30d_delta: MetricPattern5[Dollars] = MetricPattern5(client, _m(acc, 'net_realized_pnl_cumulative_30d_delta')) - self.net_realized_pnl_cumulative_30d_delta_rel_to_market_cap: MetricPattern5[StoredF32] = MetricPattern5(client, _m(acc, 'net_realized_pnl_cumulative_30d_delta_rel_to_market_cap')) - self.net_realized_pnl_cumulative_30d_delta_rel_to_realized_cap: MetricPattern5[StoredF32] = MetricPattern5(client, _m(acc, 'net_realized_pnl_cumulative_30d_delta_rel_to_realized_cap')) + self.net_realized_pnl_cumulative_30d_delta: MetricPattern4[Dollars] = MetricPattern4(client, _m(acc, 'net_realized_pnl_cumulative_30d_delta')) + self.net_realized_pnl_cumulative_30d_delta_rel_to_market_cap: MetricPattern4[StoredF32] = MetricPattern4(client, _m(acc, 'net_realized_pnl_cumulative_30d_delta_rel_to_market_cap')) + 
self.net_realized_pnl_cumulative_30d_delta_rel_to_realized_cap: MetricPattern4[StoredF32] = MetricPattern4(client, _m(acc, 'net_realized_pnl_cumulative_30d_delta_rel_to_realized_cap')) self.net_realized_pnl_rel_to_realized_cap: BlockCountPattern[StoredF32] = BlockCountPattern(client, _m(acc, 'net_realized_pnl_rel_to_realized_cap')) self.realized_cap: MetricPattern1[Dollars] = MetricPattern1(client, _m(acc, 'realized_cap')) - self.realized_cap_30d_delta: MetricPattern5[Dollars] = MetricPattern5(client, _m(acc, 'realized_cap_30d_delta')) + self.realized_cap_30d_delta: MetricPattern4[Dollars] = MetricPattern4(client, _m(acc, 'realized_cap_30d_delta')) self.realized_loss: BlockCountPattern[Dollars] = BlockCountPattern(client, _m(acc, 'realized_loss')) self.realized_loss_rel_to_realized_cap: BlockCountPattern[StoredF32] = BlockCountPattern(client, _m(acc, 'realized_loss_rel_to_realized_cap')) self.realized_price: MetricPattern1[Dollars] = MetricPattern1(client, _m(acc, 'realized_price')) self.realized_price_extra: RealizedPriceExtraPattern = RealizedPriceExtraPattern(client, _m(acc, 'realized_price')) self.realized_profit: BlockCountPattern[Dollars] = BlockCountPattern(client, _m(acc, 'realized_profit')) self.realized_profit_rel_to_realized_cap: BlockCountPattern[StoredF32] = BlockCountPattern(client, _m(acc, 'realized_profit_rel_to_realized_cap')) - self.realized_value: DifficultyAdjustmentPattern[Dollars] = DifficultyAdjustmentPattern(client, _m(acc, 'realized_value')) - self.sell_side_risk_ratio: MetricPattern22[StoredF32] = MetricPattern22(client, _m(acc, 'sell_side_risk_ratio')) - self.sell_side_risk_ratio_30d_ema: MetricPattern22[StoredF32] = MetricPattern22(client, _m(acc, 'sell_side_risk_ratio_30d_ema')) - self.sell_side_risk_ratio_7d_ema: MetricPattern22[StoredF32] = MetricPattern22(client, _m(acc, 'sell_side_risk_ratio_7d_ema')) - self.sopr: MetricPattern22[StoredF64] = MetricPattern22(client, _m(acc, 'sopr')) - self.sopr_30d_ema: MetricPattern22[StoredF64] = 
MetricPattern22(client, _m(acc, 'sopr_30d_ema')) - self.sopr_7d_ema: MetricPattern22[StoredF64] = MetricPattern22(client, _m(acc, 'sopr_7d_ema')) + self.realized_value: MetricPattern1[Dollars] = MetricPattern1(client, _m(acc, 'realized_value')) + self.sell_side_risk_ratio: MetricPattern19[StoredF32] = MetricPattern19(client, _m(acc, 'sell_side_risk_ratio')) + self.sell_side_risk_ratio_30d_ema: MetricPattern19[StoredF32] = MetricPattern19(client, _m(acc, 'sell_side_risk_ratio_30d_ema')) + self.sell_side_risk_ratio_7d_ema: MetricPattern19[StoredF32] = MetricPattern19(client, _m(acc, 'sell_side_risk_ratio_7d_ema')) + self.sopr: MetricPattern19[StoredF64] = MetricPattern19(client, _m(acc, 'sopr')) + self.sopr_30d_ema: MetricPattern19[StoredF64] = MetricPattern19(client, _m(acc, 'sopr_30d_ema')) + self.sopr_7d_ema: MetricPattern19[StoredF64] = MetricPattern19(client, _m(acc, 'sopr_7d_ema')) self.total_realized_pnl: MetricPattern1[Dollars] = MetricPattern1(client, _m(acc, 'total_realized_pnl')) - self.value_created: MetricPattern26[Dollars] = MetricPattern26(client, _m(acc, 'value_created')) - self.value_created_sum: MetricPattern2[Dollars] = MetricPattern2(client, _m(acc, 'value_created_sum')) - self.value_destroyed: MetricPattern26[Dollars] = MetricPattern26(client, _m(acc, 'value_destroyed')) - self.value_destroyed_sum: MetricPattern2[Dollars] = MetricPattern2(client, _m(acc, 'value_destroyed_sum')) + self.value_created: MetricPattern1[Dollars] = MetricPattern1(client, _m(acc, 'value_created')) + self.value_destroyed: MetricPattern1[Dollars] = MetricPattern1(client, _m(acc, 'value_destroyed')) + +class Ratio1ySdPattern: + """Pattern struct for repeated tree structure.""" + + def __init__(self, client: BrkClientBase, acc: str): + """Create pattern node with accumulated metric name.""" + self._0sd_usd: MetricPattern4[Dollars] = MetricPattern4(client, _m(acc, '0sd_usd')) + self.m0_5sd: MetricPattern4[StoredF32] = MetricPattern4(client, _m(acc, 'm0_5sd')) + 
self.m0_5sd_usd: MetricPattern4[Dollars] = MetricPattern4(client, _m(acc, 'm0_5sd_usd')) + self.m1_5sd: MetricPattern4[StoredF32] = MetricPattern4(client, _m(acc, 'm1_5sd')) + self.m1_5sd_usd: MetricPattern4[Dollars] = MetricPattern4(client, _m(acc, 'm1_5sd_usd')) + self.m1sd: MetricPattern4[StoredF32] = MetricPattern4(client, _m(acc, 'm1sd')) + self.m1sd_usd: MetricPattern4[Dollars] = MetricPattern4(client, _m(acc, 'm1sd_usd')) + self.m2_5sd: MetricPattern4[StoredF32] = MetricPattern4(client, _m(acc, 'm2_5sd')) + self.m2_5sd_usd: MetricPattern4[Dollars] = MetricPattern4(client, _m(acc, 'm2_5sd_usd')) + self.m2sd: MetricPattern4[StoredF32] = MetricPattern4(client, _m(acc, 'm2sd')) + self.m2sd_usd: MetricPattern4[Dollars] = MetricPattern4(client, _m(acc, 'm2sd_usd')) + self.m3sd: MetricPattern4[StoredF32] = MetricPattern4(client, _m(acc, 'm3sd')) + self.m3sd_usd: MetricPattern4[Dollars] = MetricPattern4(client, _m(acc, 'm3sd_usd')) + self.p0_5sd: MetricPattern4[StoredF32] = MetricPattern4(client, _m(acc, 'p0_5sd')) + self.p0_5sd_usd: MetricPattern4[Dollars] = MetricPattern4(client, _m(acc, 'p0_5sd_usd')) + self.p1_5sd: MetricPattern4[StoredF32] = MetricPattern4(client, _m(acc, 'p1_5sd')) + self.p1_5sd_usd: MetricPattern4[Dollars] = MetricPattern4(client, _m(acc, 'p1_5sd_usd')) + self.p1sd: MetricPattern4[StoredF32] = MetricPattern4(client, _m(acc, 'p1sd')) + self.p1sd_usd: MetricPattern4[Dollars] = MetricPattern4(client, _m(acc, 'p1sd_usd')) + self.p2_5sd: MetricPattern4[StoredF32] = MetricPattern4(client, _m(acc, 'p2_5sd')) + self.p2_5sd_usd: MetricPattern4[Dollars] = MetricPattern4(client, _m(acc, 'p2_5sd_usd')) + self.p2sd: MetricPattern4[StoredF32] = MetricPattern4(client, _m(acc, 'p2sd')) + self.p2sd_usd: MetricPattern4[Dollars] = MetricPattern4(client, _m(acc, 'p2sd_usd')) + self.p3sd: MetricPattern4[StoredF32] = MetricPattern4(client, _m(acc, 'p3sd')) + self.p3sd_usd: MetricPattern4[Dollars] = MetricPattern4(client, _m(acc, 'p3sd_usd')) + self.sd: 
MetricPattern4[StoredF32] = MetricPattern4(client, _m(acc, 'sd')) + self.sma: MetricPattern4[StoredF32] = MetricPattern4(client, _m(acc, 'sma')) + self.zscore: MetricPattern4[StoredF32] = MetricPattern4(client, _m(acc, 'zscore')) class RealizedPattern2: """Pattern struct for repeated tree structure.""" - + def __init__(self, client: BrkClientBase, acc: str): """Create pattern node with accumulated metric name.""" - self.mvrv: MetricPattern5[StoredF32] = MetricPattern5(client, _m(acc, 'mvrv')) + self.mvrv: MetricPattern4[StoredF32] = MetricPattern4(client, _m(acc, 'mvrv')) self.neg_realized_loss: BlockCountPattern[Dollars] = BlockCountPattern(client, _m(acc, 'neg_realized_loss')) self.net_realized_pnl: BlockCountPattern[Dollars] = BlockCountPattern(client, _m(acc, 'net_realized_pnl')) - self.net_realized_pnl_cumulative_30d_delta: MetricPattern5[Dollars] = MetricPattern5(client, _m(acc, 'net_realized_pnl_cumulative_30d_delta')) - self.net_realized_pnl_cumulative_30d_delta_rel_to_market_cap: MetricPattern5[StoredF32] = MetricPattern5(client, _m(acc, 'net_realized_pnl_cumulative_30d_delta_rel_to_market_cap')) - self.net_realized_pnl_cumulative_30d_delta_rel_to_realized_cap: MetricPattern5[StoredF32] = MetricPattern5(client, _m(acc, 'net_realized_pnl_cumulative_30d_delta_rel_to_realized_cap')) + self.net_realized_pnl_cumulative_30d_delta: MetricPattern4[Dollars] = MetricPattern4(client, _m(acc, 'net_realized_pnl_cumulative_30d_delta')) + self.net_realized_pnl_cumulative_30d_delta_rel_to_market_cap: MetricPattern4[StoredF32] = MetricPattern4(client, _m(acc, 'net_realized_pnl_cumulative_30d_delta_rel_to_market_cap')) + self.net_realized_pnl_cumulative_30d_delta_rel_to_realized_cap: MetricPattern4[StoredF32] = MetricPattern4(client, _m(acc, 'net_realized_pnl_cumulative_30d_delta_rel_to_realized_cap')) self.net_realized_pnl_rel_to_realized_cap: BlockCountPattern[StoredF32] = BlockCountPattern(client, _m(acc, 'net_realized_pnl_rel_to_realized_cap')) self.realized_cap: 
MetricPattern1[Dollars] = MetricPattern1(client, _m(acc, 'realized_cap')) - self.realized_cap_30d_delta: MetricPattern5[Dollars] = MetricPattern5(client, _m(acc, 'realized_cap_30d_delta')) + self.realized_cap_30d_delta: MetricPattern4[Dollars] = MetricPattern4(client, _m(acc, 'realized_cap_30d_delta')) self.realized_cap_rel_to_own_market_cap: MetricPattern1[StoredF32] = MetricPattern1(client, _m(acc, 'realized_cap_rel_to_own_market_cap')) self.realized_loss: BlockCountPattern[Dollars] = BlockCountPattern(client, _m(acc, 'realized_loss')) self.realized_loss_rel_to_realized_cap: BlockCountPattern[StoredF32] = BlockCountPattern(client, _m(acc, 'realized_loss_rel_to_realized_cap')) @@ -2255,166 +2126,128 @@ class RealizedPattern2: self.realized_price_extra: ActivePriceRatioPattern = ActivePriceRatioPattern(client, _m(acc, 'realized_price_ratio')) self.realized_profit: BlockCountPattern[Dollars] = BlockCountPattern(client, _m(acc, 'realized_profit')) self.realized_profit_rel_to_realized_cap: BlockCountPattern[StoredF32] = BlockCountPattern(client, _m(acc, 'realized_profit_rel_to_realized_cap')) - self.realized_profit_to_loss_ratio: MetricPattern22[StoredF64] = MetricPattern22(client, _m(acc, 'realized_profit_to_loss_ratio')) - self.realized_value: DifficultyAdjustmentPattern[Dollars] = DifficultyAdjustmentPattern(client, _m(acc, 'realized_value')) - self.sell_side_risk_ratio: MetricPattern22[StoredF32] = MetricPattern22(client, _m(acc, 'sell_side_risk_ratio')) - self.sell_side_risk_ratio_30d_ema: MetricPattern22[StoredF32] = MetricPattern22(client, _m(acc, 'sell_side_risk_ratio_30d_ema')) - self.sell_side_risk_ratio_7d_ema: MetricPattern22[StoredF32] = MetricPattern22(client, _m(acc, 'sell_side_risk_ratio_7d_ema')) - self.sopr: MetricPattern22[StoredF64] = MetricPattern22(client, _m(acc, 'sopr')) - self.sopr_30d_ema: MetricPattern22[StoredF64] = MetricPattern22(client, _m(acc, 'sopr_30d_ema')) - self.sopr_7d_ema: MetricPattern22[StoredF64] = MetricPattern22(client, 
_m(acc, 'sopr_7d_ema')) + self.realized_profit_to_loss_ratio: MetricPattern19[StoredF64] = MetricPattern19(client, _m(acc, 'realized_profit_to_loss_ratio')) + self.realized_value: MetricPattern1[Dollars] = MetricPattern1(client, _m(acc, 'realized_value')) + self.sell_side_risk_ratio: MetricPattern19[StoredF32] = MetricPattern19(client, _m(acc, 'sell_side_risk_ratio')) + self.sell_side_risk_ratio_30d_ema: MetricPattern19[StoredF32] = MetricPattern19(client, _m(acc, 'sell_side_risk_ratio_30d_ema')) + self.sell_side_risk_ratio_7d_ema: MetricPattern19[StoredF32] = MetricPattern19(client, _m(acc, 'sell_side_risk_ratio_7d_ema')) + self.sopr: MetricPattern19[StoredF64] = MetricPattern19(client, _m(acc, 'sopr')) + self.sopr_30d_ema: MetricPattern19[StoredF64] = MetricPattern19(client, _m(acc, 'sopr_30d_ema')) + self.sopr_7d_ema: MetricPattern19[StoredF64] = MetricPattern19(client, _m(acc, 'sopr_7d_ema')) self.total_realized_pnl: MetricPattern1[Dollars] = MetricPattern1(client, _m(acc, 'total_realized_pnl')) - self.value_created: MetricPattern26[Dollars] = MetricPattern26(client, _m(acc, 'value_created')) - self.value_created_sum: MetricPattern2[Dollars] = MetricPattern2(client, _m(acc, 'value_created_sum')) - self.value_destroyed: MetricPattern26[Dollars] = MetricPattern26(client, _m(acc, 'value_destroyed')) - self.value_destroyed_sum: MetricPattern2[Dollars] = MetricPattern2(client, _m(acc, 'value_destroyed_sum')) - -class Ratio1ySdPattern: - """Pattern struct for repeated tree structure.""" - - def __init__(self, client: BrkClientBase, acc: str): - """Create pattern node with accumulated metric name.""" - self._0sd_usd: MetricPattern5[Dollars] = MetricPattern5(client, _m(acc, '0sd_usd')) - self.m0_5sd: MetricPattern5[StoredF32] = MetricPattern5(client, _m(acc, 'm0_5sd')) - self.m0_5sd_usd: MetricPattern5[Dollars] = MetricPattern5(client, _m(acc, 'm0_5sd_usd')) - self.m1_5sd: MetricPattern5[StoredF32] = MetricPattern5(client, _m(acc, 'm1_5sd')) - self.m1_5sd_usd: 
MetricPattern5[Dollars] = MetricPattern5(client, _m(acc, 'm1_5sd_usd')) - self.m1sd: MetricPattern5[StoredF32] = MetricPattern5(client, _m(acc, 'm1sd')) - self.m1sd_usd: MetricPattern5[Dollars] = MetricPattern5(client, _m(acc, 'm1sd_usd')) - self.m2_5sd: MetricPattern5[StoredF32] = MetricPattern5(client, _m(acc, 'm2_5sd')) - self.m2_5sd_usd: MetricPattern5[Dollars] = MetricPattern5(client, _m(acc, 'm2_5sd_usd')) - self.m2sd: MetricPattern5[StoredF32] = MetricPattern5(client, _m(acc, 'm2sd')) - self.m2sd_usd: MetricPattern5[Dollars] = MetricPattern5(client, _m(acc, 'm2sd_usd')) - self.m3sd: MetricPattern5[StoredF32] = MetricPattern5(client, _m(acc, 'm3sd')) - self.m3sd_usd: MetricPattern5[Dollars] = MetricPattern5(client, _m(acc, 'm3sd_usd')) - self.p0_5sd: MetricPattern5[StoredF32] = MetricPattern5(client, _m(acc, 'p0_5sd')) - self.p0_5sd_usd: MetricPattern5[Dollars] = MetricPattern5(client, _m(acc, 'p0_5sd_usd')) - self.p1_5sd: MetricPattern5[StoredF32] = MetricPattern5(client, _m(acc, 'p1_5sd')) - self.p1_5sd_usd: MetricPattern5[Dollars] = MetricPattern5(client, _m(acc, 'p1_5sd_usd')) - self.p1sd: MetricPattern5[StoredF32] = MetricPattern5(client, _m(acc, 'p1sd')) - self.p1sd_usd: MetricPattern5[Dollars] = MetricPattern5(client, _m(acc, 'p1sd_usd')) - self.p2_5sd: MetricPattern5[StoredF32] = MetricPattern5(client, _m(acc, 'p2_5sd')) - self.p2_5sd_usd: MetricPattern5[Dollars] = MetricPattern5(client, _m(acc, 'p2_5sd_usd')) - self.p2sd: MetricPattern5[StoredF32] = MetricPattern5(client, _m(acc, 'p2sd')) - self.p2sd_usd: MetricPattern5[Dollars] = MetricPattern5(client, _m(acc, 'p2sd_usd')) - self.p3sd: MetricPattern5[StoredF32] = MetricPattern5(client, _m(acc, 'p3sd')) - self.p3sd_usd: MetricPattern5[Dollars] = MetricPattern5(client, _m(acc, 'p3sd_usd')) - self.sd: MetricPattern5[StoredF32] = MetricPattern5(client, _m(acc, 'sd')) - self.sma: MetricPattern5[StoredF32] = MetricPattern5(client, _m(acc, 'sma')) - self.zscore: MetricPattern5[StoredF32] = 
MetricPattern5(client, _m(acc, 'zscore')) + self.value_created: MetricPattern1[Dollars] = MetricPattern1(client, _m(acc, 'value_created')) + self.value_destroyed: MetricPattern1[Dollars] = MetricPattern1(client, _m(acc, 'value_destroyed')) class RealizedPattern: """Pattern struct for repeated tree structure.""" - + def __init__(self, client: BrkClientBase, acc: str): """Create pattern node with accumulated metric name.""" - self.mvrv: MetricPattern5[StoredF32] = MetricPattern5(client, _m(acc, 'mvrv')) + self.mvrv: MetricPattern4[StoredF32] = MetricPattern4(client, _m(acc, 'mvrv')) self.neg_realized_loss: BlockCountPattern[Dollars] = BlockCountPattern(client, _m(acc, 'neg_realized_loss')) self.net_realized_pnl: BlockCountPattern[Dollars] = BlockCountPattern(client, _m(acc, 'net_realized_pnl')) - self.net_realized_pnl_cumulative_30d_delta: MetricPattern5[Dollars] = MetricPattern5(client, _m(acc, 'net_realized_pnl_cumulative_30d_delta')) - self.net_realized_pnl_cumulative_30d_delta_rel_to_market_cap: MetricPattern5[StoredF32] = MetricPattern5(client, _m(acc, 'net_realized_pnl_cumulative_30d_delta_rel_to_market_cap')) - self.net_realized_pnl_cumulative_30d_delta_rel_to_realized_cap: MetricPattern5[StoredF32] = MetricPattern5(client, _m(acc, 'net_realized_pnl_cumulative_30d_delta_rel_to_realized_cap')) + self.net_realized_pnl_cumulative_30d_delta: MetricPattern4[Dollars] = MetricPattern4(client, _m(acc, 'net_realized_pnl_cumulative_30d_delta')) + self.net_realized_pnl_cumulative_30d_delta_rel_to_market_cap: MetricPattern4[StoredF32] = MetricPattern4(client, _m(acc, 'net_realized_pnl_cumulative_30d_delta_rel_to_market_cap')) + self.net_realized_pnl_cumulative_30d_delta_rel_to_realized_cap: MetricPattern4[StoredF32] = MetricPattern4(client, _m(acc, 'net_realized_pnl_cumulative_30d_delta_rel_to_realized_cap')) self.net_realized_pnl_rel_to_realized_cap: BlockCountPattern[StoredF32] = BlockCountPattern(client, _m(acc, 'net_realized_pnl_rel_to_realized_cap')) 
self.realized_cap: MetricPattern1[Dollars] = MetricPattern1(client, _m(acc, 'realized_cap')) - self.realized_cap_30d_delta: MetricPattern5[Dollars] = MetricPattern5(client, _m(acc, 'realized_cap_30d_delta')) + self.realized_cap_30d_delta: MetricPattern4[Dollars] = MetricPattern4(client, _m(acc, 'realized_cap_30d_delta')) self.realized_loss: BlockCountPattern[Dollars] = BlockCountPattern(client, _m(acc, 'realized_loss')) self.realized_loss_rel_to_realized_cap: BlockCountPattern[StoredF32] = BlockCountPattern(client, _m(acc, 'realized_loss_rel_to_realized_cap')) self.realized_price: MetricPattern1[Dollars] = MetricPattern1(client, _m(acc, 'realized_price')) self.realized_price_extra: RealizedPriceExtraPattern = RealizedPriceExtraPattern(client, _m(acc, 'realized_price')) self.realized_profit: BlockCountPattern[Dollars] = BlockCountPattern(client, _m(acc, 'realized_profit')) self.realized_profit_rel_to_realized_cap: BlockCountPattern[StoredF32] = BlockCountPattern(client, _m(acc, 'realized_profit_rel_to_realized_cap')) - self.realized_value: DifficultyAdjustmentPattern[Dollars] = DifficultyAdjustmentPattern(client, _m(acc, 'realized_value')) - self.sell_side_risk_ratio: MetricPattern22[StoredF32] = MetricPattern22(client, _m(acc, 'sell_side_risk_ratio')) - self.sell_side_risk_ratio_30d_ema: MetricPattern22[StoredF32] = MetricPattern22(client, _m(acc, 'sell_side_risk_ratio_30d_ema')) - self.sell_side_risk_ratio_7d_ema: MetricPattern22[StoredF32] = MetricPattern22(client, _m(acc, 'sell_side_risk_ratio_7d_ema')) - self.sopr: MetricPattern22[StoredF64] = MetricPattern22(client, _m(acc, 'sopr')) - self.sopr_30d_ema: MetricPattern22[StoredF64] = MetricPattern22(client, _m(acc, 'sopr_30d_ema')) - self.sopr_7d_ema: MetricPattern22[StoredF64] = MetricPattern22(client, _m(acc, 'sopr_7d_ema')) + self.realized_value: MetricPattern1[Dollars] = MetricPattern1(client, _m(acc, 'realized_value')) + self.sell_side_risk_ratio: MetricPattern19[StoredF32] = MetricPattern19(client, _m(acc, 
'sell_side_risk_ratio')) + self.sell_side_risk_ratio_30d_ema: MetricPattern19[StoredF32] = MetricPattern19(client, _m(acc, 'sell_side_risk_ratio_30d_ema')) + self.sell_side_risk_ratio_7d_ema: MetricPattern19[StoredF32] = MetricPattern19(client, _m(acc, 'sell_side_risk_ratio_7d_ema')) + self.sopr: MetricPattern19[StoredF64] = MetricPattern19(client, _m(acc, 'sopr')) + self.sopr_30d_ema: MetricPattern19[StoredF64] = MetricPattern19(client, _m(acc, 'sopr_30d_ema')) + self.sopr_7d_ema: MetricPattern19[StoredF64] = MetricPattern19(client, _m(acc, 'sopr_7d_ema')) self.total_realized_pnl: MetricPattern1[Dollars] = MetricPattern1(client, _m(acc, 'total_realized_pnl')) - self.value_created: MetricPattern26[Dollars] = MetricPattern26(client, _m(acc, 'value_created')) - self.value_created_sum: MetricPattern2[Dollars] = MetricPattern2(client, _m(acc, 'value_created_sum')) - self.value_destroyed: MetricPattern26[Dollars] = MetricPattern26(client, _m(acc, 'value_destroyed')) - self.value_destroyed_sum: MetricPattern2[Dollars] = MetricPattern2(client, _m(acc, 'value_destroyed_sum')) + self.value_created: MetricPattern1[Dollars] = MetricPattern1(client, _m(acc, 'value_created')) + self.value_destroyed: MetricPattern1[Dollars] = MetricPattern1(client, _m(acc, 'value_destroyed')) class Price111dSmaPattern: """Pattern struct for repeated tree structure.""" - + def __init__(self, client: BrkClientBase, acc: str): """Create pattern node with accumulated metric name.""" - self.price: MetricPattern5[Dollars] = MetricPattern5(client, acc) - self.ratio: MetricPattern5[StoredF32] = MetricPattern5(client, _m(acc, 'ratio')) - self.ratio_1m_sma: MetricPattern5[StoredF32] = MetricPattern5(client, _m(acc, 'ratio_1m_sma')) - self.ratio_1w_sma: MetricPattern5[StoredF32] = MetricPattern5(client, _m(acc, 'ratio_1w_sma')) + self.price: MetricPattern4[Dollars] = MetricPattern4(client, acc) + self.ratio: MetricPattern4[StoredF32] = MetricPattern4(client, _m(acc, 'ratio')) + self.ratio_1m_sma: 
MetricPattern4[StoredF32] = MetricPattern4(client, _m(acc, 'ratio_1m_sma')) + self.ratio_1w_sma: MetricPattern4[StoredF32] = MetricPattern4(client, _m(acc, 'ratio_1w_sma')) self.ratio_1y_sd: Ratio1ySdPattern = Ratio1ySdPattern(client, _m(acc, 'ratio_1y')) self.ratio_2y_sd: Ratio1ySdPattern = Ratio1ySdPattern(client, _m(acc, 'ratio_2y')) self.ratio_4y_sd: Ratio1ySdPattern = Ratio1ySdPattern(client, _m(acc, 'ratio_4y')) - self.ratio_pct1: MetricPattern5[StoredF32] = MetricPattern5(client, _m(acc, 'ratio_pct1')) - self.ratio_pct1_usd: MetricPattern5[Dollars] = MetricPattern5(client, _m(acc, 'ratio_pct1_usd')) - self.ratio_pct2: MetricPattern5[StoredF32] = MetricPattern5(client, _m(acc, 'ratio_pct2')) - self.ratio_pct2_usd: MetricPattern5[Dollars] = MetricPattern5(client, _m(acc, 'ratio_pct2_usd')) - self.ratio_pct5: MetricPattern5[StoredF32] = MetricPattern5(client, _m(acc, 'ratio_pct5')) - self.ratio_pct5_usd: MetricPattern5[Dollars] = MetricPattern5(client, _m(acc, 'ratio_pct5_usd')) - self.ratio_pct95: MetricPattern5[StoredF32] = MetricPattern5(client, _m(acc, 'ratio_pct95')) - self.ratio_pct95_usd: MetricPattern5[Dollars] = MetricPattern5(client, _m(acc, 'ratio_pct95_usd')) - self.ratio_pct98: MetricPattern5[StoredF32] = MetricPattern5(client, _m(acc, 'ratio_pct98')) - self.ratio_pct98_usd: MetricPattern5[Dollars] = MetricPattern5(client, _m(acc, 'ratio_pct98_usd')) - self.ratio_pct99: MetricPattern5[StoredF32] = MetricPattern5(client, _m(acc, 'ratio_pct99')) - self.ratio_pct99_usd: MetricPattern5[Dollars] = MetricPattern5(client, _m(acc, 'ratio_pct99_usd')) + self.ratio_pct1: MetricPattern4[StoredF32] = MetricPattern4(client, _m(acc, 'ratio_pct1')) + self.ratio_pct1_usd: MetricPattern4[Dollars] = MetricPattern4(client, _m(acc, 'ratio_pct1_usd')) + self.ratio_pct2: MetricPattern4[StoredF32] = MetricPattern4(client, _m(acc, 'ratio_pct2')) + self.ratio_pct2_usd: MetricPattern4[Dollars] = MetricPattern4(client, _m(acc, 'ratio_pct2_usd')) + self.ratio_pct5: 
MetricPattern4[StoredF32] = MetricPattern4(client, _m(acc, 'ratio_pct5')) + self.ratio_pct5_usd: MetricPattern4[Dollars] = MetricPattern4(client, _m(acc, 'ratio_pct5_usd')) + self.ratio_pct95: MetricPattern4[StoredF32] = MetricPattern4(client, _m(acc, 'ratio_pct95')) + self.ratio_pct95_usd: MetricPattern4[Dollars] = MetricPattern4(client, _m(acc, 'ratio_pct95_usd')) + self.ratio_pct98: MetricPattern4[StoredF32] = MetricPattern4(client, _m(acc, 'ratio_pct98')) + self.ratio_pct98_usd: MetricPattern4[Dollars] = MetricPattern4(client, _m(acc, 'ratio_pct98_usd')) + self.ratio_pct99: MetricPattern4[StoredF32] = MetricPattern4(client, _m(acc, 'ratio_pct99')) + self.ratio_pct99_usd: MetricPattern4[Dollars] = MetricPattern4(client, _m(acc, 'ratio_pct99_usd')) self.ratio_sd: Ratio1ySdPattern = Ratio1ySdPattern(client, _m(acc, 'ratio')) -class PercentilesPattern2: - """Pattern struct for repeated tree structure.""" - - def __init__(self, client: BrkClientBase, acc: str): - """Create pattern node with accumulated metric name.""" - self.cost_basis_pct05: MetricPattern5[Dollars] = MetricPattern5(client, _m(acc, 'pct05')) - self.cost_basis_pct10: MetricPattern5[Dollars] = MetricPattern5(client, _m(acc, 'pct10')) - self.cost_basis_pct15: MetricPattern5[Dollars] = MetricPattern5(client, _m(acc, 'pct15')) - self.cost_basis_pct20: MetricPattern5[Dollars] = MetricPattern5(client, _m(acc, 'pct20')) - self.cost_basis_pct25: MetricPattern5[Dollars] = MetricPattern5(client, _m(acc, 'pct25')) - self.cost_basis_pct30: MetricPattern5[Dollars] = MetricPattern5(client, _m(acc, 'pct30')) - self.cost_basis_pct35: MetricPattern5[Dollars] = MetricPattern5(client, _m(acc, 'pct35')) - self.cost_basis_pct40: MetricPattern5[Dollars] = MetricPattern5(client, _m(acc, 'pct40')) - self.cost_basis_pct45: MetricPattern5[Dollars] = MetricPattern5(client, _m(acc, 'pct45')) - self.cost_basis_pct50: MetricPattern5[Dollars] = MetricPattern5(client, _m(acc, 'pct50')) - self.cost_basis_pct55: 
MetricPattern5[Dollars] = MetricPattern5(client, _m(acc, 'pct55')) - self.cost_basis_pct60: MetricPattern5[Dollars] = MetricPattern5(client, _m(acc, 'pct60')) - self.cost_basis_pct65: MetricPattern5[Dollars] = MetricPattern5(client, _m(acc, 'pct65')) - self.cost_basis_pct70: MetricPattern5[Dollars] = MetricPattern5(client, _m(acc, 'pct70')) - self.cost_basis_pct75: MetricPattern5[Dollars] = MetricPattern5(client, _m(acc, 'pct75')) - self.cost_basis_pct80: MetricPattern5[Dollars] = MetricPattern5(client, _m(acc, 'pct80')) - self.cost_basis_pct85: MetricPattern5[Dollars] = MetricPattern5(client, _m(acc, 'pct85')) - self.cost_basis_pct90: MetricPattern5[Dollars] = MetricPattern5(client, _m(acc, 'pct90')) - self.cost_basis_pct95: MetricPattern5[Dollars] = MetricPattern5(client, _m(acc, 'pct95')) - class ActivePriceRatioPattern: """Pattern struct for repeated tree structure.""" - + def __init__(self, client: BrkClientBase, acc: str): """Create pattern node with accumulated metric name.""" - self.ratio: MetricPattern5[StoredF32] = MetricPattern5(client, acc) - self.ratio_1m_sma: MetricPattern5[StoredF32] = MetricPattern5(client, _m(acc, '1m_sma')) - self.ratio_1w_sma: MetricPattern5[StoredF32] = MetricPattern5(client, _m(acc, '1w_sma')) + self.ratio: MetricPattern4[StoredF32] = MetricPattern4(client, acc) + self.ratio_1m_sma: MetricPattern4[StoredF32] = MetricPattern4(client, _m(acc, '1m_sma')) + self.ratio_1w_sma: MetricPattern4[StoredF32] = MetricPattern4(client, _m(acc, '1w_sma')) self.ratio_1y_sd: Ratio1ySdPattern = Ratio1ySdPattern(client, _m(acc, '1y')) self.ratio_2y_sd: Ratio1ySdPattern = Ratio1ySdPattern(client, _m(acc, '2y')) self.ratio_4y_sd: Ratio1ySdPattern = Ratio1ySdPattern(client, _m(acc, '4y')) - self.ratio_pct1: MetricPattern5[StoredF32] = MetricPattern5(client, _m(acc, 'pct1')) - self.ratio_pct1_usd: MetricPattern5[Dollars] = MetricPattern5(client, _m(acc, 'pct1_usd')) - self.ratio_pct2: MetricPattern5[StoredF32] = MetricPattern5(client, _m(acc, 
'pct2')) - self.ratio_pct2_usd: MetricPattern5[Dollars] = MetricPattern5(client, _m(acc, 'pct2_usd')) - self.ratio_pct5: MetricPattern5[StoredF32] = MetricPattern5(client, _m(acc, 'pct5')) - self.ratio_pct5_usd: MetricPattern5[Dollars] = MetricPattern5(client, _m(acc, 'pct5_usd')) - self.ratio_pct95: MetricPattern5[StoredF32] = MetricPattern5(client, _m(acc, 'pct95')) - self.ratio_pct95_usd: MetricPattern5[Dollars] = MetricPattern5(client, _m(acc, 'pct95_usd')) - self.ratio_pct98: MetricPattern5[StoredF32] = MetricPattern5(client, _m(acc, 'pct98')) - self.ratio_pct98_usd: MetricPattern5[Dollars] = MetricPattern5(client, _m(acc, 'pct98_usd')) - self.ratio_pct99: MetricPattern5[StoredF32] = MetricPattern5(client, _m(acc, 'pct99')) - self.ratio_pct99_usd: MetricPattern5[Dollars] = MetricPattern5(client, _m(acc, 'pct99_usd')) + self.ratio_pct1: MetricPattern4[StoredF32] = MetricPattern4(client, _m(acc, 'pct1')) + self.ratio_pct1_usd: MetricPattern4[Dollars] = MetricPattern4(client, _m(acc, 'pct1_usd')) + self.ratio_pct2: MetricPattern4[StoredF32] = MetricPattern4(client, _m(acc, 'pct2')) + self.ratio_pct2_usd: MetricPattern4[Dollars] = MetricPattern4(client, _m(acc, 'pct2_usd')) + self.ratio_pct5: MetricPattern4[StoredF32] = MetricPattern4(client, _m(acc, 'pct5')) + self.ratio_pct5_usd: MetricPattern4[Dollars] = MetricPattern4(client, _m(acc, 'pct5_usd')) + self.ratio_pct95: MetricPattern4[StoredF32] = MetricPattern4(client, _m(acc, 'pct95')) + self.ratio_pct95_usd: MetricPattern4[Dollars] = MetricPattern4(client, _m(acc, 'pct95_usd')) + self.ratio_pct98: MetricPattern4[StoredF32] = MetricPattern4(client, _m(acc, 'pct98')) + self.ratio_pct98_usd: MetricPattern4[Dollars] = MetricPattern4(client, _m(acc, 'pct98_usd')) + self.ratio_pct99: MetricPattern4[StoredF32] = MetricPattern4(client, _m(acc, 'pct99')) + self.ratio_pct99_usd: MetricPattern4[Dollars] = MetricPattern4(client, _m(acc, 'pct99_usd')) self.ratio_sd: Ratio1ySdPattern = Ratio1ySdPattern(client, acc) +class 
PercentilesPattern2: + """Pattern struct for repeated tree structure.""" + + def __init__(self, client: BrkClientBase, acc: str): + """Create pattern node with accumulated metric name.""" + self.cost_basis_pct05: MetricPattern4[Dollars] = MetricPattern4(client, _m(acc, 'pct05')) + self.cost_basis_pct10: MetricPattern4[Dollars] = MetricPattern4(client, _m(acc, 'pct10')) + self.cost_basis_pct15: MetricPattern4[Dollars] = MetricPattern4(client, _m(acc, 'pct15')) + self.cost_basis_pct20: MetricPattern4[Dollars] = MetricPattern4(client, _m(acc, 'pct20')) + self.cost_basis_pct25: MetricPattern4[Dollars] = MetricPattern4(client, _m(acc, 'pct25')) + self.cost_basis_pct30: MetricPattern4[Dollars] = MetricPattern4(client, _m(acc, 'pct30')) + self.cost_basis_pct35: MetricPattern4[Dollars] = MetricPattern4(client, _m(acc, 'pct35')) + self.cost_basis_pct40: MetricPattern4[Dollars] = MetricPattern4(client, _m(acc, 'pct40')) + self.cost_basis_pct45: MetricPattern4[Dollars] = MetricPattern4(client, _m(acc, 'pct45')) + self.cost_basis_pct50: MetricPattern4[Dollars] = MetricPattern4(client, _m(acc, 'pct50')) + self.cost_basis_pct55: MetricPattern4[Dollars] = MetricPattern4(client, _m(acc, 'pct55')) + self.cost_basis_pct60: MetricPattern4[Dollars] = MetricPattern4(client, _m(acc, 'pct60')) + self.cost_basis_pct65: MetricPattern4[Dollars] = MetricPattern4(client, _m(acc, 'pct65')) + self.cost_basis_pct70: MetricPattern4[Dollars] = MetricPattern4(client, _m(acc, 'pct70')) + self.cost_basis_pct75: MetricPattern4[Dollars] = MetricPattern4(client, _m(acc, 'pct75')) + self.cost_basis_pct80: MetricPattern4[Dollars] = MetricPattern4(client, _m(acc, 'pct80')) + self.cost_basis_pct85: MetricPattern4[Dollars] = MetricPattern4(client, _m(acc, 'pct85')) + self.cost_basis_pct90: MetricPattern4[Dollars] = MetricPattern4(client, _m(acc, 'pct90')) + self.cost_basis_pct95: MetricPattern4[Dollars] = MetricPattern4(client, _m(acc, 'pct95')) + class RelativePattern5: """Pattern struct for repeated tree 
structure.""" - + def __init__(self, client: BrkClientBase, acc: str): """Create pattern node with accumulated metric name.""" self.neg_unrealized_loss_rel_to_market_cap: MetricPattern3[StoredF32] = MetricPattern3(client, _m(acc, 'neg_unrealized_loss_rel_to_market_cap')) @@ -2423,12 +2256,12 @@ class RelativePattern5: self.net_unrealized_pnl_rel_to_market_cap: MetricPattern3[StoredF32] = MetricPattern3(client, _m(acc, 'net_unrealized_pnl_rel_to_market_cap')) self.net_unrealized_pnl_rel_to_own_market_cap: MetricPattern3[StoredF32] = MetricPattern3(client, _m(acc, 'net_unrealized_pnl_rel_to_own_market_cap')) self.net_unrealized_pnl_rel_to_own_total_unrealized_pnl: MetricPattern3[StoredF32] = MetricPattern3(client, _m(acc, 'net_unrealized_pnl_rel_to_own_total_unrealized_pnl')) - self.nupl: MetricPattern5[StoredF32] = MetricPattern5(client, _m(acc, 'nupl')) + self.nupl: MetricPattern4[StoredF32] = MetricPattern4(client, _m(acc, 'nupl')) self.supply_in_loss_rel_to_circulating_supply: MetricPattern3[StoredF64] = MetricPattern3(client, _m(acc, 'supply_in_loss_rel_to_circulating_supply')) self.supply_in_loss_rel_to_own_supply: MetricPattern3[StoredF64] = MetricPattern3(client, _m(acc, 'supply_in_loss_rel_to_own_supply')) self.supply_in_profit_rel_to_circulating_supply: MetricPattern3[StoredF64] = MetricPattern3(client, _m(acc, 'supply_in_profit_rel_to_circulating_supply')) self.supply_in_profit_rel_to_own_supply: MetricPattern3[StoredF64] = MetricPattern3(client, _m(acc, 'supply_in_profit_rel_to_own_supply')) - self.supply_rel_to_circulating_supply: MetricPattern5[StoredF64] = MetricPattern5(client, _m(acc, 'supply_rel_to_circulating_supply')) + self.supply_rel_to_circulating_supply: MetricPattern4[StoredF64] = MetricPattern4(client, _m(acc, 'supply_rel_to_circulating_supply')) self.unrealized_loss_rel_to_market_cap: MetricPattern3[StoredF32] = MetricPattern3(client, _m(acc, 'unrealized_loss_rel_to_market_cap')) self.unrealized_loss_rel_to_own_market_cap: 
MetricPattern3[StoredF32] = MetricPattern3(client, _m(acc, 'unrealized_loss_rel_to_own_market_cap')) self.unrealized_loss_rel_to_own_total_unrealized_pnl: MetricPattern3[StoredF32] = MetricPattern3(client, _m(acc, 'unrealized_loss_rel_to_own_total_unrealized_pnl')) @@ -2436,47 +2269,48 @@ class RelativePattern5: self.unrealized_profit_rel_to_own_market_cap: MetricPattern3[StoredF32] = MetricPattern3(client, _m(acc, 'unrealized_profit_rel_to_own_market_cap')) self.unrealized_profit_rel_to_own_total_unrealized_pnl: MetricPattern3[StoredF32] = MetricPattern3(client, _m(acc, 'unrealized_profit_rel_to_own_total_unrealized_pnl')) -class AXbtPattern: +class AaopoolPattern: """Pattern struct for repeated tree structure.""" - + def __init__(self, client: BrkClientBase, acc: str): """Create pattern node with accumulated metric name.""" - self._1d_dominance: BlockCountPattern[StoredF32] = BlockCountPattern(client, _m(acc, '1d_dominance')) - self._1m_blocks_mined: MetricPattern5[StoredU32] = MetricPattern5(client, _m(acc, '1m_blocks_mined')) - self._1m_dominance: MetricPattern5[StoredF32] = MetricPattern5(client, _m(acc, '1m_dominance')) - self._1w_blocks_mined: MetricPattern5[StoredU32] = MetricPattern5(client, _m(acc, '1w_blocks_mined')) - self._1w_dominance: MetricPattern5[StoredF32] = MetricPattern5(client, _m(acc, '1w_dominance')) - self._1y_blocks_mined: MetricPattern5[StoredU32] = MetricPattern5(client, _m(acc, '1y_blocks_mined')) - self._1y_dominance: MetricPattern5[StoredF32] = MetricPattern5(client, _m(acc, '1y_dominance')) + self._1m_blocks_mined: MetricPattern1[StoredU32] = MetricPattern1(client, _m(acc, '1m_blocks_mined')) + self._1m_dominance: MetricPattern1[StoredF32] = MetricPattern1(client, _m(acc, '1m_dominance')) + self._1w_blocks_mined: MetricPattern1[StoredU32] = MetricPattern1(client, _m(acc, '1w_blocks_mined')) + self._1w_dominance: MetricPattern1[StoredF32] = MetricPattern1(client, _m(acc, '1w_dominance')) + self._1y_blocks_mined: 
MetricPattern1[StoredU32] = MetricPattern1(client, _m(acc, '1y_blocks_mined')) + self._1y_dominance: MetricPattern1[StoredF32] = MetricPattern1(client, _m(acc, '1y_dominance')) + self._24h_blocks_mined: MetricPattern1[StoredU32] = MetricPattern1(client, _m(acc, '24h_blocks_mined')) + self._24h_dominance: MetricPattern1[StoredF32] = MetricPattern1(client, _m(acc, '24h_dominance')) self.blocks_mined: BlockCountPattern[StoredU32] = BlockCountPattern(client, _m(acc, 'blocks_mined')) self.coinbase: UnclaimedRewardsPattern = UnclaimedRewardsPattern(client, _m(acc, 'coinbase')) - self.days_since_block: MetricPattern5[StoredU16] = MetricPattern5(client, _m(acc, 'days_since_block')) - self.dominance: BlockCountPattern[StoredF32] = BlockCountPattern(client, _m(acc, 'dominance')) - self.fee: SentPattern = SentPattern(client, acc) - self.subsidy: SentPattern = SentPattern(client, acc) + self.days_since_block: MetricPattern4[StoredU16] = MetricPattern4(client, _m(acc, 'days_since_block')) + self.dominance: MetricPattern1[StoredF32] = MetricPattern1(client, _m(acc, 'dominance')) + self.fee: UnclaimedRewardsPattern = UnclaimedRewardsPattern(client, _m(acc, 'fee')) + self.subsidy: UnclaimedRewardsPattern = UnclaimedRewardsPattern(client, _m(acc, 'subsidy')) class PriceAgoPattern(Generic[T]): """Pattern struct for repeated tree structure.""" - + def __init__(self, client: BrkClientBase, acc: str): """Create pattern node with accumulated metric name.""" - self._10y: MetricPattern5[T] = MetricPattern5(client, _m(acc, '10y_ago')) - self._1d: MetricPattern5[T] = MetricPattern5(client, _m(acc, '1d_ago')) - self._1m: MetricPattern5[T] = MetricPattern5(client, _m(acc, '1m_ago')) - self._1w: MetricPattern5[T] = MetricPattern5(client, _m(acc, '1w_ago')) - self._1y: MetricPattern5[T] = MetricPattern5(client, _m(acc, '1y_ago')) - self._2y: MetricPattern5[T] = MetricPattern5(client, _m(acc, '2y_ago')) - self._3m: MetricPattern5[T] = MetricPattern5(client, _m(acc, '3m_ago')) - self._3y: 
MetricPattern5[T] = MetricPattern5(client, _m(acc, '3y_ago')) - self._4y: MetricPattern5[T] = MetricPattern5(client, _m(acc, '4y_ago')) - self._5y: MetricPattern5[T] = MetricPattern5(client, _m(acc, '5y_ago')) - self._6m: MetricPattern5[T] = MetricPattern5(client, _m(acc, '6m_ago')) - self._6y: MetricPattern5[T] = MetricPattern5(client, _m(acc, '6y_ago')) - self._8y: MetricPattern5[T] = MetricPattern5(client, _m(acc, '8y_ago')) + self._10y: MetricPattern4[T] = MetricPattern4(client, _m(acc, '10y_ago')) + self._1d: MetricPattern4[T] = MetricPattern4(client, _m(acc, '1d_ago')) + self._1m: MetricPattern4[T] = MetricPattern4(client, _m(acc, '1m_ago')) + self._1w: MetricPattern4[T] = MetricPattern4(client, _m(acc, '1w_ago')) + self._1y: MetricPattern4[T] = MetricPattern4(client, _m(acc, '1y_ago')) + self._2y: MetricPattern4[T] = MetricPattern4(client, _m(acc, '2y_ago')) + self._3m: MetricPattern4[T] = MetricPattern4(client, _m(acc, '3m_ago')) + self._3y: MetricPattern4[T] = MetricPattern4(client, _m(acc, '3y_ago')) + self._4y: MetricPattern4[T] = MetricPattern4(client, _m(acc, '4y_ago')) + self._5y: MetricPattern4[T] = MetricPattern4(client, _m(acc, '5y_ago')) + self._6m: MetricPattern4[T] = MetricPattern4(client, _m(acc, '6m_ago')) + self._6y: MetricPattern4[T] = MetricPattern4(client, _m(acc, '6y_ago')) + self._8y: MetricPattern4[T] = MetricPattern4(client, _m(acc, '8y_ago')) class PeriodLumpSumStackPattern: """Pattern struct for repeated tree structure.""" - + def __init__(self, client: BrkClientBase, acc: str): """Create pattern node with accumulated metric name.""" self._10y: ActiveSupplyPattern = ActiveSupplyPattern(client, (f'10y_{{acc}}' if acc else '10y')) @@ -2494,42 +2328,42 @@ class PeriodLumpSumStackPattern: class PeriodAveragePricePattern(Generic[T]): """Pattern struct for repeated tree structure.""" - + def __init__(self, client: BrkClientBase, acc: str): """Create pattern node with accumulated metric name.""" - self._10y: MetricPattern5[T] = 
MetricPattern5(client, (f'10y_{{acc}}' if acc else '10y')) - self._1m: MetricPattern5[T] = MetricPattern5(client, (f'1m_{{acc}}' if acc else '1m')) - self._1w: MetricPattern5[T] = MetricPattern5(client, (f'1w_{{acc}}' if acc else '1w')) - self._1y: MetricPattern5[T] = MetricPattern5(client, (f'1y_{{acc}}' if acc else '1y')) - self._2y: MetricPattern5[T] = MetricPattern5(client, (f'2y_{{acc}}' if acc else '2y')) - self._3m: MetricPattern5[T] = MetricPattern5(client, (f'3m_{{acc}}' if acc else '3m')) - self._3y: MetricPattern5[T] = MetricPattern5(client, (f'3y_{{acc}}' if acc else '3y')) - self._4y: MetricPattern5[T] = MetricPattern5(client, (f'4y_{{acc}}' if acc else '4y')) - self._5y: MetricPattern5[T] = MetricPattern5(client, (f'5y_{{acc}}' if acc else '5y')) - self._6m: MetricPattern5[T] = MetricPattern5(client, (f'6m_{{acc}}' if acc else '6m')) - self._6y: MetricPattern5[T] = MetricPattern5(client, (f'6y_{{acc}}' if acc else '6y')) - self._8y: MetricPattern5[T] = MetricPattern5(client, (f'8y_{{acc}}' if acc else '8y')) + self._10y: MetricPattern4[T] = MetricPattern4(client, (f'10y_{{acc}}' if acc else '10y')) + self._1m: MetricPattern4[T] = MetricPattern4(client, (f'1m_{{acc}}' if acc else '1m')) + self._1w: MetricPattern4[T] = MetricPattern4(client, (f'1w_{{acc}}' if acc else '1w')) + self._1y: MetricPattern4[T] = MetricPattern4(client, (f'1y_{{acc}}' if acc else '1y')) + self._2y: MetricPattern4[T] = MetricPattern4(client, (f'2y_{{acc}}' if acc else '2y')) + self._3m: MetricPattern4[T] = MetricPattern4(client, (f'3m_{{acc}}' if acc else '3m')) + self._3y: MetricPattern4[T] = MetricPattern4(client, (f'3y_{{acc}}' if acc else '3y')) + self._4y: MetricPattern4[T] = MetricPattern4(client, (f'4y_{{acc}}' if acc else '4y')) + self._5y: MetricPattern4[T] = MetricPattern4(client, (f'5y_{{acc}}' if acc else '5y')) + self._6m: MetricPattern4[T] = MetricPattern4(client, (f'6m_{{acc}}' if acc else '6m')) + self._6y: MetricPattern4[T] = MetricPattern4(client, 
(f'6y_{{acc}}' if acc else '6y')) + self._8y: MetricPattern4[T] = MetricPattern4(client, (f'8y_{{acc}}' if acc else '8y')) class ClassAveragePricePattern(Generic[T]): """Pattern struct for repeated tree structure.""" - + def __init__(self, client: BrkClientBase, acc: str): """Create pattern node with accumulated metric name.""" - self._2015: MetricPattern5[T] = MetricPattern5(client, _m(acc, '2015_average_price')) - self._2016: MetricPattern5[T] = MetricPattern5(client, _m(acc, '2016_average_price')) - self._2017: MetricPattern5[T] = MetricPattern5(client, _m(acc, '2017_average_price')) - self._2018: MetricPattern5[T] = MetricPattern5(client, _m(acc, '2018_average_price')) - self._2019: MetricPattern5[T] = MetricPattern5(client, _m(acc, '2019_average_price')) - self._2020: MetricPattern5[T] = MetricPattern5(client, _m(acc, '2020_average_price')) - self._2021: MetricPattern5[T] = MetricPattern5(client, _m(acc, '2021_average_price')) - self._2022: MetricPattern5[T] = MetricPattern5(client, _m(acc, '2022_average_price')) - self._2023: MetricPattern5[T] = MetricPattern5(client, _m(acc, '2023_average_price')) - self._2024: MetricPattern5[T] = MetricPattern5(client, _m(acc, '2024_average_price')) - self._2025: MetricPattern5[T] = MetricPattern5(client, _m(acc, '2025_average_price')) + self._2015: MetricPattern4[T] = MetricPattern4(client, _m(acc, '2015_average_price')) + self._2016: MetricPattern4[T] = MetricPattern4(client, _m(acc, '2016_average_price')) + self._2017: MetricPattern4[T] = MetricPattern4(client, _m(acc, '2017_average_price')) + self._2018: MetricPattern4[T] = MetricPattern4(client, _m(acc, '2018_average_price')) + self._2019: MetricPattern4[T] = MetricPattern4(client, _m(acc, '2019_average_price')) + self._2020: MetricPattern4[T] = MetricPattern4(client, _m(acc, '2020_average_price')) + self._2021: MetricPattern4[T] = MetricPattern4(client, _m(acc, '2021_average_price')) + self._2022: MetricPattern4[T] = MetricPattern4(client, _m(acc, 
'2022_average_price')) + self._2023: MetricPattern4[T] = MetricPattern4(client, _m(acc, '2023_average_price')) + self._2024: MetricPattern4[T] = MetricPattern4(client, _m(acc, '2024_average_price')) + self._2025: MetricPattern4[T] = MetricPattern4(client, _m(acc, '2025_average_price')) class RelativePattern2: """Pattern struct for repeated tree structure.""" - + def __init__(self, client: BrkClientBase, acc: str): """Create pattern node with accumulated metric name.""" self.neg_unrealized_loss_rel_to_own_market_cap: MetricPattern3[StoredF32] = MetricPattern3(client, _m(acc, 'neg_unrealized_loss_rel_to_own_market_cap')) @@ -2545,66 +2379,53 @@ class RelativePattern2: class RelativePattern: """Pattern struct for repeated tree structure.""" - + def __init__(self, client: BrkClientBase, acc: str): """Create pattern node with accumulated metric name.""" self.neg_unrealized_loss_rel_to_market_cap: MetricPattern3[StoredF32] = MetricPattern3(client, _m(acc, 'neg_unrealized_loss_rel_to_market_cap')) self.net_unrealized_pnl_rel_to_market_cap: MetricPattern3[StoredF32] = MetricPattern3(client, _m(acc, 'net_unrealized_pnl_rel_to_market_cap')) - self.nupl: MetricPattern5[StoredF32] = MetricPattern5(client, _m(acc, 'nupl')) + self.nupl: MetricPattern4[StoredF32] = MetricPattern4(client, _m(acc, 'nupl')) self.supply_in_loss_rel_to_circulating_supply: MetricPattern3[StoredF64] = MetricPattern3(client, _m(acc, 'supply_in_loss_rel_to_circulating_supply')) self.supply_in_loss_rel_to_own_supply: MetricPattern3[StoredF64] = MetricPattern3(client, _m(acc, 'supply_in_loss_rel_to_own_supply')) self.supply_in_profit_rel_to_circulating_supply: MetricPattern3[StoredF64] = MetricPattern3(client, _m(acc, 'supply_in_profit_rel_to_circulating_supply')) self.supply_in_profit_rel_to_own_supply: MetricPattern3[StoredF64] = MetricPattern3(client, _m(acc, 'supply_in_profit_rel_to_own_supply')) - self.supply_rel_to_circulating_supply: MetricPattern5[StoredF64] = MetricPattern5(client, _m(acc, 
'supply_rel_to_circulating_supply')) + self.supply_rel_to_circulating_supply: MetricPattern4[StoredF64] = MetricPattern4(client, _m(acc, 'supply_rel_to_circulating_supply')) self.unrealized_loss_rel_to_market_cap: MetricPattern3[StoredF32] = MetricPattern3(client, _m(acc, 'unrealized_loss_rel_to_market_cap')) self.unrealized_profit_rel_to_market_cap: MetricPattern3[StoredF32] = MetricPattern3(client, _m(acc, 'unrealized_profit_rel_to_market_cap')) +class AddrCountPattern: + """Pattern struct for repeated tree structure.""" + + def __init__(self, client: BrkClientBase, acc: str): + """Create pattern node with accumulated metric name.""" + self.all: MetricPattern1[StoredU64] = MetricPattern1(client, (f'addr_{{acc}}' if acc else 'addr')) + self.p2a: MetricPattern1[StoredU64] = MetricPattern1(client, (f'p2a_addr_{{acc}}' if acc else 'p2a_addr')) + self.p2pk33: MetricPattern1[StoredU64] = MetricPattern1(client, (f'p2pk33_addr_{{acc}}' if acc else 'p2pk33_addr')) + self.p2pk65: MetricPattern1[StoredU64] = MetricPattern1(client, (f'p2pk65_addr_{{acc}}' if acc else 'p2pk65_addr')) + self.p2pkh: MetricPattern1[StoredU64] = MetricPattern1(client, (f'p2pkh_addr_{{acc}}' if acc else 'p2pkh_addr')) + self.p2sh: MetricPattern1[StoredU64] = MetricPattern1(client, (f'p2sh_addr_{{acc}}' if acc else 'p2sh_addr')) + self.p2tr: MetricPattern1[StoredU64] = MetricPattern1(client, (f'p2tr_addr_{{acc}}' if acc else 'p2tr_addr')) + self.p2wpkh: MetricPattern1[StoredU64] = MetricPattern1(client, (f'p2wpkh_addr_{{acc}}' if acc else 'p2wpkh_addr')) + self.p2wsh: MetricPattern1[StoredU64] = MetricPattern1(client, (f'p2wsh_addr_{{acc}}' if acc else 'p2wsh_addr')) + class UnrealizedPattern: """Pattern struct for repeated tree structure.""" - + def __init__(self, client: BrkClientBase, acc: str): """Create pattern node with accumulated metric name.""" self.neg_unrealized_loss: MetricPattern3[Dollars] = MetricPattern3(client, _m(acc, 'neg_unrealized_loss')) self.net_unrealized_pnl: 
MetricPattern3[Dollars] = MetricPattern3(client, _m(acc, 'net_unrealized_pnl')) - self.supply_in_loss: SupplyPattern2 = SupplyPattern2(client, _m(acc, 'supply_in_loss')) - self.supply_in_loss_value: SupplyValuePattern = SupplyValuePattern(client, _m(acc, 'supply_in_loss')) - self.supply_in_profit: SupplyPattern2 = SupplyPattern2(client, _m(acc, 'supply_in_profit')) - self.supply_in_profit_value: SupplyValuePattern = SupplyValuePattern(client, _m(acc, 'supply_in_profit')) + self.supply_in_loss: SupplyInLossPattern = SupplyInLossPattern(client, _m(acc, 'supply_in_loss')) + self.supply_in_loss_value: SupplyInLossValuePattern = SupplyInLossValuePattern(client, _m(acc, 'supply_in_loss')) + self.supply_in_profit: SupplyInLossPattern = SupplyInLossPattern(client, _m(acc, 'supply_in_profit')) + self.supply_in_profit_value: SupplyInLossValuePattern = SupplyInLossValuePattern(client, _m(acc, 'supply_in_profit')) self.total_unrealized_pnl: MetricPattern3[Dollars] = MetricPattern3(client, _m(acc, 'total_unrealized_pnl')) self.unrealized_loss: MetricPattern3[Dollars] = MetricPattern3(client, _m(acc, 'unrealized_loss')) self.unrealized_profit: MetricPattern3[Dollars] = MetricPattern3(client, _m(acc, 'unrealized_profit')) -class AddresstypeToHeightToAddrCountPattern(Generic[T]): - """Pattern struct for repeated tree structure.""" - - def __init__(self, client: BrkClientBase, acc: str): - """Create pattern node with accumulated metric name.""" - self.p2a: MetricPattern26[T] = MetricPattern26(client, (f'p2a_{{acc}}' if acc else 'p2a')) - self.p2pk33: MetricPattern26[T] = MetricPattern26(client, (f'p2pk33_{{acc}}' if acc else 'p2pk33')) - self.p2pk65: MetricPattern26[T] = MetricPattern26(client, (f'p2pk65_{{acc}}' if acc else 'p2pk65')) - self.p2pkh: MetricPattern26[T] = MetricPattern26(client, (f'p2pkh_{{acc}}' if acc else 'p2pkh')) - self.p2sh: MetricPattern26[T] = MetricPattern26(client, (f'p2sh_{{acc}}' if acc else 'p2sh')) - self.p2tr: MetricPattern26[T] = 
MetricPattern26(client, (f'p2tr_{{acc}}' if acc else 'p2tr')) - self.p2wpkh: MetricPattern26[T] = MetricPattern26(client, (f'p2wpkh_{{acc}}' if acc else 'p2wpkh')) - self.p2wsh: MetricPattern26[T] = MetricPattern26(client, (f'p2wsh_{{acc}}' if acc else 'p2wsh')) - -class CountPattern2(Generic[T]): - """Pattern struct for repeated tree structure.""" - - def __init__(self, client: BrkClientBase, acc: str): - """Create pattern node with accumulated metric name.""" - self.average: MetricPattern2[T] = MetricPattern2(client, _m(acc, 'average')) - self.cumulative: MetricPattern6[T] = MetricPattern6(client, _m(acc, 'cumulative')) - self.distribution: BlockIntervalPattern[T] = BlockIntervalPattern(client, acc) - self.max: MetricPattern6[T] = MetricPattern6(client, _m(acc, 'max')) - self.min: MetricPattern6[T] = MetricPattern6(client, _m(acc, 'min')) - self.minmax: MinmaxPattern[T] = MinmaxPattern(client, acc) - self.sum: MetricPattern6[T] = MetricPattern6(client, _m(acc, 'sum')) - self.sum_cum: SumCumPattern[T] = SumCumPattern(client, acc) - class _0satsPattern: """Pattern struct for repeated tree structure.""" - + def __init__(self, client: BrkClientBase, acc: str): """Create pattern node with accumulated metric name.""" self.activity: ActivityPattern2 = ActivityPattern2(client, acc) @@ -2612,390 +2433,366 @@ class _0satsPattern: self.cost_basis: CostBasisPattern = CostBasisPattern(client, acc) self.realized: RealizedPattern = RealizedPattern(client, acc) self.relative: RelativePattern = RelativePattern(client, acc) - self.supply: SupplyPattern3 = SupplyPattern3(client, acc) + self.supply: SupplyPattern2 = SupplyPattern2(client, acc) self.unrealized: UnrealizedPattern = UnrealizedPattern(client, acc) class PeriodCagrPattern: """Pattern struct for repeated tree structure.""" - + def __init__(self, client: BrkClientBase, acc: str): """Create pattern node with accumulated metric name.""" - self._10y: MetricPattern5[StoredF32] = MetricPattern5(client, (f'10y_{{acc}}' if acc 
else '10y')) - self._2y: MetricPattern5[StoredF32] = MetricPattern5(client, (f'2y_{{acc}}' if acc else '2y')) - self._3y: MetricPattern5[StoredF32] = MetricPattern5(client, (f'3y_{{acc}}' if acc else '3y')) - self._4y: MetricPattern5[StoredF32] = MetricPattern5(client, (f'4y_{{acc}}' if acc else '4y')) - self._5y: MetricPattern5[StoredF32] = MetricPattern5(client, (f'5y_{{acc}}' if acc else '5y')) - self._6y: MetricPattern5[StoredF32] = MetricPattern5(client, (f'6y_{{acc}}' if acc else '6y')) - self._8y: MetricPattern5[StoredF32] = MetricPattern5(client, (f'8y_{{acc}}' if acc else '8y')) + self._10y: MetricPattern4[StoredF32] = MetricPattern4(client, (f'10y_{{acc}}' if acc else '10y')) + self._2y: MetricPattern4[StoredF32] = MetricPattern4(client, (f'2y_{{acc}}' if acc else '2y')) + self._3y: MetricPattern4[StoredF32] = MetricPattern4(client, (f'3y_{{acc}}' if acc else '3y')) + self._4y: MetricPattern4[StoredF32] = MetricPattern4(client, (f'4y_{{acc}}' if acc else '4y')) + self._5y: MetricPattern4[StoredF32] = MetricPattern4(client, (f'5y_{{acc}}' if acc else '5y')) + self._6y: MetricPattern4[StoredF32] = MetricPattern4(client, (f'6y_{{acc}}' if acc else '6y')) + self._8y: MetricPattern4[StoredF32] = MetricPattern4(client, (f'8y_{{acc}}' if acc else '8y')) -class BlockSizePattern(Generic[T]): +class BitcoinPattern(Generic[T]): """Pattern struct for repeated tree structure.""" - - def __init__(self, client: BrkClientBase, acc: str): - """Create pattern node with accumulated metric name.""" - self.average: MetricPattern6[T] = MetricPattern6(client, _m(acc, 'average')) - self.cumulative: MetricPattern4[T] = MetricPattern4(client, _m(acc, 'cumulative')) - self.distribution: BlockIntervalPattern[T] = BlockIntervalPattern(client, acc) - self.max: MetricPattern6[T] = MetricPattern6(client, _m(acc, 'max')) - self.min: MetricPattern6[T] = MetricPattern6(client, _m(acc, 'min')) - self.sum: MetricPattern6[T] = MetricPattern6(client, _m(acc, 'sum')) - self.sum_cum: 
SumCumPattern[T] = SumCumPattern(client, acc) -class DollarsPattern(Generic[T]): - """Pattern struct for repeated tree structure.""" - def __init__(self, client: BrkClientBase, acc: str): """Create pattern node with accumulated metric name.""" self.average: MetricPattern2[T] = MetricPattern2(client, _m(acc, 'average')) - self.base: MetricPattern26[T] = MetricPattern26(client, acc) + self.base: MetricPattern23[T] = MetricPattern23(client, acc) self.cumulative: MetricPattern1[T] = MetricPattern1(client, _m(acc, 'cumulative')) self.max: MetricPattern2[T] = MetricPattern2(client, _m(acc, 'max')) self.min: MetricPattern2[T] = MetricPattern2(client, _m(acc, 'min')) self.percentiles: PercentilesPattern[T] = PercentilesPattern(client, acc) self.sum: MetricPattern2[T] = MetricPattern2(client, _m(acc, 'sum')) -class _10yPattern: - """Pattern struct for repeated tree structure.""" - - def __init__(self, client: BrkClientBase, acc: str): - """Create pattern node with accumulated metric name.""" - self.activity: ActivityPattern2 = ActivityPattern2(client, acc) - self.cost_basis: CostBasisPattern = CostBasisPattern(client, acc) - self.realized: RealizedPattern4 = RealizedPattern4(client, acc) - self.relative: RelativePattern = RelativePattern(client, acc) - self.supply: SupplyPattern3 = SupplyPattern3(client, acc) - self.unrealized: UnrealizedPattern = UnrealizedPattern(client, acc) - -class _10yTo12yPattern: - """Pattern struct for repeated tree structure.""" - - def __init__(self, client: BrkClientBase, acc: str): - """Create pattern node with accumulated metric name.""" - self.activity: ActivityPattern2 = ActivityPattern2(client, acc) - self.cost_basis: CostBasisPattern2 = CostBasisPattern2(client, acc) - self.realized: RealizedPattern2 = RealizedPattern2(client, acc) - self.relative: RelativePattern2 = RelativePattern2(client, acc) - self.supply: SupplyPattern3 = SupplyPattern3(client, acc) - self.unrealized: UnrealizedPattern = UnrealizedPattern(client, acc) - -class 
_100btcPattern: - """Pattern struct for repeated tree structure.""" - - def __init__(self, client: BrkClientBase, acc: str): - """Create pattern node with accumulated metric name.""" - self.activity: ActivityPattern2 = ActivityPattern2(client, acc) - self.cost_basis: CostBasisPattern = CostBasisPattern(client, acc) - self.realized: RealizedPattern = RealizedPattern(client, acc) - self.relative: RelativePattern = RelativePattern(client, acc) - self.supply: SupplyPattern3 = SupplyPattern3(client, acc) - self.unrealized: UnrealizedPattern = UnrealizedPattern(client, acc) - class _0satsPattern2: """Pattern struct for repeated tree structure.""" - + def __init__(self, client: BrkClientBase, acc: str): """Create pattern node with accumulated metric name.""" self.activity: ActivityPattern2 = ActivityPattern2(client, acc) self.cost_basis: CostBasisPattern = CostBasisPattern(client, acc) self.realized: RealizedPattern = RealizedPattern(client, acc) self.relative: RelativePattern4 = RelativePattern4(client, _m(acc, 'supply_in')) - self.supply: SupplyPattern3 = SupplyPattern3(client, acc) + self.supply: SupplyPattern2 = SupplyPattern2(client, acc) self.unrealized: UnrealizedPattern = UnrealizedPattern(client, acc) -class BitcoinPattern(Generic[T]): +class _10yTo12yPattern: """Pattern struct for repeated tree structure.""" - + + def __init__(self, client: BrkClientBase, acc: str): + """Create pattern node with accumulated metric name.""" + self.activity: ActivityPattern2 = ActivityPattern2(client, acc) + self.cost_basis: CostBasisPattern2 = CostBasisPattern2(client, acc) + self.realized: RealizedPattern2 = RealizedPattern2(client, acc) + self.relative: RelativePattern2 = RelativePattern2(client, acc) + self.supply: SupplyPattern2 = SupplyPattern2(client, acc) + self.unrealized: UnrealizedPattern = UnrealizedPattern(client, acc) + +class _10yPattern: + """Pattern struct for repeated tree structure.""" + + def __init__(self, client: BrkClientBase, acc: str): + """Create pattern 
node with accumulated metric name.""" + self.activity: ActivityPattern2 = ActivityPattern2(client, acc) + self.cost_basis: CostBasisPattern = CostBasisPattern(client, acc) + self.realized: RealizedPattern4 = RealizedPattern4(client, acc) + self.relative: RelativePattern = RelativePattern(client, acc) + self.supply: SupplyPattern2 = SupplyPattern2(client, acc) + self.unrealized: UnrealizedPattern = UnrealizedPattern(client, acc) + +class _100btcPattern: + """Pattern struct for repeated tree structure.""" + + def __init__(self, client: BrkClientBase, acc: str): + """Create pattern node with accumulated metric name.""" + self.activity: ActivityPattern2 = ActivityPattern2(client, acc) + self.cost_basis: CostBasisPattern = CostBasisPattern(client, acc) + self.realized: RealizedPattern = RealizedPattern(client, acc) + self.relative: RelativePattern = RelativePattern(client, acc) + self.supply: SupplyPattern2 = SupplyPattern2(client, acc) + self.unrealized: UnrealizedPattern = UnrealizedPattern(client, acc) + +class BlockSizePattern(Generic[T]): + """Pattern struct for repeated tree structure.""" + def __init__(self, client: BrkClientBase, acc: str): """Create pattern node with accumulated metric name.""" self.average: MetricPattern2[T] = MetricPattern2(client, _m(acc, 'average')) - self.base: MetricPattern26[T] = MetricPattern26(client, acc) - self.cumulative: MetricPattern2[T] = MetricPattern2(client, _m(acc, 'cum')) + self.cumulative: MetricPattern1[T] = MetricPattern1(client, _m(acc, 'cumulative')) self.max: MetricPattern2[T] = MetricPattern2(client, _m(acc, 'max')) self.min: MetricPattern2[T] = MetricPattern2(client, _m(acc, 'min')) + self.percentiles: PercentilesPattern[T] = PercentilesPattern(client, acc) self.sum: MetricPattern2[T] = MetricPattern2(client, _m(acc, 'sum')) class ActivityPattern2: """Pattern struct for repeated tree structure.""" - + def __init__(self, client: BrkClientBase, acc: str): """Create pattern node with accumulated metric name.""" 
self.coinblocks_destroyed: BlockCountPattern[StoredF64] = BlockCountPattern(client, _m(acc, 'coinblocks_destroyed')) self.coindays_destroyed: BlockCountPattern[StoredF64] = BlockCountPattern(client, _m(acc, 'coindays_destroyed')) - self.satblocks_destroyed: MetricPattern26[Sats] = MetricPattern26(client, _m(acc, 'satblocks_destroyed')) - self.satdays_destroyed: MetricPattern26[Sats] = MetricPattern26(client, _m(acc, 'satdays_destroyed')) + self.satblocks_destroyed: MetricPattern23[Sats] = MetricPattern23(client, _m(acc, 'satblocks_destroyed')) + self.satdays_destroyed: MetricPattern23[Sats] = MetricPattern23(client, _m(acc, 'satdays_destroyed')) self.sent: SentPattern = SentPattern(client, _m(acc, 'sent')) -class SentPattern: - """Pattern struct for repeated tree structure.""" - - def __init__(self, client: BrkClientBase, acc: str): - """Create pattern node with accumulated metric name.""" - self.base: MetricPattern26[Sats] = MetricPattern26(client, _m(acc, 'height_fee')) - self.bitcoin: BlockCountPattern[Bitcoin] = BlockCountPattern(client, _m(acc, 'btc')) - self.dollars: SumCumPattern[Dollars] = SumCumPattern(client, _m(acc, 'usd')) - self.dollars_source: MetricPattern26[Dollars] = MetricPattern26(client, _m(acc, 'usd')) - self.sats: SumCumPattern[Sats] = SumCumPattern(client, _m(acc, 'fee')) - -class SupplyPattern3: - """Pattern struct for repeated tree structure.""" - - def __init__(self, client: BrkClientBase, acc: str): - """Create pattern node with accumulated metric name.""" - self.supply: SupplyPattern2 = SupplyPattern2(client, _m(acc, 'supply')) - self.supply_half: ActiveSupplyPattern = ActiveSupplyPattern(client, _m(acc, 'supply_half')) - self.supply_half_value: ActiveSupplyPattern = ActiveSupplyPattern(client, _m(acc, 'supply_half')) - self.supply_value: SupplyValuePattern = SupplyValuePattern(client, _m(acc, 'supply')) - self.utxo_count: MetricPattern1[StoredU64] = MetricPattern1(client, _m(acc, 'utxo_count')) - class PercentilesPattern(Generic[T]): 
"""Pattern struct for repeated tree structure.""" - - def __init__(self, client: BrkClientBase, acc: str): - """Create pattern node with accumulated metric name.""" - self.median: MetricPattern22[T] = MetricPattern22(client, _m(acc, 'median')) - self.pct10: MetricPattern22[T] = MetricPattern22(client, _m(acc, 'pct10')) - self.pct25: MetricPattern22[T] = MetricPattern22(client, _m(acc, 'pct25')) - self.pct75: MetricPattern22[T] = MetricPattern22(client, _m(acc, 'pct75')) - self.pct90: MetricPattern22[T] = MetricPattern22(client, _m(acc, 'pct90')) -class SupplyPattern2: - """Pattern struct for repeated tree structure.""" - def __init__(self, client: BrkClientBase, acc: str): """Create pattern node with accumulated metric name.""" - self.base: MetricPattern26[Sats] = MetricPattern26(client, acc) - self.bitcoin: MetricPattern5[Bitcoin] = MetricPattern5(client, _m(acc, 'btc')) - self.dollars: MetricPattern5[Dollars] = MetricPattern5(client, _m(acc, 'usd')) - self.sats: MetricPattern7[Sats] = MetricPattern7(client, acc) + self.median: MetricPattern19[T] = MetricPattern19(client, _m(acc, 'median')) + self.pct10: MetricPattern19[T] = MetricPattern19(client, _m(acc, 'pct10')) + self.pct25: MetricPattern19[T] = MetricPattern19(client, _m(acc, 'pct25')) + self.pct75: MetricPattern19[T] = MetricPattern19(client, _m(acc, 'pct75')) + self.pct90: MetricPattern19[T] = MetricPattern19(client, _m(acc, 'pct90')) -class PriceHighInSatsPattern(Generic[T]): +class IntervalPattern(Generic[T]): """Pattern struct for repeated tree structure.""" - - def __init__(self, client: BrkClientBase, acc: str): - """Create pattern node with accumulated metric name.""" - self.dateindex: MetricPattern22[T] = MetricPattern22(client, acc) - self.height: MetricPattern26[T] = MetricPattern26(client, acc) - self.max: MetricPattern24[T] = MetricPattern24(client, _m(acc, 'max')) - self.rest: MetricPattern7[T] = MetricPattern7(client, _m(acc, 'max')) -class PriceLowInSatsPattern(Generic[T]): - """Pattern 
struct for repeated tree structure.""" - def __init__(self, client: BrkClientBase, acc: str): """Create pattern node with accumulated metric name.""" - self.dateindex: MetricPattern22[T] = MetricPattern22(client, acc) - self.height: MetricPattern26[T] = MetricPattern26(client, acc) - self.min: MetricPattern24[T] = MetricPattern24(client, _m(acc, 'min')) - self.rest: MetricPattern7[T] = MetricPattern7(client, _m(acc, 'min')) - -class BlockIntervalPattern(Generic[T]): - """Pattern struct for repeated tree structure.""" - - def __init__(self, client: BrkClientBase, acc: str): - """Create pattern node with accumulated metric name.""" - self.average: MetricPattern22[T] = MetricPattern22(client, _m(acc, 'average')) - self.max: MetricPattern22[T] = MetricPattern22(client, _m(acc, 'max')) - self.min: MetricPattern22[T] = MetricPattern22(client, _m(acc, 'min')) + self.average: MetricPattern2[T] = MetricPattern2(client, _m(acc, 'average')) + self.base: MetricPattern23[T] = MetricPattern23(client, acc) + self.max: MetricPattern2[T] = MetricPattern2(client, _m(acc, 'max')) + self.min: MetricPattern2[T] = MetricPattern2(client, _m(acc, 'min')) self.percentiles: PercentilesPattern[T] = PercentilesPattern(client, acc) -class ActiveSupplyPattern: +class SupplyInLossPattern: """Pattern struct for repeated tree structure.""" - - def __init__(self, client: BrkClientBase, acc: str): - """Create pattern node with accumulated metric name.""" - self.bitcoin: MetricPattern1[Bitcoin] = MetricPattern1(client, _m(acc, 'btc')) - self.dollars: MetricPattern1[Dollars] = MetricPattern1(client, _m(acc, 'usd')) - self.sats: MetricPattern1[Sats] = MetricPattern1(client, acc) -class CostBasisPattern2: - """Pattern struct for repeated tree structure.""" - def __init__(self, client: BrkClientBase, acc: str): """Create pattern node with accumulated metric name.""" - self.max_cost_basis: MetricPattern1[Dollars] = MetricPattern1(client, _m(acc, 'max_cost_basis')) - self.min_cost_basis: 
MetricPattern1[Dollars] = MetricPattern1(client, _m(acc, 'min_cost_basis')) - self.percentiles: PercentilesPattern2 = PercentilesPattern2(client, _m(acc, 'cost_basis')) + self.base: MetricPattern7[Sats] = MetricPattern7(client, acc) + self.bitcoin: MetricPattern4[Bitcoin] = MetricPattern4(client, _m(acc, 'btc')) + self.dollars: MetricPattern4[Dollars] = MetricPattern4(client, _m(acc, 'usd')) + self.sats: MetricPattern5[Sats] = MetricPattern5(client, acc) -class CoinbasePattern: +class PriceHighSatsPattern(Generic[T]): """Pattern struct for repeated tree structure.""" - + def __init__(self, client: BrkClientBase, acc: str): """Create pattern node with accumulated metric name.""" - self.bitcoin: BitcoinPattern[Bitcoin] = BitcoinPattern(client, _m(acc, 'btc')) - self.dollars: DollarsPattern[Dollars] = DollarsPattern(client, _m(acc, 'usd')) - self.sats: DollarsPattern[Sats] = DollarsPattern(client, acc) + self.dateindex: MetricPattern19[T] = MetricPattern19(client, acc) + self.difficultyepoch: MetricPattern21[T] = MetricPattern21(client, _m(acc, 'max')) + self.height: MetricPattern23[T] = MetricPattern23(client, acc) + self.rest: MetricPattern5[T] = MetricPattern5(client, _m(acc, 'max')) + +class TxVsizePattern(Generic[T]): + """Pattern struct for repeated tree structure.""" + + def __init__(self, client: BrkClientBase, acc: str): + """Create pattern node with accumulated metric name.""" + self.average: MetricPattern1[T] = MetricPattern1(client, _m(acc, 'average')) + self.max: MetricPattern1[T] = MetricPattern1(client, _m(acc, 'max')) + self.min: MetricPattern1[T] = MetricPattern1(client, _m(acc, 'min')) + self.percentiles: PercentilesPattern[T] = PercentilesPattern(client, acc) class UnclaimedRewardsPattern: """Pattern struct for repeated tree structure.""" - + def __init__(self, client: BrkClientBase, acc: str): """Create pattern node with accumulated metric name.""" self.bitcoin: BlockCountPattern[Bitcoin] = BlockCountPattern(client, _m(acc, 'btc')) self.dollars: 
BlockCountPattern[Dollars] = BlockCountPattern(client, _m(acc, 'usd')) self.sats: BlockCountPattern[Sats] = BlockCountPattern(client, acc) -class BlockCountPattern(Generic[T]): +class CostBasisPattern2: """Pattern struct for repeated tree structure.""" - + def __init__(self, client: BrkClientBase, acc: str): """Create pattern node with accumulated metric name.""" - self.base: MetricPattern26[T] = MetricPattern26(client, acc) + self.max: MetricPattern1[Dollars] = MetricPattern1(client, _m(acc, 'max_cost_basis')) + self.min: MetricPattern1[Dollars] = MetricPattern1(client, _m(acc, 'min_cost_basis')) + self.percentiles: PercentilesPattern2 = PercentilesPattern2(client, _m(acc, 'cost_basis')) + +class ActiveSupplyPattern: + """Pattern struct for repeated tree structure.""" + + def __init__(self, client: BrkClientBase, acc: str): + """Create pattern node with accumulated metric name.""" + self.bitcoin: MetricPattern1[Bitcoin] = MetricPattern1(client, _m(acc, 'btc')) + self.dollars: MetricPattern1[Dollars] = MetricPattern1(client, _m(acc, 'usd')) + self.sats: MetricPattern1[Sats] = MetricPattern1(client, acc) + +class SentPattern: + """Pattern struct for repeated tree structure.""" + + def __init__(self, client: BrkClientBase, acc: str): + """Create pattern node with accumulated metric name.""" + self.bitcoin: BlockCountPattern[Bitcoin] = BlockCountPattern(client, _m(acc, 'btc')) + self.dollars: BlockCountPattern[Dollars] = BlockCountPattern(client, _m(acc, 'usd')) + self.sats: SatsPattern[Sats] = SatsPattern(client, acc) + +class CoinbasePattern: + """Pattern struct for repeated tree structure.""" + + def __init__(self, client: BrkClientBase, acc: str): + """Create pattern node with accumulated metric name.""" + self.bitcoin: BitcoinPattern[Bitcoin] = BitcoinPattern(client, _m(acc, 'btc')) + self.dollars: BitcoinPattern[Dollars] = BitcoinPattern(client, _m(acc, 'usd')) + self.sats: BitcoinPattern[Sats] = BitcoinPattern(client, acc) + +class SupplyPattern2: + """Pattern 
struct for repeated tree structure.""" + + def __init__(self, client: BrkClientBase, acc: str): + """Create pattern node with accumulated metric name.""" + self.supply: ActiveSupplyPattern = ActiveSupplyPattern(client, _m(acc, 'supply')) + self.supply_half: ActiveSupplyPattern = ActiveSupplyPattern(client, _m(acc, 'supply_half')) + self.utxo_count: MetricPattern1[StoredU64] = MetricPattern1(client, _m(acc, 'utxo_count')) + +class SatsPattern(Generic[T]): + """Pattern struct for repeated tree structure.""" + + def __init__(self, client: BrkClientBase, acc: str): + """Create pattern node with accumulated metric name.""" + self.base: MetricPattern23[T] = MetricPattern23(client, acc) self.cumulative: MetricPattern2[T] = MetricPattern2(client, _m(acc, 'cumulative')) self.sum: MetricPattern2[T] = MetricPattern2(client, _m(acc, 'sum')) -class SupplyValuePattern: +class CostBasisPattern: """Pattern struct for repeated tree structure.""" - + def __init__(self, client: BrkClientBase, acc: str): """Create pattern node with accumulated metric name.""" - self.bitcoin: MetricPattern26[Bitcoin] = MetricPattern26(client, _m(acc, 'btc')) - self.dollars: MetricPattern26[Dollars] = MetricPattern26(client, _m(acc, 'usd')) + self.max: MetricPattern1[Dollars] = MetricPattern1(client, _m(acc, 'max_cost_basis')) + self.min: MetricPattern1[Dollars] = MetricPattern1(client, _m(acc, 'min_cost_basis')) + +class SupplyInLossValuePattern: + """Pattern struct for repeated tree structure.""" + + def __init__(self, client: BrkClientBase, acc: str): + """Create pattern node with accumulated metric name.""" + self.bitcoin: MetricPattern23[Bitcoin] = MetricPattern23(client, _m(acc, 'btc')) + self.dollars: MetricPattern23[Dollars] = MetricPattern23(client, _m(acc, 'usd')) + +class _1dReturns1mSdPattern: + """Pattern struct for repeated tree structure.""" + + def __init__(self, client: BrkClientBase, acc: str): + """Create pattern node with accumulated metric name.""" + self.sd: 
MetricPattern4[StoredF32] = MetricPattern4(client, _m(acc, 'sd')) + self.sma: MetricPattern4[StoredF32] = MetricPattern4(client, _m(acc, 'sma')) class RelativePattern4: """Pattern struct for repeated tree structure.""" - + def __init__(self, client: BrkClientBase, acc: str): """Create pattern node with accumulated metric name.""" self.supply_in_loss_rel_to_own_supply: MetricPattern3[StoredF64] = MetricPattern3(client, _m(acc, 'loss_rel_to_own_supply')) self.supply_in_profit_rel_to_own_supply: MetricPattern3[StoredF64] = MetricPattern3(client, _m(acc, 'profit_rel_to_own_supply')) -class CostBasisPattern: +class BlockCountPattern(Generic[T]): """Pattern struct for repeated tree structure.""" - - def __init__(self, client: BrkClientBase, acc: str): - """Create pattern node with accumulated metric name.""" - self.max_cost_basis: MetricPattern1[Dollars] = MetricPattern1(client, _m(acc, 'max_cost_basis')) - self.min_cost_basis: MetricPattern1[Dollars] = MetricPattern1(client, _m(acc, 'min_cost_basis')) -class _1dReturns1mSdPattern: - """Pattern struct for repeated tree structure.""" - - def __init__(self, client: BrkClientBase, acc: str): - """Create pattern node with accumulated metric name.""" - self.sd: MetricPattern5[StoredF32] = MetricPattern5(client, _m(acc, 'sd')) - self.sma: MetricPattern5[StoredF32] = MetricPattern5(client, _m(acc, 'sma')) - -class MinmaxPattern(Generic[T]): - """Pattern struct for repeated tree structure.""" - - def __init__(self, client: BrkClientBase, acc: str): - """Create pattern node with accumulated metric name.""" - self.max: MetricPattern22[T] = MetricPattern22(client, _m(acc, 'max')) - self.min: MetricPattern22[T] = MetricPattern22(client, _m(acc, 'min')) - -class SumCumPattern(Generic[T]): - """Pattern struct for repeated tree structure.""" - def __init__(self, client: BrkClientBase, acc: str): """Create pattern node with accumulated metric name.""" self.cumulative: MetricPattern1[T] = MetricPattern1(client, _m(acc, 'cumulative')) - 
self.sum: MetricPattern2[T] = MetricPattern2(client, _m(acc, 'sum')) - -class IndexesPattern2(Generic[T]): - """Pattern struct for repeated tree structure.""" - - def __init__(self, client: BrkClientBase, acc: str): - """Create pattern node with accumulated metric name.""" - self.dateindex: MetricPattern22[T] = MetricPattern22(client, acc) - self.rest: MetricPattern7[T] = MetricPattern7(client, _m(acc, 'average')) - -class DifficultyAdjustmentPattern(Generic[T]): - """Pattern struct for repeated tree structure.""" - - def __init__(self, client: BrkClientBase, acc: str): - """Create pattern node with accumulated metric name.""" - self.base: MetricPattern26[T] = MetricPattern26(client, acc) - self.rest: MetricPattern2[T] = MetricPattern2(client, _m(acc, 'sum')) + self.sum: MetricPattern1[T] = MetricPattern1(client, acc) class RealizedPriceExtraPattern: """Pattern struct for repeated tree structure.""" - + def __init__(self, client: BrkClientBase, acc: str): """Create pattern node with accumulated metric name.""" - self.ratio: MetricPattern5[StoredF32] = MetricPattern5(client, _m(acc, 'ratio')) + self.ratio: MetricPattern4[StoredF32] = MetricPattern4(client, _m(acc, 'ratio')) # Catalog tree classes class CatalogTree: """Catalog tree node.""" - - def __init__(self, client: BrkClientBase, base_path: str = ''): - self.computed: CatalogTree_Computed = CatalogTree_Computed(client, f'{base_path}_computed') - self.indexed: CatalogTree_Indexed = CatalogTree_Indexed(client, f'{base_path}_indexed') -class CatalogTree_Computed: - """Catalog tree node.""" - def __init__(self, client: BrkClientBase, base_path: str = ''): - self.blocks: CatalogTree_Computed_Blocks = CatalogTree_Computed_Blocks(client, f'{base_path}_blocks') - self.cointime: CatalogTree_Computed_Cointime = CatalogTree_Computed_Cointime(client, f'{base_path}_cointime') - self.constants: CatalogTree_Computed_Constants = CatalogTree_Computed_Constants(client, f'{base_path}_constants') - self.distribution: 
CatalogTree_Computed_Distribution = CatalogTree_Computed_Distribution(client, f'{base_path}_distribution') - self.indexes: CatalogTree_Computed_Indexes = CatalogTree_Computed_Indexes(client, f'{base_path}_indexes') - self.inputs: CatalogTree_Computed_Inputs = CatalogTree_Computed_Inputs(client, f'{base_path}_inputs') - self.market: CatalogTree_Computed_Market = CatalogTree_Computed_Market(client, f'{base_path}_market') - self.outputs: CatalogTree_Computed_Outputs = CatalogTree_Computed_Outputs(client, f'{base_path}_outputs') - self.pools: CatalogTree_Computed_Pools = CatalogTree_Computed_Pools(client, f'{base_path}_pools') - self.positions: CatalogTree_Computed_Positions = CatalogTree_Computed_Positions(client, f'{base_path}_positions') - self.price: CatalogTree_Computed_Price = CatalogTree_Computed_Price(client, f'{base_path}_price') - self.scripts: CatalogTree_Computed_Scripts = CatalogTree_Computed_Scripts(client, f'{base_path}_scripts') - self.supply: CatalogTree_Computed_Supply = CatalogTree_Computed_Supply(client, f'{base_path}_supply') - self.transactions: CatalogTree_Computed_Transactions = CatalogTree_Computed_Transactions(client, f'{base_path}_transactions') + self.addresses: CatalogTree_Addresses = CatalogTree_Addresses(client, f'{base_path}_addresses') + self.blocks: CatalogTree_Blocks = CatalogTree_Blocks(client, f'{base_path}_blocks') + self.cointime: CatalogTree_Cointime = CatalogTree_Cointime(client, f'{base_path}_cointime') + self.constants: CatalogTree_Constants = CatalogTree_Constants(client, f'{base_path}_constants') + self.distribution: CatalogTree_Distribution = CatalogTree_Distribution(client, f'{base_path}_distribution') + self.indexes: CatalogTree_Indexes = CatalogTree_Indexes(client, f'{base_path}_indexes') + self.inputs: CatalogTree_Inputs = CatalogTree_Inputs(client, f'{base_path}_inputs') + self.market: CatalogTree_Market = CatalogTree_Market(client, f'{base_path}_market') + self.outputs: CatalogTree_Outputs = 
CatalogTree_Outputs(client, f'{base_path}_outputs') + self.pools: CatalogTree_Pools = CatalogTree_Pools(client, f'{base_path}_pools') + self.positions: CatalogTree_Positions = CatalogTree_Positions(client, f'{base_path}_positions') + self.price: CatalogTree_Price = CatalogTree_Price(client, f'{base_path}_price') + self.scripts: CatalogTree_Scripts = CatalogTree_Scripts(client, f'{base_path}_scripts') + self.supply: CatalogTree_Supply = CatalogTree_Supply(client, f'{base_path}_supply') + self.transactions: CatalogTree_Transactions = CatalogTree_Transactions(client, f'{base_path}_transactions') -class CatalogTree_Computed_Blocks: +class CatalogTree_Addresses: """Catalog tree node.""" - - def __init__(self, client: BrkClientBase, base_path: str = ''): - self.count: CatalogTree_Computed_Blocks_Count = CatalogTree_Computed_Blocks_Count(client, f'{base_path}_count') - self.difficulty: CatalogTree_Computed_Blocks_Difficulty = CatalogTree_Computed_Blocks_Difficulty(client, f'{base_path}_difficulty') - self.halving: CatalogTree_Computed_Blocks_Halving = CatalogTree_Computed_Blocks_Halving(client, f'{base_path}_halving') - self.interval: CatalogTree_Computed_Blocks_Interval = CatalogTree_Computed_Blocks_Interval(client, f'{base_path}_interval') - self.mining: CatalogTree_Computed_Blocks_Mining = CatalogTree_Computed_Blocks_Mining(client, f'{base_path}_mining') - self.rewards: CatalogTree_Computed_Blocks_Rewards = CatalogTree_Computed_Blocks_Rewards(client, f'{base_path}_rewards') - self.size: CatalogTree_Computed_Blocks_Size = CatalogTree_Computed_Blocks_Size(client, f'{base_path}_size') - self.time: CatalogTree_Computed_Blocks_Time = CatalogTree_Computed_Blocks_Time(client, f'{base_path}_time') - self.weight: CatalogTree_Computed_Blocks_Weight = CatalogTree_Computed_Blocks_Weight(client, f'{base_path}_weight') -class CatalogTree_Computed_Blocks_Count: - """Catalog tree node.""" - def __init__(self, client: BrkClientBase, base_path: str = ''): - self._1m_block_count: 
MetricPattern5[StoredU32] = MetricPattern5(client, f'{base_path}_1m_block_count') - self._1w_block_count: MetricPattern5[StoredU32] = MetricPattern5(client, f'{base_path}_1w_block_count') - self._1y_block_count: MetricPattern5[StoredU32] = MetricPattern5(client, f'{base_path}_1y_block_count') - self._24h_block_count: MetricPattern26[StoredU32] = MetricPattern26(client, f'{base_path}_24h_block_count') + self.first_p2aaddressindex: MetricPattern23[P2AAddressIndex] = MetricPattern23(client, f'{base_path}_first_p2aaddressindex') + self.first_p2pk33addressindex: MetricPattern23[P2PK33AddressIndex] = MetricPattern23(client, f'{base_path}_first_p2pk33addressindex') + self.first_p2pk65addressindex: MetricPattern23[P2PK65AddressIndex] = MetricPattern23(client, f'{base_path}_first_p2pk65addressindex') + self.first_p2pkhaddressindex: MetricPattern23[P2PKHAddressIndex] = MetricPattern23(client, f'{base_path}_first_p2pkhaddressindex') + self.first_p2shaddressindex: MetricPattern23[P2SHAddressIndex] = MetricPattern23(client, f'{base_path}_first_p2shaddressindex') + self.first_p2traddressindex: MetricPattern23[P2TRAddressIndex] = MetricPattern23(client, f'{base_path}_first_p2traddressindex') + self.first_p2wpkhaddressindex: MetricPattern23[P2WPKHAddressIndex] = MetricPattern23(client, f'{base_path}_first_p2wpkhaddressindex') + self.first_p2wshaddressindex: MetricPattern23[P2WSHAddressIndex] = MetricPattern23(client, f'{base_path}_first_p2wshaddressindex') + self.p2abytes: MetricPattern27[P2ABytes] = MetricPattern27(client, f'{base_path}_p2abytes') + self.p2pk33bytes: MetricPattern29[P2PK33Bytes] = MetricPattern29(client, f'{base_path}_p2pk33bytes') + self.p2pk65bytes: MetricPattern30[P2PK65Bytes] = MetricPattern30(client, f'{base_path}_p2pk65bytes') + self.p2pkhbytes: MetricPattern31[P2PKHBytes] = MetricPattern31(client, f'{base_path}_p2pkhbytes') + self.p2shbytes: MetricPattern32[P2SHBytes] = MetricPattern32(client, f'{base_path}_p2shbytes') + self.p2trbytes: 
MetricPattern33[P2TRBytes] = MetricPattern33(client, f'{base_path}_p2trbytes') + self.p2wpkhbytes: MetricPattern34[P2WPKHBytes] = MetricPattern34(client, f'{base_path}_p2wpkhbytes') + self.p2wshbytes: MetricPattern35[P2WSHBytes] = MetricPattern35(client, f'{base_path}_p2wshbytes') + +class CatalogTree_Blocks: + """Catalog tree node.""" + + def __init__(self, client: BrkClientBase, base_path: str = ''): + self.blockhash: MetricPattern23[BlockHash] = MetricPattern23(client, f'{base_path}_blockhash') + self.count: CatalogTree_Blocks_Count = CatalogTree_Blocks_Count(client, f'{base_path}_count') + self.difficulty: CatalogTree_Blocks_Difficulty = CatalogTree_Blocks_Difficulty(client, f'{base_path}_difficulty') + self.halving: CatalogTree_Blocks_Halving = CatalogTree_Blocks_Halving(client, f'{base_path}_halving') + self.interval: IntervalPattern[Timestamp] = IntervalPattern(client, 'block_interval') + self.mining: CatalogTree_Blocks_Mining = CatalogTree_Blocks_Mining(client, f'{base_path}_mining') + self.rewards: CatalogTree_Blocks_Rewards = CatalogTree_Blocks_Rewards(client, f'{base_path}_rewards') + self.size: CatalogTree_Blocks_Size = CatalogTree_Blocks_Size(client, f'{base_path}_size') + self.time: CatalogTree_Blocks_Time = CatalogTree_Blocks_Time(client, f'{base_path}_time') + self.timestamp: MetricPattern23[Timestamp] = MetricPattern23(client, f'{base_path}_timestamp') + self.total_size: MetricPattern23[StoredU64] = MetricPattern23(client, f'{base_path}_total_size') + self.weight: CatalogTree_Blocks_Weight = CatalogTree_Blocks_Weight(client, f'{base_path}_weight') + +class CatalogTree_Blocks_Count: + """Catalog tree node.""" + + def __init__(self, client: BrkClientBase, base_path: str = ''): + self._1m_block_count: MetricPattern1[StoredU32] = MetricPattern1(client, f'{base_path}_1m_block_count') + self._1m_start: MetricPattern23[Height] = MetricPattern23(client, f'{base_path}_1m_start') + self._1w_block_count: MetricPattern1[StoredU32] = MetricPattern1(client, 
f'{base_path}_1w_block_count') + self._1w_start: MetricPattern23[Height] = MetricPattern23(client, f'{base_path}_1w_start') + self._1y_block_count: MetricPattern1[StoredU32] = MetricPattern1(client, f'{base_path}_1y_block_count') + self._1y_start: MetricPattern23[Height] = MetricPattern23(client, f'{base_path}_1y_start') + self._24h_block_count: MetricPattern1[StoredU32] = MetricPattern1(client, f'{base_path}_24h_block_count') + self._24h_start: MetricPattern23[Height] = MetricPattern23(client, f'{base_path}_24h_start') self.block_count: BlockCountPattern[StoredU32] = BlockCountPattern(client, 'block_count') - self.block_count_target: MetricPattern5[StoredU64] = MetricPattern5(client, f'{base_path}_block_count_target') + self.block_count_target: MetricPattern4[StoredU64] = MetricPattern4(client, f'{base_path}_block_count_target') -class CatalogTree_Computed_Blocks_Difficulty: +class CatalogTree_Blocks_Difficulty: """Catalog tree node.""" - + def __init__(self, client: BrkClientBase, base_path: str = ''): + self.base: MetricPattern23[StoredF64] = MetricPattern23(client, f'{base_path}_base') self.blocks_before_next_difficulty_adjustment: MetricPattern1[StoredU32] = MetricPattern1(client, f'{base_path}_blocks_before_next_difficulty_adjustment') self.days_before_next_difficulty_adjustment: MetricPattern1[StoredF32] = MetricPattern1(client, f'{base_path}_days_before_next_difficulty_adjustment') - self.difficultyepoch: MetricPattern5[DifficultyEpoch] = MetricPattern5(client, f'{base_path}_difficultyepoch') + self.difficultyepoch: MetricPattern4[DifficultyEpoch] = MetricPattern4(client, f'{base_path}_difficultyepoch') -class CatalogTree_Computed_Blocks_Halving: +class CatalogTree_Blocks_Halving: """Catalog tree node.""" - + def __init__(self, client: BrkClientBase, base_path: str = ''): self.blocks_before_next_halving: MetricPattern1[StoredU32] = MetricPattern1(client, f'{base_path}_blocks_before_next_halving') self.days_before_next_halving: MetricPattern1[StoredF32] = 
MetricPattern1(client, f'{base_path}_days_before_next_halving') - self.halvingepoch: MetricPattern5[HalvingEpoch] = MetricPattern5(client, f'{base_path}_halvingepoch') + self.halvingepoch: MetricPattern4[HalvingEpoch] = MetricPattern4(client, f'{base_path}_halvingepoch') -class CatalogTree_Computed_Blocks_Interval: +class CatalogTree_Blocks_Mining: """Catalog tree node.""" - - def __init__(self, client: BrkClientBase, base_path: str = ''): - self.block_interval: BlockIntervalPattern[Timestamp] = BlockIntervalPattern(client, 'block_interval') - self.interval: MetricPattern26[Timestamp] = MetricPattern26(client, f'{base_path}_interval') -class CatalogTree_Computed_Blocks_Mining: - """Catalog tree node.""" - def __init__(self, client: BrkClientBase, base_path: str = ''): self.difficulty: MetricPattern2[StoredF64] = MetricPattern2(client, f'{base_path}_difficulty') - self.difficulty_adjustment: DifficultyAdjustmentPattern[StoredF32] = DifficultyAdjustmentPattern(client, 'difficulty_adjustment') + self.difficulty_adjustment: MetricPattern1[StoredF32] = MetricPattern1(client, f'{base_path}_difficulty_adjustment') self.difficulty_as_hash: MetricPattern1[StoredF32] = MetricPattern1(client, f'{base_path}_difficulty_as_hash') self.hash_price_phs: MetricPattern1[StoredF32] = MetricPattern1(client, f'{base_path}_hash_price_phs') self.hash_price_phs_min: MetricPattern1[StoredF32] = MetricPattern1(client, f'{base_path}_hash_price_phs_min') @@ -3003,67 +2800,68 @@ class CatalogTree_Computed_Blocks_Mining: self.hash_price_ths: MetricPattern1[StoredF32] = MetricPattern1(client, f'{base_path}_hash_price_ths') self.hash_price_ths_min: MetricPattern1[StoredF32] = MetricPattern1(client, f'{base_path}_hash_price_ths_min') self.hash_rate: MetricPattern1[StoredF64] = MetricPattern1(client, f'{base_path}_hash_rate') - self.hash_rate_1m_sma: MetricPattern5[StoredF32] = MetricPattern5(client, f'{base_path}_hash_rate_1m_sma') - self.hash_rate_1w_sma: MetricPattern5[StoredF64] = 
MetricPattern5(client, f'{base_path}_hash_rate_1w_sma') - self.hash_rate_1y_sma: MetricPattern5[StoredF32] = MetricPattern5(client, f'{base_path}_hash_rate_1y_sma') - self.hash_rate_2m_sma: MetricPattern5[StoredF32] = MetricPattern5(client, f'{base_path}_hash_rate_2m_sma') + self.hash_rate_1m_sma: MetricPattern4[StoredF32] = MetricPattern4(client, f'{base_path}_hash_rate_1m_sma') + self.hash_rate_1w_sma: MetricPattern4[StoredF64] = MetricPattern4(client, f'{base_path}_hash_rate_1w_sma') + self.hash_rate_1y_sma: MetricPattern4[StoredF32] = MetricPattern4(client, f'{base_path}_hash_rate_1y_sma') + self.hash_rate_2m_sma: MetricPattern4[StoredF32] = MetricPattern4(client, f'{base_path}_hash_rate_2m_sma') self.hash_value_phs: MetricPattern1[StoredF32] = MetricPattern1(client, f'{base_path}_hash_value_phs') self.hash_value_phs_min: MetricPattern1[StoredF32] = MetricPattern1(client, f'{base_path}_hash_value_phs_min') self.hash_value_rebound: MetricPattern1[StoredF32] = MetricPattern1(client, f'{base_path}_hash_value_rebound') self.hash_value_ths: MetricPattern1[StoredF32] = MetricPattern1(client, f'{base_path}_hash_value_ths') self.hash_value_ths_min: MetricPattern1[StoredF32] = MetricPattern1(client, f'{base_path}_hash_value_ths_min') -class CatalogTree_Computed_Blocks_Rewards: +class CatalogTree_Blocks_Rewards: """Catalog tree node.""" - + def __init__(self, client: BrkClientBase, base_path: str = ''): - self._24h_coinbase_sum: MetricPattern26[Sats] = MetricPattern26(client, f'{base_path}_24h_coinbase_sum') - self._24h_coinbase_usd_sum: MetricPattern26[Dollars] = MetricPattern26(client, f'{base_path}_24h_coinbase_usd_sum') + self._24h_coinbase_sum: MetricPattern23[Sats] = MetricPattern23(client, f'{base_path}_24h_coinbase_sum') + self._24h_coinbase_usd_sum: MetricPattern23[Dollars] = MetricPattern23(client, f'{base_path}_24h_coinbase_usd_sum') self.coinbase: CoinbasePattern = CoinbasePattern(client, 'coinbase') - self.fee_dominance: MetricPattern22[StoredF32] = 
MetricPattern22(client, f'{base_path}_fee_dominance') + self.fee_dominance: MetricPattern19[StoredF32] = MetricPattern19(client, f'{base_path}_fee_dominance') self.subsidy: CoinbasePattern = CoinbasePattern(client, 'subsidy') - self.subsidy_dominance: MetricPattern22[StoredF32] = MetricPattern22(client, f'{base_path}_subsidy_dominance') - self.subsidy_usd_1y_sma: MetricPattern5[Dollars] = MetricPattern5(client, f'{base_path}_subsidy_usd_1y_sma') + self.subsidy_dominance: MetricPattern19[StoredF32] = MetricPattern19(client, f'{base_path}_subsidy_dominance') + self.subsidy_usd_1y_sma: MetricPattern4[Dollars] = MetricPattern4(client, f'{base_path}_subsidy_usd_1y_sma') self.unclaimed_rewards: UnclaimedRewardsPattern = UnclaimedRewardsPattern(client, 'unclaimed_rewards') -class CatalogTree_Computed_Blocks_Size: +class CatalogTree_Blocks_Size: """Catalog tree node.""" - + def __init__(self, client: BrkClientBase, base_path: str = ''): self.block_size: BlockSizePattern[StoredU64] = BlockSizePattern(client, 'block_size') self.block_vbytes: BlockSizePattern[StoredU64] = BlockSizePattern(client, 'block_vbytes') - self.vbytes: MetricPattern26[StoredU64] = MetricPattern26(client, f'{base_path}_vbytes') + self.vbytes: MetricPattern23[StoredU64] = MetricPattern23(client, f'{base_path}_vbytes') -class CatalogTree_Computed_Blocks_Time: +class CatalogTree_Blocks_Time: """Catalog tree node.""" - + def __init__(self, client: BrkClientBase, base_path: str = ''): - self.date: MetricPattern26[Date] = MetricPattern26(client, f'{base_path}_date') - self.date_fixed: MetricPattern26[Date] = MetricPattern26(client, f'{base_path}_date_fixed') + self.date: MetricPattern23[Date] = MetricPattern23(client, f'{base_path}_date') + self.date_fixed: MetricPattern23[Date] = MetricPattern23(client, f'{base_path}_date_fixed') self.timestamp: MetricPattern2[Timestamp] = MetricPattern2(client, f'{base_path}_timestamp') - self.timestamp_fixed: MetricPattern26[Timestamp] = MetricPattern26(client, 
f'{base_path}_timestamp_fixed') + self.timestamp_fixed: MetricPattern23[Timestamp] = MetricPattern23(client, f'{base_path}_timestamp_fixed') -class CatalogTree_Computed_Blocks_Weight: +class CatalogTree_Blocks_Weight: """Catalog tree node.""" - + def __init__(self, client: BrkClientBase, base_path: str = ''): + self.base: MetricPattern23[Weight] = MetricPattern23(client, f'{base_path}_base') self.block_fullness: BitcoinPattern[StoredF32] = BitcoinPattern(client, 'block_fullness') self.block_weight: BlockSizePattern[Weight] = BlockSizePattern(client, 'block_weight') -class CatalogTree_Computed_Cointime: +class CatalogTree_Cointime: """Catalog tree node.""" - - def __init__(self, client: BrkClientBase, base_path: str = ''): - self.activity: CatalogTree_Computed_Cointime_Activity = CatalogTree_Computed_Cointime_Activity(client, f'{base_path}_activity') - self.adjusted: CatalogTree_Computed_Cointime_Adjusted = CatalogTree_Computed_Cointime_Adjusted(client, f'{base_path}_adjusted') - self.cap: CatalogTree_Computed_Cointime_Cap = CatalogTree_Computed_Cointime_Cap(client, f'{base_path}_cap') - self.pricing: CatalogTree_Computed_Cointime_Pricing = CatalogTree_Computed_Cointime_Pricing(client, f'{base_path}_pricing') - self.supply: CatalogTree_Computed_Cointime_Supply = CatalogTree_Computed_Cointime_Supply(client, f'{base_path}_supply') - self.value: CatalogTree_Computed_Cointime_Value = CatalogTree_Computed_Cointime_Value(client, f'{base_path}_value') -class CatalogTree_Computed_Cointime_Activity: + def __init__(self, client: BrkClientBase, base_path: str = ''): + self.activity: CatalogTree_Cointime_Activity = CatalogTree_Cointime_Activity(client, f'{base_path}_activity') + self.adjusted: CatalogTree_Cointime_Adjusted = CatalogTree_Cointime_Adjusted(client, f'{base_path}_adjusted') + self.cap: CatalogTree_Cointime_Cap = CatalogTree_Cointime_Cap(client, f'{base_path}_cap') + self.pricing: CatalogTree_Cointime_Pricing = CatalogTree_Cointime_Pricing(client, 
f'{base_path}_pricing') + self.supply: CatalogTree_Cointime_Supply = CatalogTree_Cointime_Supply(client, f'{base_path}_supply') + self.value: CatalogTree_Cointime_Value = CatalogTree_Cointime_Value(client, f'{base_path}_value') + +class CatalogTree_Cointime_Activity: """Catalog tree node.""" - + def __init__(self, client: BrkClientBase, base_path: str = ''): self.activity_to_vaultedness_ratio: MetricPattern1[StoredF64] = MetricPattern1(client, f'{base_path}_activity_to_vaultedness_ratio') self.coinblocks_created: BlockCountPattern[StoredF64] = BlockCountPattern(client, 'coinblocks_created') @@ -3071,17 +2869,17 @@ class CatalogTree_Computed_Cointime_Activity: self.liveliness: MetricPattern1[StoredF64] = MetricPattern1(client, f'{base_path}_liveliness') self.vaultedness: MetricPattern1[StoredF64] = MetricPattern1(client, f'{base_path}_vaultedness') -class CatalogTree_Computed_Cointime_Adjusted: +class CatalogTree_Cointime_Adjusted: """Catalog tree node.""" - - def __init__(self, client: BrkClientBase, base_path: str = ''): - self.cointime_adj_inflation_rate: MetricPattern5[StoredF32] = MetricPattern5(client, f'{base_path}_cointime_adj_inflation_rate') - self.cointime_adj_tx_btc_velocity: MetricPattern5[StoredF64] = MetricPattern5(client, f'{base_path}_cointime_adj_tx_btc_velocity') - self.cointime_adj_tx_usd_velocity: MetricPattern5[StoredF64] = MetricPattern5(client, f'{base_path}_cointime_adj_tx_usd_velocity') -class CatalogTree_Computed_Cointime_Cap: + def __init__(self, client: BrkClientBase, base_path: str = ''): + self.cointime_adj_inflation_rate: MetricPattern4[StoredF32] = MetricPattern4(client, f'{base_path}_cointime_adj_inflation_rate') + self.cointime_adj_tx_btc_velocity: MetricPattern4[StoredF64] = MetricPattern4(client, f'{base_path}_cointime_adj_tx_btc_velocity') + self.cointime_adj_tx_usd_velocity: MetricPattern4[StoredF64] = MetricPattern4(client, f'{base_path}_cointime_adj_tx_usd_velocity') + +class CatalogTree_Cointime_Cap: """Catalog tree node.""" 
- + def __init__(self, client: BrkClientBase, base_path: str = ''): self.active_cap: MetricPattern1[Dollars] = MetricPattern1(client, f'{base_path}_active_cap') self.cointime_cap: MetricPattern1[Dollars] = MetricPattern1(client, f'{base_path}_cointime_cap') @@ -3089,9 +2887,9 @@ class CatalogTree_Computed_Cointime_Cap: self.thermo_cap: MetricPattern1[Dollars] = MetricPattern1(client, f'{base_path}_thermo_cap') self.vaulted_cap: MetricPattern1[Dollars] = MetricPattern1(client, f'{base_path}_vaulted_cap') -class CatalogTree_Computed_Cointime_Pricing: +class CatalogTree_Cointime_Pricing: """Catalog tree node.""" - + def __init__(self, client: BrkClientBase, base_path: str = ''): self.active_price: MetricPattern1[Dollars] = MetricPattern1(client, f'{base_path}_active_price') self.active_price_ratio: ActivePriceRatioPattern = ActivePriceRatioPattern(client, 'active_price_ratio') @@ -3102,24 +2900,24 @@ class CatalogTree_Computed_Cointime_Pricing: self.vaulted_price: MetricPattern1[Dollars] = MetricPattern1(client, f'{base_path}_vaulted_price') self.vaulted_price_ratio: ActivePriceRatioPattern = ActivePriceRatioPattern(client, 'vaulted_price_ratio') -class CatalogTree_Computed_Cointime_Supply: +class CatalogTree_Cointime_Supply: """Catalog tree node.""" - + def __init__(self, client: BrkClientBase, base_path: str = ''): self.active_supply: ActiveSupplyPattern = ActiveSupplyPattern(client, 'active_supply') self.vaulted_supply: ActiveSupplyPattern = ActiveSupplyPattern(client, 'vaulted_supply') -class CatalogTree_Computed_Cointime_Value: +class CatalogTree_Cointime_Value: """Catalog tree node.""" - + def __init__(self, client: BrkClientBase, base_path: str = ''): self.cointime_value_created: BlockCountPattern[StoredF64] = BlockCountPattern(client, 'cointime_value_created') self.cointime_value_destroyed: BlockCountPattern[StoredF64] = BlockCountPattern(client, 'cointime_value_destroyed') self.cointime_value_stored: BlockCountPattern[StoredF64] = BlockCountPattern(client, 
'cointime_value_stored') -class CatalogTree_Computed_Constants: +class CatalogTree_Constants: """Catalog tree node.""" - + def __init__(self, client: BrkClientBase, base_path: str = ''): self.constant_0: MetricPattern3[StoredU16] = MetricPattern3(client, f'{base_path}_constant_0') self.constant_1: MetricPattern3[StoredU16] = MetricPattern3(client, f'{base_path}_constant_1') @@ -3140,35 +2938,31 @@ class CatalogTree_Computed_Constants: self.constant_minus_3: MetricPattern3[StoredI16] = MetricPattern3(client, f'{base_path}_constant_minus_3') self.constant_minus_4: MetricPattern3[StoredI16] = MetricPattern3(client, f'{base_path}_constant_minus_4') -class CatalogTree_Computed_Distribution: +class CatalogTree_Distribution: """Catalog tree node.""" - - def __init__(self, client: BrkClientBase, base_path: str = ''): - self.addr_count: MetricPattern1[StoredU64] = MetricPattern1(client, f'{base_path}_addr_count') - self.address_cohorts: CatalogTree_Computed_Distribution_AddressCohorts = CatalogTree_Computed_Distribution_AddressCohorts(client, f'{base_path}_address_cohorts') - self.addresses_data: CatalogTree_Computed_Distribution_AddressesData = CatalogTree_Computed_Distribution_AddressesData(client, f'{base_path}_addresses_data') - self.addresstype_to_height_to_addr_count: AddresstypeToHeightToAddrCountPattern[StoredU64] = AddresstypeToHeightToAddrCountPattern(client, 'addr_count') - self.addresstype_to_height_to_empty_addr_count: AddresstypeToHeightToAddrCountPattern[StoredU64] = AddresstypeToHeightToAddrCountPattern(client, 'empty_addr_count') - self.addresstype_to_indexes_to_addr_count: AddresstypeToHeightToAddrCountPattern[StoredU64] = AddresstypeToHeightToAddrCountPattern(client, 'addr_count') - self.addresstype_to_indexes_to_empty_addr_count: AddresstypeToHeightToAddrCountPattern[StoredU64] = AddresstypeToHeightToAddrCountPattern(client, 'empty_addr_count') - self.any_address_indexes: AddresstypeToHeightToAddrCountPattern[AnyAddressIndex] = 
AddresstypeToHeightToAddrCountPattern(client, 'anyaddressindex') - self.chain_state: MetricPattern26[SupplyState] = MetricPattern26(client, f'{base_path}_chain_state') - self.empty_addr_count: MetricPattern1[StoredU64] = MetricPattern1(client, f'{base_path}_empty_addr_count') - self.emptyaddressindex: MetricPattern42[EmptyAddressIndex] = MetricPattern42(client, f'{base_path}_emptyaddressindex') - self.loadedaddressindex: MetricPattern41[LoadedAddressIndex] = MetricPattern41(client, f'{base_path}_loadedaddressindex') - self.utxo_cohorts: CatalogTree_Computed_Distribution_UtxoCohorts = CatalogTree_Computed_Distribution_UtxoCohorts(client, f'{base_path}_utxo_cohorts') -class CatalogTree_Computed_Distribution_AddressCohorts: - """Catalog tree node.""" - def __init__(self, client: BrkClientBase, base_path: str = ''): - self.amount_range: CatalogTree_Computed_Distribution_AddressCohorts_AmountRange = CatalogTree_Computed_Distribution_AddressCohorts_AmountRange(client, f'{base_path}_amount_range') - self.ge_amount: CatalogTree_Computed_Distribution_AddressCohorts_GeAmount = CatalogTree_Computed_Distribution_AddressCohorts_GeAmount(client, f'{base_path}_ge_amount') - self.lt_amount: CatalogTree_Computed_Distribution_AddressCohorts_LtAmount = CatalogTree_Computed_Distribution_AddressCohorts_LtAmount(client, f'{base_path}_lt_amount') + self.addr_count: AddrCountPattern = AddrCountPattern(client, 'addr_count') + self.address_cohorts: CatalogTree_Distribution_AddressCohorts = CatalogTree_Distribution_AddressCohorts(client, f'{base_path}_address_cohorts') + self.addresses_data: CatalogTree_Distribution_AddressesData = CatalogTree_Distribution_AddressesData(client, f'{base_path}_addresses_data') + self.any_address_indexes: CatalogTree_Distribution_AnyAddressIndexes = CatalogTree_Distribution_AnyAddressIndexes(client, f'{base_path}_any_address_indexes') + self.chain_state: MetricPattern23[SupplyState] = MetricPattern23(client, f'{base_path}_chain_state') + self.empty_addr_count: 
AddrCountPattern = AddrCountPattern(client, 'empty_addr_count') + self.emptyaddressindex: MetricPattern39[EmptyAddressIndex] = MetricPattern39(client, f'{base_path}_emptyaddressindex') + self.loadedaddressindex: MetricPattern38[LoadedAddressIndex] = MetricPattern38(client, f'{base_path}_loadedaddressindex') + self.utxo_cohorts: CatalogTree_Distribution_UtxoCohorts = CatalogTree_Distribution_UtxoCohorts(client, f'{base_path}_utxo_cohorts') -class CatalogTree_Computed_Distribution_AddressCohorts_AmountRange: +class CatalogTree_Distribution_AddressCohorts: """Catalog tree node.""" - + + def __init__(self, client: BrkClientBase, base_path: str = ''): + self.amount_range: CatalogTree_Distribution_AddressCohorts_AmountRange = CatalogTree_Distribution_AddressCohorts_AmountRange(client, f'{base_path}_amount_range') + self.ge_amount: CatalogTree_Distribution_AddressCohorts_GeAmount = CatalogTree_Distribution_AddressCohorts_GeAmount(client, f'{base_path}_ge_amount') + self.lt_amount: CatalogTree_Distribution_AddressCohorts_LtAmount = CatalogTree_Distribution_AddressCohorts_LtAmount(client, f'{base_path}_lt_amount') + +class CatalogTree_Distribution_AddressCohorts_AmountRange: + """Catalog tree node.""" + def __init__(self, client: BrkClientBase, base_path: str = ''): self._0sats: _0satsPattern = _0satsPattern(client, 'addrs_with_0sats') self._100btc_to_1k_btc: _0satsPattern = _0satsPattern(client, 'addrs_above_100btc_under_1k_btc') @@ -3186,9 +2980,9 @@ class CatalogTree_Computed_Distribution_AddressCohorts_AmountRange: self._1m_sats_to_10m_sats: _0satsPattern = _0satsPattern(client, 'addrs_above_1m_sats_under_10m_sats') self._1sat_to_10sats: _0satsPattern = _0satsPattern(client, 'addrs_above_1sat_under_10sats') -class CatalogTree_Computed_Distribution_AddressCohorts_GeAmount: +class CatalogTree_Distribution_AddressCohorts_GeAmount: """Catalog tree node.""" - + def __init__(self, client: BrkClientBase, base_path: str = ''): self._100btc: _0satsPattern = _0satsPattern(client, 
'addrs_above_100btc') self._100k_sats: _0satsPattern = _0satsPattern(client, 'addrs_above_100k_sats') @@ -3204,9 +2998,9 @@ class CatalogTree_Computed_Distribution_AddressCohorts_GeAmount: self._1m_sats: _0satsPattern = _0satsPattern(client, 'addrs_above_1m_sats') self._1sat: _0satsPattern = _0satsPattern(client, 'addrs_above_1sat') -class CatalogTree_Computed_Distribution_AddressCohorts_LtAmount: +class CatalogTree_Distribution_AddressCohorts_LtAmount: """Catalog tree node.""" - + def __init__(self, client: BrkClientBase, base_path: str = ''): self._100btc: _0satsPattern = _0satsPattern(client, 'addrs_under_100btc') self._100k_btc: _0satsPattern = _0satsPattern(client, 'addrs_under_100k_btc') @@ -3222,32 +3016,45 @@ class CatalogTree_Computed_Distribution_AddressCohorts_LtAmount: self._1k_sats: _0satsPattern = _0satsPattern(client, 'addrs_under_1k_sats') self._1m_sats: _0satsPattern = _0satsPattern(client, 'addrs_under_1m_sats') -class CatalogTree_Computed_Distribution_AddressesData: +class CatalogTree_Distribution_AddressesData: """Catalog tree node.""" - - def __init__(self, client: BrkClientBase, base_path: str = ''): - self.empty: MetricPattern42[EmptyAddressData] = MetricPattern42(client, f'{base_path}_empty') - self.loaded: MetricPattern41[LoadedAddressData] = MetricPattern41(client, f'{base_path}_loaded') -class CatalogTree_Computed_Distribution_UtxoCohorts: - """Catalog tree node.""" - def __init__(self, client: BrkClientBase, base_path: str = ''): - self.age_range: CatalogTree_Computed_Distribution_UtxoCohorts_AgeRange = CatalogTree_Computed_Distribution_UtxoCohorts_AgeRange(client, f'{base_path}_age_range') - self.all: CatalogTree_Computed_Distribution_UtxoCohorts_All = CatalogTree_Computed_Distribution_UtxoCohorts_All(client, f'{base_path}_all') - self.amount_range: CatalogTree_Computed_Distribution_UtxoCohorts_AmountRange = CatalogTree_Computed_Distribution_UtxoCohorts_AmountRange(client, f'{base_path}_amount_range') - self.epoch: 
CatalogTree_Computed_Distribution_UtxoCohorts_Epoch = CatalogTree_Computed_Distribution_UtxoCohorts_Epoch(client, f'{base_path}_epoch') - self.ge_amount: CatalogTree_Computed_Distribution_UtxoCohorts_GeAmount = CatalogTree_Computed_Distribution_UtxoCohorts_GeAmount(client, f'{base_path}_ge_amount') - self.lt_amount: CatalogTree_Computed_Distribution_UtxoCohorts_LtAmount = CatalogTree_Computed_Distribution_UtxoCohorts_LtAmount(client, f'{base_path}_lt_amount') - self.max_age: CatalogTree_Computed_Distribution_UtxoCohorts_MaxAge = CatalogTree_Computed_Distribution_UtxoCohorts_MaxAge(client, f'{base_path}_max_age') - self.min_age: CatalogTree_Computed_Distribution_UtxoCohorts_MinAge = CatalogTree_Computed_Distribution_UtxoCohorts_MinAge(client, f'{base_path}_min_age') - self.term: CatalogTree_Computed_Distribution_UtxoCohorts_Term = CatalogTree_Computed_Distribution_UtxoCohorts_Term(client, f'{base_path}_term') - self.type_: CatalogTree_Computed_Distribution_UtxoCohorts_Type = CatalogTree_Computed_Distribution_UtxoCohorts_Type(client, f'{base_path}_type_') - self.year: CatalogTree_Computed_Distribution_UtxoCohorts_Year = CatalogTree_Computed_Distribution_UtxoCohorts_Year(client, f'{base_path}_year') + self.empty: MetricPattern39[EmptyAddressData] = MetricPattern39(client, f'{base_path}_empty') + self.loaded: MetricPattern38[LoadedAddressData] = MetricPattern38(client, f'{base_path}_loaded') -class CatalogTree_Computed_Distribution_UtxoCohorts_AgeRange: +class CatalogTree_Distribution_AnyAddressIndexes: """Catalog tree node.""" - + + def __init__(self, client: BrkClientBase, base_path: str = ''): + self.p2a: MetricPattern27[AnyAddressIndex] = MetricPattern27(client, f'{base_path}_p2a') + self.p2pk33: MetricPattern29[AnyAddressIndex] = MetricPattern29(client, f'{base_path}_p2pk33') + self.p2pk65: MetricPattern30[AnyAddressIndex] = MetricPattern30(client, f'{base_path}_p2pk65') + self.p2pkh: MetricPattern31[AnyAddressIndex] = MetricPattern31(client, f'{base_path}_p2pkh') 
+ self.p2sh: MetricPattern32[AnyAddressIndex] = MetricPattern32(client, f'{base_path}_p2sh') + self.p2tr: MetricPattern33[AnyAddressIndex] = MetricPattern33(client, f'{base_path}_p2tr') + self.p2wpkh: MetricPattern34[AnyAddressIndex] = MetricPattern34(client, f'{base_path}_p2wpkh') + self.p2wsh: MetricPattern35[AnyAddressIndex] = MetricPattern35(client, f'{base_path}_p2wsh') + +class CatalogTree_Distribution_UtxoCohorts: + """Catalog tree node.""" + + def __init__(self, client: BrkClientBase, base_path: str = ''): + self.age_range: CatalogTree_Distribution_UtxoCohorts_AgeRange = CatalogTree_Distribution_UtxoCohorts_AgeRange(client, f'{base_path}_age_range') + self.all: CatalogTree_Distribution_UtxoCohorts_All = CatalogTree_Distribution_UtxoCohorts_All(client, f'{base_path}_all') + self.amount_range: CatalogTree_Distribution_UtxoCohorts_AmountRange = CatalogTree_Distribution_UtxoCohorts_AmountRange(client, f'{base_path}_amount_range') + self.epoch: CatalogTree_Distribution_UtxoCohorts_Epoch = CatalogTree_Distribution_UtxoCohorts_Epoch(client, f'{base_path}_epoch') + self.ge_amount: CatalogTree_Distribution_UtxoCohorts_GeAmount = CatalogTree_Distribution_UtxoCohorts_GeAmount(client, f'{base_path}_ge_amount') + self.lt_amount: CatalogTree_Distribution_UtxoCohorts_LtAmount = CatalogTree_Distribution_UtxoCohorts_LtAmount(client, f'{base_path}_lt_amount') + self.max_age: CatalogTree_Distribution_UtxoCohorts_MaxAge = CatalogTree_Distribution_UtxoCohorts_MaxAge(client, f'{base_path}_max_age') + self.min_age: CatalogTree_Distribution_UtxoCohorts_MinAge = CatalogTree_Distribution_UtxoCohorts_MinAge(client, f'{base_path}_min_age') + self.term: CatalogTree_Distribution_UtxoCohorts_Term = CatalogTree_Distribution_UtxoCohorts_Term(client, f'{base_path}_term') + self.type_: CatalogTree_Distribution_UtxoCohorts_Type = CatalogTree_Distribution_UtxoCohorts_Type(client, f'{base_path}_type_') + self.year: CatalogTree_Distribution_UtxoCohorts_Year = 
CatalogTree_Distribution_UtxoCohorts_Year(client, f'{base_path}_year') + +class CatalogTree_Distribution_UtxoCohorts_AgeRange: + """Catalog tree node.""" + def __init__(self, client: BrkClientBase, base_path: str = ''): self._10y_to_12y: _10yTo12yPattern = _10yTo12yPattern(client, 'utxos_at_least_10y_up_to_12y_old') self._12y_to_15y: _10yTo12yPattern = _10yTo12yPattern(client, 'utxos_at_least_12y_up_to_15y_old') @@ -3271,20 +3078,20 @@ class CatalogTree_Computed_Distribution_UtxoCohorts_AgeRange: self.from_15y: _10yTo12yPattern = _10yTo12yPattern(client, 'utxos_at_least_15y_old') self.up_to_1h: _10yTo12yPattern = _10yTo12yPattern(client, 'utxos_up_to_1h_old') -class CatalogTree_Computed_Distribution_UtxoCohorts_All: +class CatalogTree_Distribution_UtxoCohorts_All: """Catalog tree node.""" - + def __init__(self, client: BrkClientBase, base_path: str = ''): self.activity: ActivityPattern2 = ActivityPattern2(client, '') self.cost_basis: CostBasisPattern2 = CostBasisPattern2(client, '') self.realized: RealizedPattern3 = RealizedPattern3(client, '') - self.relative: CatalogTree_Computed_Distribution_UtxoCohorts_All_Relative = CatalogTree_Computed_Distribution_UtxoCohorts_All_Relative(client, f'{base_path}_relative') - self.supply: SupplyPattern3 = SupplyPattern3(client, '') + self.relative: CatalogTree_Distribution_UtxoCohorts_All_Relative = CatalogTree_Distribution_UtxoCohorts_All_Relative(client, f'{base_path}_relative') + self.supply: SupplyPattern2 = SupplyPattern2(client, '') self.unrealized: UnrealizedPattern = UnrealizedPattern(client, '') -class CatalogTree_Computed_Distribution_UtxoCohorts_All_Relative: +class CatalogTree_Distribution_UtxoCohorts_All_Relative: """Catalog tree node.""" - + def __init__(self, client: BrkClientBase, base_path: str = ''): self.neg_unrealized_loss_rel_to_own_total_unrealized_pnl: MetricPattern3[StoredF32] = MetricPattern3(client, f'{base_path}_neg_unrealized_loss_rel_to_own_total_unrealized_pnl') 
self.net_unrealized_pnl_rel_to_own_total_unrealized_pnl: MetricPattern3[StoredF32] = MetricPattern3(client, f'{base_path}_net_unrealized_pnl_rel_to_own_total_unrealized_pnl') @@ -3293,9 +3100,9 @@ class CatalogTree_Computed_Distribution_UtxoCohorts_All_Relative: self.unrealized_loss_rel_to_own_total_unrealized_pnl: MetricPattern3[StoredF32] = MetricPattern3(client, f'{base_path}_unrealized_loss_rel_to_own_total_unrealized_pnl') self.unrealized_profit_rel_to_own_total_unrealized_pnl: MetricPattern3[StoredF32] = MetricPattern3(client, f'{base_path}_unrealized_profit_rel_to_own_total_unrealized_pnl') -class CatalogTree_Computed_Distribution_UtxoCohorts_AmountRange: +class CatalogTree_Distribution_UtxoCohorts_AmountRange: """Catalog tree node.""" - + def __init__(self, client: BrkClientBase, base_path: str = ''): self._0sats: _0satsPattern2 = _0satsPattern2(client, 'utxos_with_0sats') self._100btc_to_1k_btc: _0satsPattern2 = _0satsPattern2(client, 'utxos_above_100btc_under_1k_btc') @@ -3313,9 +3120,9 @@ class CatalogTree_Computed_Distribution_UtxoCohorts_AmountRange: self._1m_sats_to_10m_sats: _0satsPattern2 = _0satsPattern2(client, 'utxos_above_1m_sats_under_10m_sats') self._1sat_to_10sats: _0satsPattern2 = _0satsPattern2(client, 'utxos_above_1sat_under_10sats') -class CatalogTree_Computed_Distribution_UtxoCohorts_Epoch: +class CatalogTree_Distribution_UtxoCohorts_Epoch: """Catalog tree node.""" - + def __init__(self, client: BrkClientBase, base_path: str = ''): self._0: _0satsPattern2 = _0satsPattern2(client, 'epoch_0') self._1: _0satsPattern2 = _0satsPattern2(client, 'epoch_1') @@ -3323,9 +3130,9 @@ class CatalogTree_Computed_Distribution_UtxoCohorts_Epoch: self._3: _0satsPattern2 = _0satsPattern2(client, 'epoch_3') self._4: _0satsPattern2 = _0satsPattern2(client, 'epoch_4') -class CatalogTree_Computed_Distribution_UtxoCohorts_GeAmount: +class CatalogTree_Distribution_UtxoCohorts_GeAmount: """Catalog tree node.""" - + def __init__(self, client: BrkClientBase, 
base_path: str = ''): self._100btc: _100btcPattern = _100btcPattern(client, 'utxos_above_100btc') self._100k_sats: _100btcPattern = _100btcPattern(client, 'utxos_above_100k_sats') @@ -3341,9 +3148,9 @@ class CatalogTree_Computed_Distribution_UtxoCohorts_GeAmount: self._1m_sats: _100btcPattern = _100btcPattern(client, 'utxos_above_1m_sats') self._1sat: _100btcPattern = _100btcPattern(client, 'utxos_above_1sat') -class CatalogTree_Computed_Distribution_UtxoCohorts_LtAmount: +class CatalogTree_Distribution_UtxoCohorts_LtAmount: """Catalog tree node.""" - + def __init__(self, client: BrkClientBase, base_path: str = ''): self._100btc: _100btcPattern = _100btcPattern(client, 'utxos_under_100btc') self._100k_btc: _100btcPattern = _100btcPattern(client, 'utxos_under_100k_btc') @@ -3359,9 +3166,9 @@ class CatalogTree_Computed_Distribution_UtxoCohorts_LtAmount: self._1k_sats: _100btcPattern = _100btcPattern(client, 'utxos_under_1k_sats') self._1m_sats: _100btcPattern = _100btcPattern(client, 'utxos_under_1m_sats') -class CatalogTree_Computed_Distribution_UtxoCohorts_MaxAge: +class CatalogTree_Distribution_UtxoCohorts_MaxAge: """Catalog tree node.""" - + def __init__(self, client: BrkClientBase, base_path: str = ''): self._10y: _10yPattern = _10yPattern(client, 'utxos_up_to_10y_old') self._12y: _10yPattern = _10yPattern(client, 'utxos_up_to_12y_old') @@ -3382,9 +3189,9 @@ class CatalogTree_Computed_Distribution_UtxoCohorts_MaxAge: self._7y: _10yPattern = _10yPattern(client, 'utxos_up_to_7y_old') self._8y: _10yPattern = _10yPattern(client, 'utxos_up_to_8y_old') -class CatalogTree_Computed_Distribution_UtxoCohorts_MinAge: +class CatalogTree_Distribution_UtxoCohorts_MinAge: """Catalog tree node.""" - + def __init__(self, client: BrkClientBase, base_path: str = ''): self._10y: _100btcPattern = _100btcPattern(client, 'utxos_at_least_10y_old') self._12y: _100btcPattern = _100btcPattern(client, 'utxos_at_least_12y_old') @@ -3405,38 +3212,38 @@ class 
CatalogTree_Computed_Distribution_UtxoCohorts_MinAge: self._7y: _100btcPattern = _100btcPattern(client, 'utxos_at_least_7y_old') self._8y: _100btcPattern = _100btcPattern(client, 'utxos_at_least_8y_old') -class CatalogTree_Computed_Distribution_UtxoCohorts_Term: +class CatalogTree_Distribution_UtxoCohorts_Term: """Catalog tree node.""" - - def __init__(self, client: BrkClientBase, base_path: str = ''): - self.long: CatalogTree_Computed_Distribution_UtxoCohorts_Term_Long = CatalogTree_Computed_Distribution_UtxoCohorts_Term_Long(client, f'{base_path}_long') - self.short: CatalogTree_Computed_Distribution_UtxoCohorts_Term_Short = CatalogTree_Computed_Distribution_UtxoCohorts_Term_Short(client, f'{base_path}_short') -class CatalogTree_Computed_Distribution_UtxoCohorts_Term_Long: + def __init__(self, client: BrkClientBase, base_path: str = ''): + self.long: CatalogTree_Distribution_UtxoCohorts_Term_Long = CatalogTree_Distribution_UtxoCohorts_Term_Long(client, f'{base_path}_long') + self.short: CatalogTree_Distribution_UtxoCohorts_Term_Short = CatalogTree_Distribution_UtxoCohorts_Term_Short(client, f'{base_path}_short') + +class CatalogTree_Distribution_UtxoCohorts_Term_Long: """Catalog tree node.""" - + def __init__(self, client: BrkClientBase, base_path: str = ''): self.activity: ActivityPattern2 = ActivityPattern2(client, 'lth') self.cost_basis: CostBasisPattern2 = CostBasisPattern2(client, 'lth') self.realized: RealizedPattern2 = RealizedPattern2(client, 'lth') self.relative: RelativePattern5 = RelativePattern5(client, 'lth') - self.supply: SupplyPattern3 = SupplyPattern3(client, 'lth') + self.supply: SupplyPattern2 = SupplyPattern2(client, 'lth') self.unrealized: UnrealizedPattern = UnrealizedPattern(client, 'lth') -class CatalogTree_Computed_Distribution_UtxoCohorts_Term_Short: +class CatalogTree_Distribution_UtxoCohorts_Term_Short: """Catalog tree node.""" - + def __init__(self, client: BrkClientBase, base_path: str = ''): self.activity: ActivityPattern2 = 
ActivityPattern2(client, 'sth') self.cost_basis: CostBasisPattern2 = CostBasisPattern2(client, 'sth') self.realized: RealizedPattern3 = RealizedPattern3(client, 'sth') self.relative: RelativePattern5 = RelativePattern5(client, 'sth') - self.supply: SupplyPattern3 = SupplyPattern3(client, 'sth') + self.supply: SupplyPattern2 = SupplyPattern2(client, 'sth') self.unrealized: UnrealizedPattern = UnrealizedPattern(client, 'sth') -class CatalogTree_Computed_Distribution_UtxoCohorts_Type: +class CatalogTree_Distribution_UtxoCohorts_Type: """Catalog tree node.""" - + def __init__(self, client: BrkClientBase, base_path: str = ''): self.empty: _0satsPattern2 = _0satsPattern2(client, 'empty_outputs') self.p2a: _0satsPattern2 = _0satsPattern2(client, 'p2a') @@ -3450,9 +3257,9 @@ class CatalogTree_Computed_Distribution_UtxoCohorts_Type: self.p2wsh: _0satsPattern2 = _0satsPattern2(client, 'p2wsh') self.unknown: _0satsPattern2 = _0satsPattern2(client, 'unknown_outputs') -class CatalogTree_Computed_Distribution_UtxoCohorts_Year: +class CatalogTree_Distribution_UtxoCohorts_Year: """Catalog tree node.""" - + def __init__(self, client: BrkClientBase, base_path: str = ''): self._2009: _0satsPattern2 = _0satsPattern2(client, 'year_2009') self._2010: _0satsPattern2 = _0satsPattern2(client, 'year_2010') @@ -3473,135 +3280,140 @@ class CatalogTree_Computed_Distribution_UtxoCohorts_Year: self._2025: _0satsPattern2 = _0satsPattern2(client, 'year_2025') self._2026: _0satsPattern2 = _0satsPattern2(client, 'year_2026') -class CatalogTree_Computed_Indexes: +class CatalogTree_Indexes: """Catalog tree node.""" - - def __init__(self, client: BrkClientBase, base_path: str = ''): - self.address: CatalogTree_Computed_Indexes_Address = CatalogTree_Computed_Indexes_Address(client, f'{base_path}_address') - self.block: CatalogTree_Computed_Indexes_Block = CatalogTree_Computed_Indexes_Block(client, f'{base_path}_block') - self.time: CatalogTree_Computed_Indexes_Time = 
CatalogTree_Computed_Indexes_Time(client, f'{base_path}_time') - self.transaction: CatalogTree_Computed_Indexes_Transaction = CatalogTree_Computed_Indexes_Transaction(client, f'{base_path}_transaction') -class CatalogTree_Computed_Indexes_Address: - """Catalog tree node.""" - def __init__(self, client: BrkClientBase, base_path: str = ''): - self.emptyoutputindex: MetricPattern25[EmptyOutputIndex] = MetricPattern25(client, f'{base_path}_emptyoutputindex') - self.opreturnindex: MetricPattern28[OpReturnIndex] = MetricPattern28(client, f'{base_path}_opreturnindex') - self.p2aaddressindex: MetricPattern30[P2AAddressIndex] = MetricPattern30(client, f'{base_path}_p2aaddressindex') - self.p2msoutputindex: MetricPattern31[P2MSOutputIndex] = MetricPattern31(client, f'{base_path}_p2msoutputindex') - self.p2pk33addressindex: MetricPattern32[P2PK33AddressIndex] = MetricPattern32(client, f'{base_path}_p2pk33addressindex') - self.p2pk65addressindex: MetricPattern33[P2PK65AddressIndex] = MetricPattern33(client, f'{base_path}_p2pk65addressindex') - self.p2pkhaddressindex: MetricPattern34[P2PKHAddressIndex] = MetricPattern34(client, f'{base_path}_p2pkhaddressindex') - self.p2shaddressindex: MetricPattern35[P2SHAddressIndex] = MetricPattern35(client, f'{base_path}_p2shaddressindex') - self.p2traddressindex: MetricPattern36[P2TRAddressIndex] = MetricPattern36(client, f'{base_path}_p2traddressindex') - self.p2wpkhaddressindex: MetricPattern37[P2WPKHAddressIndex] = MetricPattern37(client, f'{base_path}_p2wpkhaddressindex') - self.p2wshaddressindex: MetricPattern38[P2WSHAddressIndex] = MetricPattern38(client, f'{base_path}_p2wshaddressindex') - self.unknownoutputindex: MetricPattern40[UnknownOutputIndex] = MetricPattern40(client, f'{base_path}_unknownoutputindex') + self.address: CatalogTree_Indexes_Address = CatalogTree_Indexes_Address(client, f'{base_path}_address') + self.block: CatalogTree_Indexes_Block = CatalogTree_Indexes_Block(client, f'{base_path}_block') + self.time: 
CatalogTree_Indexes_Time = CatalogTree_Indexes_Time(client, f'{base_path}_time') + self.transaction: CatalogTree_Indexes_Transaction = CatalogTree_Indexes_Transaction(client, f'{base_path}_transaction') -class CatalogTree_Computed_Indexes_Block: +class CatalogTree_Indexes_Address: """Catalog tree node.""" - - def __init__(self, client: BrkClientBase, base_path: str = ''): - self.dateindex: MetricPattern26[DateIndex] = MetricPattern26(client, f'{base_path}_dateindex') - self.difficultyepoch: MetricPattern15[DifficultyEpoch] = MetricPattern15(client, f'{base_path}_difficultyepoch') - self.first_height: MetricPattern14[Height] = MetricPattern14(client, f'{base_path}_first_height') - self.halvingepoch: MetricPattern16[HalvingEpoch] = MetricPattern16(client, f'{base_path}_halvingepoch') - self.height: MetricPattern26[Height] = MetricPattern26(client, f'{base_path}_height') - self.height_count: MetricPattern24[StoredU64] = MetricPattern24(client, f'{base_path}_height_count') - self.txindex_count: MetricPattern26[StoredU64] = MetricPattern26(client, f'{base_path}_txindex_count') -class CatalogTree_Computed_Indexes_Time: - """Catalog tree node.""" - def __init__(self, client: BrkClientBase, base_path: str = ''): - self.date: MetricPattern22[Date] = MetricPattern22(client, f'{base_path}_date') - self.dateindex: MetricPattern22[DateIndex] = MetricPattern22(client, f'{base_path}_dateindex') - self.dateindex_count: MetricPattern20[StoredU64] = MetricPattern20(client, f'{base_path}_dateindex_count') - self.decadeindex: MetricPattern13[DecadeIndex] = MetricPattern13(client, f'{base_path}_decadeindex') - self.first_dateindex: MetricPattern20[DateIndex] = MetricPattern20(client, f'{base_path}_first_dateindex') - self.first_height: MetricPattern22[Height] = MetricPattern22(client, f'{base_path}_first_height') - self.first_monthindex: MetricPattern9[MonthIndex] = MetricPattern9(client, f'{base_path}_first_monthindex') - self.first_yearindex: MetricPattern23[YearIndex] = 
MetricPattern23(client, f'{base_path}_first_yearindex') - self.height_count: MetricPattern22[StoredU64] = MetricPattern22(client, f'{base_path}_height_count') - self.monthindex: MetricPattern11[MonthIndex] = MetricPattern11(client, f'{base_path}_monthindex') - self.monthindex_count: MetricPattern9[StoredU64] = MetricPattern9(client, f'{base_path}_monthindex_count') - self.quarterindex: MetricPattern18[QuarterIndex] = MetricPattern18(client, f'{base_path}_quarterindex') - self.semesterindex: MetricPattern19[SemesterIndex] = MetricPattern19(client, f'{base_path}_semesterindex') - self.weekindex: MetricPattern12[WeekIndex] = MetricPattern12(client, f'{base_path}_weekindex') - self.yearindex: MetricPattern21[YearIndex] = MetricPattern21(client, f'{base_path}_yearindex') - self.yearindex_count: MetricPattern23[StoredU64] = MetricPattern23(client, f'{base_path}_yearindex_count') + self.emptyoutputindex: MetricPattern22[EmptyOutputIndex] = MetricPattern22(client, f'{base_path}_emptyoutputindex') + self.opreturnindex: MetricPattern25[OpReturnIndex] = MetricPattern25(client, f'{base_path}_opreturnindex') + self.p2aaddressindex: MetricPattern27[P2AAddressIndex] = MetricPattern27(client, f'{base_path}_p2aaddressindex') + self.p2msoutputindex: MetricPattern28[P2MSOutputIndex] = MetricPattern28(client, f'{base_path}_p2msoutputindex') + self.p2pk33addressindex: MetricPattern29[P2PK33AddressIndex] = MetricPattern29(client, f'{base_path}_p2pk33addressindex') + self.p2pk65addressindex: MetricPattern30[P2PK65AddressIndex] = MetricPattern30(client, f'{base_path}_p2pk65addressindex') + self.p2pkhaddressindex: MetricPattern31[P2PKHAddressIndex] = MetricPattern31(client, f'{base_path}_p2pkhaddressindex') + self.p2shaddressindex: MetricPattern32[P2SHAddressIndex] = MetricPattern32(client, f'{base_path}_p2shaddressindex') + self.p2traddressindex: MetricPattern33[P2TRAddressIndex] = MetricPattern33(client, f'{base_path}_p2traddressindex') + self.p2wpkhaddressindex: 
MetricPattern34[P2WPKHAddressIndex] = MetricPattern34(client, f'{base_path}_p2wpkhaddressindex') + self.p2wshaddressindex: MetricPattern35[P2WSHAddressIndex] = MetricPattern35(client, f'{base_path}_p2wshaddressindex') + self.unknownoutputindex: MetricPattern37[UnknownOutputIndex] = MetricPattern37(client, f'{base_path}_unknownoutputindex') -class CatalogTree_Computed_Indexes_Transaction: +class CatalogTree_Indexes_Block: """Catalog tree node.""" - - def __init__(self, client: BrkClientBase, base_path: str = ''): - self.input_count: MetricPattern39[StoredU64] = MetricPattern39(client, f'{base_path}_input_count') - self.output_count: MetricPattern39[StoredU64] = MetricPattern39(client, f'{base_path}_output_count') - self.txindex: MetricPattern39[TxIndex] = MetricPattern39(client, f'{base_path}_txindex') - self.txinindex: MetricPattern27[TxInIndex] = MetricPattern27(client, f'{base_path}_txinindex') - self.txoutindex: MetricPattern29[TxOutIndex] = MetricPattern29(client, f'{base_path}_txoutindex') -class CatalogTree_Computed_Inputs: - """Catalog tree node.""" - def __init__(self, client: BrkClientBase, base_path: str = ''): - self.count: CatalogTree_Computed_Inputs_Count = CatalogTree_Computed_Inputs_Count(client, f'{base_path}_count') - self.spent: CatalogTree_Computed_Inputs_Spent = CatalogTree_Computed_Inputs_Spent(client, f'{base_path}_spent') + self.dateindex: MetricPattern23[DateIndex] = MetricPattern23(client, f'{base_path}_dateindex') + self.difficultyepoch: MetricPattern12[DifficultyEpoch] = MetricPattern12(client, f'{base_path}_difficultyepoch') + self.first_height: MetricPattern11[Height] = MetricPattern11(client, f'{base_path}_first_height') + self.halvingepoch: MetricPattern13[HalvingEpoch] = MetricPattern13(client, f'{base_path}_halvingepoch') + self.height: MetricPattern23[Height] = MetricPattern23(client, f'{base_path}_height') + self.height_count: MetricPattern21[StoredU64] = MetricPattern21(client, f'{base_path}_height_count') + self.txindex_count: 
MetricPattern23[StoredU64] = MetricPattern23(client, f'{base_path}_txindex_count') -class CatalogTree_Computed_Inputs_Count: +class CatalogTree_Indexes_Time: """Catalog tree node.""" - - def __init__(self, client: BrkClientBase, base_path: str = ''): - self.count: CountPattern2[StoredU64] = CountPattern2(client, 'input_count') -class CatalogTree_Computed_Inputs_Spent: - """Catalog tree node.""" - def __init__(self, client: BrkClientBase, base_path: str = ''): - self.txoutindex: MetricPattern27[TxOutIndex] = MetricPattern27(client, f'{base_path}_txoutindex') - self.value: MetricPattern27[Sats] = MetricPattern27(client, f'{base_path}_value') + self.date: MetricPattern19[Date] = MetricPattern19(client, f'{base_path}_date') + self.dateindex: MetricPattern19[DateIndex] = MetricPattern19(client, f'{base_path}_dateindex') + self.dateindex_count: MetricPattern17[StoredU64] = MetricPattern17(client, f'{base_path}_dateindex_count') + self.decadeindex: MetricPattern10[DecadeIndex] = MetricPattern10(client, f'{base_path}_decadeindex') + self.first_dateindex: MetricPattern17[DateIndex] = MetricPattern17(client, f'{base_path}_first_dateindex') + self.first_height: MetricPattern19[Height] = MetricPattern19(client, f'{base_path}_first_height') + self.first_monthindex: MetricPattern6[MonthIndex] = MetricPattern6(client, f'{base_path}_first_monthindex') + self.first_yearindex: MetricPattern20[YearIndex] = MetricPattern20(client, f'{base_path}_first_yearindex') + self.height_count: MetricPattern19[StoredU64] = MetricPattern19(client, f'{base_path}_height_count') + self.monthindex: MetricPattern8[MonthIndex] = MetricPattern8(client, f'{base_path}_monthindex') + self.monthindex_count: MetricPattern6[StoredU64] = MetricPattern6(client, f'{base_path}_monthindex_count') + self.quarterindex: MetricPattern15[QuarterIndex] = MetricPattern15(client, f'{base_path}_quarterindex') + self.semesterindex: MetricPattern16[SemesterIndex] = MetricPattern16(client, f'{base_path}_semesterindex') + 
self.weekindex: MetricPattern9[WeekIndex] = MetricPattern9(client, f'{base_path}_weekindex') + self.yearindex: MetricPattern18[YearIndex] = MetricPattern18(client, f'{base_path}_yearindex') + self.yearindex_count: MetricPattern20[StoredU64] = MetricPattern20(client, f'{base_path}_yearindex_count') -class CatalogTree_Computed_Market: +class CatalogTree_Indexes_Transaction: """Catalog tree node.""" - - def __init__(self, client: BrkClientBase, base_path: str = ''): - self.ath: CatalogTree_Computed_Market_Ath = CatalogTree_Computed_Market_Ath(client, f'{base_path}_ath') - self.dca: CatalogTree_Computed_Market_Dca = CatalogTree_Computed_Market_Dca(client, f'{base_path}_dca') - self.indicators: CatalogTree_Computed_Market_Indicators = CatalogTree_Computed_Market_Indicators(client, f'{base_path}_indicators') - self.lookback: CatalogTree_Computed_Market_Lookback = CatalogTree_Computed_Market_Lookback(client, f'{base_path}_lookback') - self.moving_average: CatalogTree_Computed_Market_MovingAverage = CatalogTree_Computed_Market_MovingAverage(client, f'{base_path}_moving_average') - self.range: CatalogTree_Computed_Market_Range = CatalogTree_Computed_Market_Range(client, f'{base_path}_range') - self.returns: CatalogTree_Computed_Market_Returns = CatalogTree_Computed_Market_Returns(client, f'{base_path}_returns') - self.volatility: CatalogTree_Computed_Market_Volatility = CatalogTree_Computed_Market_Volatility(client, f'{base_path}_volatility') -class CatalogTree_Computed_Market_Ath: - """Catalog tree node.""" - def __init__(self, client: BrkClientBase, base_path: str = ''): - self.days_since_price_ath: MetricPattern5[StoredU16] = MetricPattern5(client, f'{base_path}_days_since_price_ath') - self.max_days_between_price_aths: MetricPattern5[StoredU16] = MetricPattern5(client, f'{base_path}_max_days_between_price_aths') - self.max_years_between_price_aths: MetricPattern5[StoredF32] = MetricPattern5(client, f'{base_path}_max_years_between_price_aths') + self.input_count: 
MetricPattern36[StoredU64] = MetricPattern36(client, f'{base_path}_input_count') + self.output_count: MetricPattern36[StoredU64] = MetricPattern36(client, f'{base_path}_output_count') + self.txindex: MetricPattern36[TxIndex] = MetricPattern36(client, f'{base_path}_txindex') + self.txinindex: MetricPattern24[TxInIndex] = MetricPattern24(client, f'{base_path}_txinindex') + self.txoutindex: MetricPattern26[TxOutIndex] = MetricPattern26(client, f'{base_path}_txoutindex') + +class CatalogTree_Inputs: + """Catalog tree node.""" + + def __init__(self, client: BrkClientBase, base_path: str = ''): + self.count: CatalogTree_Inputs_Count = CatalogTree_Inputs_Count(client, f'{base_path}_count') + self.first_txinindex: MetricPattern23[TxInIndex] = MetricPattern23(client, f'{base_path}_first_txinindex') + self.outpoint: MetricPattern24[OutPoint] = MetricPattern24(client, f'{base_path}_outpoint') + self.outputtype: MetricPattern24[OutputType] = MetricPattern24(client, f'{base_path}_outputtype') + self.spent: CatalogTree_Inputs_Spent = CatalogTree_Inputs_Spent(client, f'{base_path}_spent') + self.txindex: MetricPattern24[TxIndex] = MetricPattern24(client, f'{base_path}_txindex') + self.typeindex: MetricPattern24[TypeIndex] = MetricPattern24(client, f'{base_path}_typeindex') + +class CatalogTree_Inputs_Count: + """Catalog tree node.""" + + def __init__(self, client: BrkClientBase, base_path: str = ''): + self.count: BlockSizePattern[StoredU64] = BlockSizePattern(client, 'input_count') + +class CatalogTree_Inputs_Spent: + """Catalog tree node.""" + + def __init__(self, client: BrkClientBase, base_path: str = ''): + self.txoutindex: MetricPattern24[TxOutIndex] = MetricPattern24(client, f'{base_path}_txoutindex') + self.value: MetricPattern24[Sats] = MetricPattern24(client, f'{base_path}_value') + +class CatalogTree_Market: + """Catalog tree node.""" + + def __init__(self, client: BrkClientBase, base_path: str = ''): + self.ath: CatalogTree_Market_Ath = CatalogTree_Market_Ath(client, 
f'{base_path}_ath') + self.dca: CatalogTree_Market_Dca = CatalogTree_Market_Dca(client, f'{base_path}_dca') + self.indicators: CatalogTree_Market_Indicators = CatalogTree_Market_Indicators(client, f'{base_path}_indicators') + self.lookback: CatalogTree_Market_Lookback = CatalogTree_Market_Lookback(client, f'{base_path}_lookback') + self.moving_average: CatalogTree_Market_MovingAverage = CatalogTree_Market_MovingAverage(client, f'{base_path}_moving_average') + self.range: CatalogTree_Market_Range = CatalogTree_Market_Range(client, f'{base_path}_range') + self.returns: CatalogTree_Market_Returns = CatalogTree_Market_Returns(client, f'{base_path}_returns') + self.volatility: CatalogTree_Market_Volatility = CatalogTree_Market_Volatility(client, f'{base_path}_volatility') + +class CatalogTree_Market_Ath: + """Catalog tree node.""" + + def __init__(self, client: BrkClientBase, base_path: str = ''): + self.days_since_price_ath: MetricPattern4[StoredU16] = MetricPattern4(client, f'{base_path}_days_since_price_ath') + self.max_days_between_price_aths: MetricPattern4[StoredU16] = MetricPattern4(client, f'{base_path}_max_days_between_price_aths') + self.max_years_between_price_aths: MetricPattern4[StoredF32] = MetricPattern4(client, f'{base_path}_max_years_between_price_aths') self.price_ath: MetricPattern3[Dollars] = MetricPattern3(client, f'{base_path}_price_ath') self.price_drawdown: MetricPattern3[StoredF32] = MetricPattern3(client, f'{base_path}_price_drawdown') - self.years_since_price_ath: MetricPattern5[StoredF32] = MetricPattern5(client, f'{base_path}_years_since_price_ath') + self.years_since_price_ath: MetricPattern4[StoredF32] = MetricPattern4(client, f'{base_path}_years_since_price_ath') -class CatalogTree_Computed_Market_Dca: +class CatalogTree_Market_Dca: """Catalog tree node.""" - + def __init__(self, client: BrkClientBase, base_path: str = ''): self.class_average_price: ClassAveragePricePattern[Dollars] = ClassAveragePricePattern(client, 'dca_class') 
self.class_returns: ClassAveragePricePattern[StoredF32] = ClassAveragePricePattern(client, 'dca_class') - self.class_stack: CatalogTree_Computed_Market_Dca_ClassStack = CatalogTree_Computed_Market_Dca_ClassStack(client, f'{base_path}_class_stack') + self.class_stack: CatalogTree_Market_Dca_ClassStack = CatalogTree_Market_Dca_ClassStack(client, f'{base_path}_class_stack') self.period_average_price: PeriodAveragePricePattern[Dollars] = PeriodAveragePricePattern(client, 'dca_average_price') self.period_cagr: PeriodCagrPattern = PeriodCagrPattern(client, 'dca_cagr') self.period_lump_sum_stack: PeriodLumpSumStackPattern = PeriodLumpSumStackPattern(client, '') self.period_returns: PeriodAveragePricePattern[StoredF32] = PeriodAveragePricePattern(client, 'dca_returns') self.period_stack: PeriodLumpSumStackPattern = PeriodLumpSumStackPattern(client, '') -class CatalogTree_Computed_Market_Dca_ClassStack: +class CatalogTree_Market_Dca_ClassStack: """Catalog tree node.""" - + def __init__(self, client: BrkClientBase, base_path: str = ''): self._2015: ActiveSupplyPattern = ActiveSupplyPattern(client, 'dca_class_2015_stack') self._2016: ActiveSupplyPattern = ActiveSupplyPattern(client, 'dca_class_2016_stack') @@ -3615,39 +3427,39 @@ class CatalogTree_Computed_Market_Dca_ClassStack: self._2024: ActiveSupplyPattern = ActiveSupplyPattern(client, 'dca_class_2024_stack') self._2025: ActiveSupplyPattern = ActiveSupplyPattern(client, 'dca_class_2025_stack') -class CatalogTree_Computed_Market_Indicators: +class CatalogTree_Market_Indicators: """Catalog tree node.""" - - def __init__(self, client: BrkClientBase, base_path: str = ''): - self.gini: MetricPattern22[StoredF32] = MetricPattern22(client, f'{base_path}_gini') - self.macd_histogram: MetricPattern22[StoredF32] = MetricPattern22(client, f'{base_path}_macd_histogram') - self.macd_line: MetricPattern22[StoredF32] = MetricPattern22(client, f'{base_path}_macd_line') - self.macd_signal: MetricPattern22[StoredF32] = 
MetricPattern22(client, f'{base_path}_macd_signal') - self.nvt: MetricPattern5[StoredF32] = MetricPattern5(client, f'{base_path}_nvt') - self.pi_cycle: MetricPattern22[StoredF32] = MetricPattern22(client, f'{base_path}_pi_cycle') - self.puell_multiple: MetricPattern5[StoredF32] = MetricPattern5(client, f'{base_path}_puell_multiple') - self.rsi_14d: MetricPattern22[StoredF32] = MetricPattern22(client, f'{base_path}_rsi_14d') - self.rsi_14d_max: MetricPattern22[StoredF32] = MetricPattern22(client, f'{base_path}_rsi_14d_max') - self.rsi_14d_min: MetricPattern22[StoredF32] = MetricPattern22(client, f'{base_path}_rsi_14d_min') - self.rsi_average_gain_14d: MetricPattern22[StoredF32] = MetricPattern22(client, f'{base_path}_rsi_average_gain_14d') - self.rsi_average_loss_14d: MetricPattern22[StoredF32] = MetricPattern22(client, f'{base_path}_rsi_average_loss_14d') - self.rsi_gains: MetricPattern22[StoredF32] = MetricPattern22(client, f'{base_path}_rsi_gains') - self.rsi_losses: MetricPattern22[StoredF32] = MetricPattern22(client, f'{base_path}_rsi_losses') - self.stoch_d: MetricPattern22[StoredF32] = MetricPattern22(client, f'{base_path}_stoch_d') - self.stoch_k: MetricPattern22[StoredF32] = MetricPattern22(client, f'{base_path}_stoch_k') - self.stoch_rsi: MetricPattern22[StoredF32] = MetricPattern22(client, f'{base_path}_stoch_rsi') - self.stoch_rsi_d: MetricPattern22[StoredF32] = MetricPattern22(client, f'{base_path}_stoch_rsi_d') - self.stoch_rsi_k: MetricPattern22[StoredF32] = MetricPattern22(client, f'{base_path}_stoch_rsi_k') -class CatalogTree_Computed_Market_Lookback: + def __init__(self, client: BrkClientBase, base_path: str = ''): + self.gini: MetricPattern19[StoredF32] = MetricPattern19(client, f'{base_path}_gini') + self.macd_histogram: MetricPattern19[StoredF32] = MetricPattern19(client, f'{base_path}_macd_histogram') + self.macd_line: MetricPattern19[StoredF32] = MetricPattern19(client, f'{base_path}_macd_line') + self.macd_signal: MetricPattern19[StoredF32] = 
MetricPattern19(client, f'{base_path}_macd_signal') + self.nvt: MetricPattern4[StoredF32] = MetricPattern4(client, f'{base_path}_nvt') + self.pi_cycle: MetricPattern19[StoredF32] = MetricPattern19(client, f'{base_path}_pi_cycle') + self.puell_multiple: MetricPattern4[StoredF32] = MetricPattern4(client, f'{base_path}_puell_multiple') + self.rsi_14d: MetricPattern19[StoredF32] = MetricPattern19(client, f'{base_path}_rsi_14d') + self.rsi_14d_max: MetricPattern19[StoredF32] = MetricPattern19(client, f'{base_path}_rsi_14d_max') + self.rsi_14d_min: MetricPattern19[StoredF32] = MetricPattern19(client, f'{base_path}_rsi_14d_min') + self.rsi_average_gain_14d: MetricPattern19[StoredF32] = MetricPattern19(client, f'{base_path}_rsi_average_gain_14d') + self.rsi_average_loss_14d: MetricPattern19[StoredF32] = MetricPattern19(client, f'{base_path}_rsi_average_loss_14d') + self.rsi_gains: MetricPattern19[StoredF32] = MetricPattern19(client, f'{base_path}_rsi_gains') + self.rsi_losses: MetricPattern19[StoredF32] = MetricPattern19(client, f'{base_path}_rsi_losses') + self.stoch_d: MetricPattern19[StoredF32] = MetricPattern19(client, f'{base_path}_stoch_d') + self.stoch_k: MetricPattern19[StoredF32] = MetricPattern19(client, f'{base_path}_stoch_k') + self.stoch_rsi: MetricPattern19[StoredF32] = MetricPattern19(client, f'{base_path}_stoch_rsi') + self.stoch_rsi_d: MetricPattern19[StoredF32] = MetricPattern19(client, f'{base_path}_stoch_rsi_d') + self.stoch_rsi_k: MetricPattern19[StoredF32] = MetricPattern19(client, f'{base_path}_stoch_rsi_k') + +class CatalogTree_Market_Lookback: """Catalog tree node.""" - + def __init__(self, client: BrkClientBase, base_path: str = ''): self.price_ago: PriceAgoPattern[Dollars] = PriceAgoPattern(client, 'price') -class CatalogTree_Computed_Market_MovingAverage: +class CatalogTree_Market_MovingAverage: """Catalog tree node.""" - + def __init__(self, client: BrkClientBase, base_path: str = ''): self.price_111d_sma: Price111dSmaPattern = 
Price111dSmaPattern(client, 'price_111d_sma') self.price_12d_ema: Price111dSmaPattern = Price111dSmaPattern(client, 'price_12d_ema') @@ -3663,8 +3475,8 @@ class CatalogTree_Computed_Market_MovingAverage: self.price_1y_sma: Price111dSmaPattern = Price111dSmaPattern(client, 'price_1y_sma') self.price_200d_ema: Price111dSmaPattern = Price111dSmaPattern(client, 'price_200d_ema') self.price_200d_sma: Price111dSmaPattern = Price111dSmaPattern(client, 'price_200d_sma') - self.price_200d_sma_x0_8: MetricPattern5[Dollars] = MetricPattern5(client, f'{base_path}_price_200d_sma_x0_8') - self.price_200d_sma_x2_4: MetricPattern5[Dollars] = MetricPattern5(client, f'{base_path}_price_200d_sma_x2_4') + self.price_200d_sma_x0_8: MetricPattern4[Dollars] = MetricPattern4(client, f'{base_path}_price_200d_sma_x0_8') + self.price_200d_sma_x2_4: MetricPattern4[Dollars] = MetricPattern4(client, f'{base_path}_price_200d_sma_x2_4') self.price_200w_ema: Price111dSmaPattern = Price111dSmaPattern(client, 'price_200w_ema') self.price_200w_sma: Price111dSmaPattern = Price111dSmaPattern(client, 'price_200w_sma') self.price_21d_ema: Price111dSmaPattern = Price111dSmaPattern(client, 'price_21d_ema') @@ -3675,7 +3487,7 @@ class CatalogTree_Computed_Market_MovingAverage: self.price_34d_ema: Price111dSmaPattern = Price111dSmaPattern(client, 'price_34d_ema') self.price_34d_sma: Price111dSmaPattern = Price111dSmaPattern(client, 'price_34d_sma') self.price_350d_sma: Price111dSmaPattern = Price111dSmaPattern(client, 'price_350d_sma') - self.price_350d_sma_x2: MetricPattern5[Dollars] = MetricPattern5(client, f'{base_path}_price_350d_sma_x2') + self.price_350d_sma_x2: MetricPattern4[Dollars] = MetricPattern4(client, f'{base_path}_price_350d_sma_x2') self.price_4y_ema: Price111dSmaPattern = Price111dSmaPattern(client, 'price_4y_ema') self.price_4y_sma: Price111dSmaPattern = Price111dSmaPattern(client, 'price_4y_sma') self.price_55d_ema: Price111dSmaPattern = Price111dSmaPattern(client, 'price_55d_ema') @@ 
-3685,25 +3497,25 @@ class CatalogTree_Computed_Market_MovingAverage: self.price_8d_ema: Price111dSmaPattern = Price111dSmaPattern(client, 'price_8d_ema') self.price_8d_sma: Price111dSmaPattern = Price111dSmaPattern(client, 'price_8d_sma') -class CatalogTree_Computed_Market_Range: +class CatalogTree_Market_Range: """Catalog tree node.""" - - def __init__(self, client: BrkClientBase, base_path: str = ''): - self.price_1m_max: MetricPattern5[Dollars] = MetricPattern5(client, f'{base_path}_price_1m_max') - self.price_1m_min: MetricPattern5[Dollars] = MetricPattern5(client, f'{base_path}_price_1m_min') - self.price_1w_max: MetricPattern5[Dollars] = MetricPattern5(client, f'{base_path}_price_1w_max') - self.price_1w_min: MetricPattern5[Dollars] = MetricPattern5(client, f'{base_path}_price_1w_min') - self.price_1y_max: MetricPattern5[Dollars] = MetricPattern5(client, f'{base_path}_price_1y_max') - self.price_1y_min: MetricPattern5[Dollars] = MetricPattern5(client, f'{base_path}_price_1y_min') - self.price_2w_choppiness_index: MetricPattern5[StoredF32] = MetricPattern5(client, f'{base_path}_price_2w_choppiness_index') - self.price_2w_max: MetricPattern5[Dollars] = MetricPattern5(client, f'{base_path}_price_2w_max') - self.price_2w_min: MetricPattern5[Dollars] = MetricPattern5(client, f'{base_path}_price_2w_min') - self.price_true_range: MetricPattern22[StoredF32] = MetricPattern22(client, f'{base_path}_price_true_range') - self.price_true_range_2w_sum: MetricPattern22[StoredF32] = MetricPattern22(client, f'{base_path}_price_true_range_2w_sum') -class CatalogTree_Computed_Market_Returns: + def __init__(self, client: BrkClientBase, base_path: str = ''): + self.price_1m_max: MetricPattern4[Dollars] = MetricPattern4(client, f'{base_path}_price_1m_max') + self.price_1m_min: MetricPattern4[Dollars] = MetricPattern4(client, f'{base_path}_price_1m_min') + self.price_1w_max: MetricPattern4[Dollars] = MetricPattern4(client, f'{base_path}_price_1w_max') + self.price_1w_min: 
MetricPattern4[Dollars] = MetricPattern4(client, f'{base_path}_price_1w_min') + self.price_1y_max: MetricPattern4[Dollars] = MetricPattern4(client, f'{base_path}_price_1y_max') + self.price_1y_min: MetricPattern4[Dollars] = MetricPattern4(client, f'{base_path}_price_1y_min') + self.price_2w_choppiness_index: MetricPattern4[StoredF32] = MetricPattern4(client, f'{base_path}_price_2w_choppiness_index') + self.price_2w_max: MetricPattern4[Dollars] = MetricPattern4(client, f'{base_path}_price_2w_max') + self.price_2w_min: MetricPattern4[Dollars] = MetricPattern4(client, f'{base_path}_price_2w_min') + self.price_true_range: MetricPattern19[StoredF32] = MetricPattern19(client, f'{base_path}_price_true_range') + self.price_true_range_2w_sum: MetricPattern19[StoredF32] = MetricPattern19(client, f'{base_path}_price_true_range_2w_sum') + +class CatalogTree_Market_Returns: """Catalog tree node.""" - + def __init__(self, client: BrkClientBase, base_path: str = ''): self._1d_returns_1m_sd: _1dReturns1mSdPattern = _1dReturns1mSdPattern(client, '1d_returns_1m_sd') self._1d_returns_1w_sd: _1dReturns1mSdPattern = _1dReturns1mSdPattern(client, '1d_returns_1w_sd') @@ -3712,514 +3524,399 @@ class CatalogTree_Computed_Market_Returns: self.downside_1m_sd: _1dReturns1mSdPattern = _1dReturns1mSdPattern(client, 'downside_1m_sd') self.downside_1w_sd: _1dReturns1mSdPattern = _1dReturns1mSdPattern(client, 'downside_1w_sd') self.downside_1y_sd: _1dReturns1mSdPattern = _1dReturns1mSdPattern(client, 'downside_1y_sd') - self.downside_returns: MetricPattern22[StoredF32] = MetricPattern22(client, f'{base_path}_downside_returns') + self.downside_returns: MetricPattern19[StoredF32] = MetricPattern19(client, f'{base_path}_downside_returns') self.price_returns: PriceAgoPattern[StoredF32] = PriceAgoPattern(client, 'price_returns') -class CatalogTree_Computed_Market_Volatility: +class CatalogTree_Market_Volatility: """Catalog tree node.""" - - def __init__(self, client: BrkClientBase, base_path: str = 
''): - self.price_1m_volatility: MetricPattern5[StoredF32] = MetricPattern5(client, f'{base_path}_price_1m_volatility') - self.price_1w_volatility: MetricPattern5[StoredF32] = MetricPattern5(client, f'{base_path}_price_1w_volatility') - self.price_1y_volatility: MetricPattern5[StoredF32] = MetricPattern5(client, f'{base_path}_price_1y_volatility') - self.sharpe_1m: MetricPattern22[StoredF32] = MetricPattern22(client, f'{base_path}_sharpe_1m') - self.sharpe_1w: MetricPattern22[StoredF32] = MetricPattern22(client, f'{base_path}_sharpe_1w') - self.sharpe_1y: MetricPattern22[StoredF32] = MetricPattern22(client, f'{base_path}_sharpe_1y') - self.sortino_1m: MetricPattern22[StoredF32] = MetricPattern22(client, f'{base_path}_sortino_1m') - self.sortino_1w: MetricPattern22[StoredF32] = MetricPattern22(client, f'{base_path}_sortino_1w') - self.sortino_1y: MetricPattern22[StoredF32] = MetricPattern22(client, f'{base_path}_sortino_1y') -class CatalogTree_Computed_Outputs: - """Catalog tree node.""" - def __init__(self, client: BrkClientBase, base_path: str = ''): - self.count: CatalogTree_Computed_Outputs_Count = CatalogTree_Computed_Outputs_Count(client, f'{base_path}_count') - self.spent: CatalogTree_Computed_Outputs_Spent = CatalogTree_Computed_Outputs_Spent(client, f'{base_path}_spent') + self.price_1m_volatility: MetricPattern4[StoredF32] = MetricPattern4(client, f'{base_path}_price_1m_volatility') + self.price_1w_volatility: MetricPattern4[StoredF32] = MetricPattern4(client, f'{base_path}_price_1w_volatility') + self.price_1y_volatility: MetricPattern4[StoredF32] = MetricPattern4(client, f'{base_path}_price_1y_volatility') + self.sharpe_1m: MetricPattern19[StoredF32] = MetricPattern19(client, f'{base_path}_sharpe_1m') + self.sharpe_1w: MetricPattern19[StoredF32] = MetricPattern19(client, f'{base_path}_sharpe_1w') + self.sharpe_1y: MetricPattern19[StoredF32] = MetricPattern19(client, f'{base_path}_sharpe_1y') + self.sortino_1m: MetricPattern19[StoredF32] = 
MetricPattern19(client, f'{base_path}_sortino_1m') + self.sortino_1w: MetricPattern19[StoredF32] = MetricPattern19(client, f'{base_path}_sortino_1w') + self.sortino_1y: MetricPattern19[StoredF32] = MetricPattern19(client, f'{base_path}_sortino_1y') -class CatalogTree_Computed_Outputs_Count: +class CatalogTree_Outputs: """Catalog tree node.""" - + def __init__(self, client: BrkClientBase, base_path: str = ''): - self.count: CountPattern2[StoredU64] = CountPattern2(client, 'output_count') - self.utxo_count: DollarsPattern[StoredU64] = DollarsPattern(client, 'exact_utxo_count') + self.count: CatalogTree_Outputs_Count = CatalogTree_Outputs_Count(client, f'{base_path}_count') + self.first_txoutindex: MetricPattern23[TxOutIndex] = MetricPattern23(client, f'{base_path}_first_txoutindex') + self.outputtype: MetricPattern26[OutputType] = MetricPattern26(client, f'{base_path}_outputtype') + self.spent: CatalogTree_Outputs_Spent = CatalogTree_Outputs_Spent(client, f'{base_path}_spent') + self.txindex: MetricPattern26[TxIndex] = MetricPattern26(client, f'{base_path}_txindex') + self.typeindex: MetricPattern26[TypeIndex] = MetricPattern26(client, f'{base_path}_typeindex') + self.value: MetricPattern26[Sats] = MetricPattern26(client, f'{base_path}_value') -class CatalogTree_Computed_Outputs_Spent: +class CatalogTree_Outputs_Count: """Catalog tree node.""" - + def __init__(self, client: BrkClientBase, base_path: str = ''): - self.txinindex: MetricPattern29[TxInIndex] = MetricPattern29(client, f'{base_path}_txinindex') + self.count: BlockSizePattern[StoredU64] = BlockSizePattern(client, 'output_count') + self.utxo_count: BitcoinPattern[StoredU64] = BitcoinPattern(client, 'exact_utxo_count') -class CatalogTree_Computed_Pools: +class CatalogTree_Outputs_Spent: """Catalog tree node.""" - + def __init__(self, client: BrkClientBase, base_path: str = ''): - self.pool: MetricPattern26[PoolSlug] = MetricPattern26(client, f'{base_path}_pool') - self.vecs: CatalogTree_Computed_Pools_Vecs = 
CatalogTree_Computed_Pools_Vecs(client, f'{base_path}_vecs') + self.txinindex: MetricPattern26[TxInIndex] = MetricPattern26(client, f'{base_path}_txinindex') -class CatalogTree_Computed_Pools_Vecs: +class CatalogTree_Pools: """Catalog tree node.""" - + def __init__(self, client: BrkClientBase, base_path: str = ''): - self.axbt: AXbtPattern = AXbtPattern(client, 'axbt') - self.aaopool: AXbtPattern = AXbtPattern(client, 'aaopool') - self.antpool: AXbtPattern = AXbtPattern(client, 'antpool') - self.arkpool: AXbtPattern = AXbtPattern(client, 'arkpool') - self.asicminer: AXbtPattern = AXbtPattern(client, 'asicminer') - self.batpool: AXbtPattern = AXbtPattern(client, 'batpool') - self.bcmonster: AXbtPattern = AXbtPattern(client, 'bcmonster') - self.bcpoolio: AXbtPattern = AXbtPattern(client, 'bcpoolio') - self.binancepool: AXbtPattern = AXbtPattern(client, 'binancepool') - self.bitclub: AXbtPattern = AXbtPattern(client, 'bitclub') - self.bitfufupool: AXbtPattern = AXbtPattern(client, 'bitfufupool') - self.bitfury: AXbtPattern = AXbtPattern(client, 'bitfury') - self.bitminter: AXbtPattern = AXbtPattern(client, 'bitminter') - self.bitalo: AXbtPattern = AXbtPattern(client, 'bitalo') - self.bitcoinaffiliatenetwork: AXbtPattern = AXbtPattern(client, 'bitcoinaffiliatenetwork') - self.bitcoincom: AXbtPattern = AXbtPattern(client, 'bitcoincom') - self.bitcoinindia: AXbtPattern = AXbtPattern(client, 'bitcoinindia') - self.bitcoinrussia: AXbtPattern = AXbtPattern(client, 'bitcoinrussia') - self.bitcoinukraine: AXbtPattern = AXbtPattern(client, 'bitcoinukraine') - self.bitfarms: AXbtPattern = AXbtPattern(client, 'bitfarms') - self.bitparking: AXbtPattern = AXbtPattern(client, 'bitparking') - self.bitsolo: AXbtPattern = AXbtPattern(client, 'bitsolo') - self.bixin: AXbtPattern = AXbtPattern(client, 'bixin') - self.blockfills: AXbtPattern = AXbtPattern(client, 'blockfills') - self.braiinspool: AXbtPattern = AXbtPattern(client, 'braiinspool') - self.bravomining: AXbtPattern = 
AXbtPattern(client, 'bravomining') - self.btpool: AXbtPattern = AXbtPattern(client, 'btpool') - self.btccom: AXbtPattern = AXbtPattern(client, 'btccom') - self.btcdig: AXbtPattern = AXbtPattern(client, 'btcdig') - self.btcguild: AXbtPattern = AXbtPattern(client, 'btcguild') - self.btclab: AXbtPattern = AXbtPattern(client, 'btclab') - self.btcmp: AXbtPattern = AXbtPattern(client, 'btcmp') - self.btcnuggets: AXbtPattern = AXbtPattern(client, 'btcnuggets') - self.btcpoolparty: AXbtPattern = AXbtPattern(client, 'btcpoolparty') - self.btcserv: AXbtPattern = AXbtPattern(client, 'btcserv') - self.btctop: AXbtPattern = AXbtPattern(client, 'btctop') - self.btcc: AXbtPattern = AXbtPattern(client, 'btcc') - self.bwpool: AXbtPattern = AXbtPattern(client, 'bwpool') - self.bytepool: AXbtPattern = AXbtPattern(client, 'bytepool') - self.canoe: AXbtPattern = AXbtPattern(client, 'canoe') - self.canoepool: AXbtPattern = AXbtPattern(client, 'canoepool') - self.carbonnegative: AXbtPattern = AXbtPattern(client, 'carbonnegative') - self.ckpool: AXbtPattern = AXbtPattern(client, 'ckpool') - self.cloudhashing: AXbtPattern = AXbtPattern(client, 'cloudhashing') - self.coinlab: AXbtPattern = AXbtPattern(client, 'coinlab') - self.cointerra: AXbtPattern = AXbtPattern(client, 'cointerra') - self.connectbtc: AXbtPattern = AXbtPattern(client, 'connectbtc') - self.dpool: AXbtPattern = AXbtPattern(client, 'dpool') - self.dcexploration: AXbtPattern = AXbtPattern(client, 'dcexploration') - self.dcex: AXbtPattern = AXbtPattern(client, 'dcex') - self.digitalbtc: AXbtPattern = AXbtPattern(client, 'digitalbtc') - self.digitalxmintsy: AXbtPattern = AXbtPattern(client, 'digitalxmintsy') - self.eclipsemc: AXbtPattern = AXbtPattern(client, 'eclipsemc') - self.eightbaochi: AXbtPattern = AXbtPattern(client, 'eightbaochi') - self.ekanembtc: AXbtPattern = AXbtPattern(client, 'ekanembtc') - self.eligius: AXbtPattern = AXbtPattern(client, 'eligius') - self.emcdpool: AXbtPattern = AXbtPattern(client, 'emcdpool') - 
self.entrustcharitypool: AXbtPattern = AXbtPattern(client, 'entrustcharitypool') - self.eobot: AXbtPattern = AXbtPattern(client, 'eobot') - self.exxbw: AXbtPattern = AXbtPattern(client, 'exxbw') - self.f2pool: AXbtPattern = AXbtPattern(client, 'f2pool') - self.fiftyeightcoin: AXbtPattern = AXbtPattern(client, 'fiftyeightcoin') - self.foundryusa: AXbtPattern = AXbtPattern(client, 'foundryusa') - self.futurebitapollosolo: AXbtPattern = AXbtPattern(client, 'futurebitapollosolo') - self.gbminers: AXbtPattern = AXbtPattern(client, 'gbminers') - self.ghashio: AXbtPattern = AXbtPattern(client, 'ghashio') - self.givemecoins: AXbtPattern = AXbtPattern(client, 'givemecoins') - self.gogreenlight: AXbtPattern = AXbtPattern(client, 'gogreenlight') - self.haozhuzhu: AXbtPattern = AXbtPattern(client, 'haozhuzhu') - self.haominer: AXbtPattern = AXbtPattern(client, 'haominer') - self.hashbx: AXbtPattern = AXbtPattern(client, 'hashbx') - self.hashpool: AXbtPattern = AXbtPattern(client, 'hashpool') - self.helix: AXbtPattern = AXbtPattern(client, 'helix') - self.hhtt: AXbtPattern = AXbtPattern(client, 'hhtt') - self.hotpool: AXbtPattern = AXbtPattern(client, 'hotpool') - self.hummerpool: AXbtPattern = AXbtPattern(client, 'hummerpool') - self.huobipool: AXbtPattern = AXbtPattern(client, 'huobipool') - self.innopolistech: AXbtPattern = AXbtPattern(client, 'innopolistech') - self.kanopool: AXbtPattern = AXbtPattern(client, 'kanopool') - self.kncminer: AXbtPattern = AXbtPattern(client, 'kncminer') - self.kucoinpool: AXbtPattern = AXbtPattern(client, 'kucoinpool') - self.lubiancom: AXbtPattern = AXbtPattern(client, 'lubiancom') - self.luckypool: AXbtPattern = AXbtPattern(client, 'luckypool') - self.luxor: AXbtPattern = AXbtPattern(client, 'luxor') - self.marapool: AXbtPattern = AXbtPattern(client, 'marapool') - self.maxbtc: AXbtPattern = AXbtPattern(client, 'maxbtc') - self.maxipool: AXbtPattern = AXbtPattern(client, 'maxipool') - self.megabigpower: AXbtPattern = AXbtPattern(client, 
'megabigpower') - self.minerium: AXbtPattern = AXbtPattern(client, 'minerium') - self.miningcity: AXbtPattern = AXbtPattern(client, 'miningcity') - self.miningdutch: AXbtPattern = AXbtPattern(client, 'miningdutch') - self.miningkings: AXbtPattern = AXbtPattern(client, 'miningkings') - self.miningsquared: AXbtPattern = AXbtPattern(client, 'miningsquared') - self.mmpool: AXbtPattern = AXbtPattern(client, 'mmpool') - self.mtred: AXbtPattern = AXbtPattern(client, 'mtred') - self.multicoinco: AXbtPattern = AXbtPattern(client, 'multicoinco') - self.multipool: AXbtPattern = AXbtPattern(client, 'multipool') - self.mybtccoinpool: AXbtPattern = AXbtPattern(client, 'mybtccoinpool') - self.neopool: AXbtPattern = AXbtPattern(client, 'neopool') - self.nexious: AXbtPattern = AXbtPattern(client, 'nexious') - self.nicehash: AXbtPattern = AXbtPattern(client, 'nicehash') - self.nmcbit: AXbtPattern = AXbtPattern(client, 'nmcbit') - self.novablock: AXbtPattern = AXbtPattern(client, 'novablock') - self.ocean: AXbtPattern = AXbtPattern(client, 'ocean') - self.okexpool: AXbtPattern = AXbtPattern(client, 'okexpool') - self.okminer: AXbtPattern = AXbtPattern(client, 'okminer') - self.okkong: AXbtPattern = AXbtPattern(client, 'okkong') - self.okpooltop: AXbtPattern = AXbtPattern(client, 'okpooltop') - self.onehash: AXbtPattern = AXbtPattern(client, 'onehash') - self.onem1x: AXbtPattern = AXbtPattern(client, 'onem1x') - self.onethash: AXbtPattern = AXbtPattern(client, 'onethash') - self.ozcoin: AXbtPattern = AXbtPattern(client, 'ozcoin') - self.phashio: AXbtPattern = AXbtPattern(client, 'phashio') - self.parasite: AXbtPattern = AXbtPattern(client, 'parasite') - self.patels: AXbtPattern = AXbtPattern(client, 'patels') - self.pegapool: AXbtPattern = AXbtPattern(client, 'pegapool') - self.phoenix: AXbtPattern = AXbtPattern(client, 'phoenix') - self.polmine: AXbtPattern = AXbtPattern(client, 'polmine') - self.pool175btc: AXbtPattern = AXbtPattern(client, 'pool175btc') - self.pool50btc: 
AXbtPattern = AXbtPattern(client, 'pool50btc') - self.poolin: AXbtPattern = AXbtPattern(client, 'poolin') - self.portlandhodl: AXbtPattern = AXbtPattern(client, 'portlandhodl') - self.publicpool: AXbtPattern = AXbtPattern(client, 'publicpool') - self.purebtccom: AXbtPattern = AXbtPattern(client, 'purebtccom') - self.rawpool: AXbtPattern = AXbtPattern(client, 'rawpool') - self.rigpool: AXbtPattern = AXbtPattern(client, 'rigpool') - self.sbicrypto: AXbtPattern = AXbtPattern(client, 'sbicrypto') - self.secpool: AXbtPattern = AXbtPattern(client, 'secpool') - self.secretsuperstar: AXbtPattern = AXbtPattern(client, 'secretsuperstar') - self.sevenpool: AXbtPattern = AXbtPattern(client, 'sevenpool') - self.shawnp0wers: AXbtPattern = AXbtPattern(client, 'shawnp0wers') - self.sigmapoolcom: AXbtPattern = AXbtPattern(client, 'sigmapoolcom') - self.simplecoinus: AXbtPattern = AXbtPattern(client, 'simplecoinus') - self.solock: AXbtPattern = AXbtPattern(client, 'solock') - self.spiderpool: AXbtPattern = AXbtPattern(client, 'spiderpool') - self.stminingcorp: AXbtPattern = AXbtPattern(client, 'stminingcorp') - self.tangpool: AXbtPattern = AXbtPattern(client, 'tangpool') - self.tatmaspool: AXbtPattern = AXbtPattern(client, 'tatmaspool') - self.tbdice: AXbtPattern = AXbtPattern(client, 'tbdice') - self.telco214: AXbtPattern = AXbtPattern(client, 'telco214') - self.terrapool: AXbtPattern = AXbtPattern(client, 'terrapool') - self.tiger: AXbtPattern = AXbtPattern(client, 'tiger') - self.tigerpoolnet: AXbtPattern = AXbtPattern(client, 'tigerpoolnet') - self.titan: AXbtPattern = AXbtPattern(client, 'titan') - self.transactioncoinmining: AXbtPattern = AXbtPattern(client, 'transactioncoinmining') - self.trickysbtcpool: AXbtPattern = AXbtPattern(client, 'trickysbtcpool') - self.triplemining: AXbtPattern = AXbtPattern(client, 'triplemining') - self.twentyoneinc: AXbtPattern = AXbtPattern(client, 'twentyoneinc') - self.ultimuspool: AXbtPattern = AXbtPattern(client, 'ultimuspool') - 
self.unknown: AXbtPattern = AXbtPattern(client, 'unknown') - self.unomp: AXbtPattern = AXbtPattern(client, 'unomp') - self.viabtc: AXbtPattern = AXbtPattern(client, 'viabtc') - self.waterhole: AXbtPattern = AXbtPattern(client, 'waterhole') - self.wayicn: AXbtPattern = AXbtPattern(client, 'wayicn') - self.whitepool: AXbtPattern = AXbtPattern(client, 'whitepool') - self.wk057: AXbtPattern = AXbtPattern(client, 'wk057') - self.yourbtcnet: AXbtPattern = AXbtPattern(client, 'yourbtcnet') - self.zulupool: AXbtPattern = AXbtPattern(client, 'zulupool') + self.pool: MetricPattern23[PoolSlug] = MetricPattern23(client, f'{base_path}_pool') + self.vecs: CatalogTree_Pools_Vecs = CatalogTree_Pools_Vecs(client, f'{base_path}_vecs') -class CatalogTree_Computed_Positions: +class CatalogTree_Pools_Vecs: """Catalog tree node.""" - + def __init__(self, client: BrkClientBase, base_path: str = ''): - self.position: MetricPattern17[BlkPosition] = MetricPattern17(client, f'{base_path}_position') + self.aaopool: AaopoolPattern = AaopoolPattern(client, 'aaopool') + self.antpool: AaopoolPattern = AaopoolPattern(client, 'antpool') + self.arkpool: AaopoolPattern = AaopoolPattern(client, 'arkpool') + self.asicminer: AaopoolPattern = AaopoolPattern(client, 'asicminer') + self.axbt: AaopoolPattern = AaopoolPattern(client, 'axbt') + self.batpool: AaopoolPattern = AaopoolPattern(client, 'batpool') + self.bcmonster: AaopoolPattern = AaopoolPattern(client, 'bcmonster') + self.bcpoolio: AaopoolPattern = AaopoolPattern(client, 'bcpoolio') + self.binancepool: AaopoolPattern = AaopoolPattern(client, 'binancepool') + self.bitalo: AaopoolPattern = AaopoolPattern(client, 'bitalo') + self.bitclub: AaopoolPattern = AaopoolPattern(client, 'bitclub') + self.bitcoinaffiliatenetwork: AaopoolPattern = AaopoolPattern(client, 'bitcoinaffiliatenetwork') + self.bitcoincom: AaopoolPattern = AaopoolPattern(client, 'bitcoincom') + self.bitcoinindia: AaopoolPattern = AaopoolPattern(client, 'bitcoinindia') + 
self.bitcoinrussia: AaopoolPattern = AaopoolPattern(client, 'bitcoinrussia') + self.bitcoinukraine: AaopoolPattern = AaopoolPattern(client, 'bitcoinukraine') + self.bitfarms: AaopoolPattern = AaopoolPattern(client, 'bitfarms') + self.bitfufupool: AaopoolPattern = AaopoolPattern(client, 'bitfufupool') + self.bitfury: AaopoolPattern = AaopoolPattern(client, 'bitfury') + self.bitminter: AaopoolPattern = AaopoolPattern(client, 'bitminter') + self.bitparking: AaopoolPattern = AaopoolPattern(client, 'bitparking') + self.bitsolo: AaopoolPattern = AaopoolPattern(client, 'bitsolo') + self.bixin: AaopoolPattern = AaopoolPattern(client, 'bixin') + self.blockfills: AaopoolPattern = AaopoolPattern(client, 'blockfills') + self.braiinspool: AaopoolPattern = AaopoolPattern(client, 'braiinspool') + self.bravomining: AaopoolPattern = AaopoolPattern(client, 'bravomining') + self.btcc: AaopoolPattern = AaopoolPattern(client, 'btcc') + self.btccom: AaopoolPattern = AaopoolPattern(client, 'btccom') + self.btcdig: AaopoolPattern = AaopoolPattern(client, 'btcdig') + self.btcguild: AaopoolPattern = AaopoolPattern(client, 'btcguild') + self.btclab: AaopoolPattern = AaopoolPattern(client, 'btclab') + self.btcmp: AaopoolPattern = AaopoolPattern(client, 'btcmp') + self.btcnuggets: AaopoolPattern = AaopoolPattern(client, 'btcnuggets') + self.btcpoolparty: AaopoolPattern = AaopoolPattern(client, 'btcpoolparty') + self.btcserv: AaopoolPattern = AaopoolPattern(client, 'btcserv') + self.btctop: AaopoolPattern = AaopoolPattern(client, 'btctop') + self.btpool: AaopoolPattern = AaopoolPattern(client, 'btpool') + self.bwpool: AaopoolPattern = AaopoolPattern(client, 'bwpool') + self.bytepool: AaopoolPattern = AaopoolPattern(client, 'bytepool') + self.canoe: AaopoolPattern = AaopoolPattern(client, 'canoe') + self.canoepool: AaopoolPattern = AaopoolPattern(client, 'canoepool') + self.carbonnegative: AaopoolPattern = AaopoolPattern(client, 'carbonnegative') + self.ckpool: AaopoolPattern = 
AaopoolPattern(client, 'ckpool') + self.cloudhashing: AaopoolPattern = AaopoolPattern(client, 'cloudhashing') + self.coinlab: AaopoolPattern = AaopoolPattern(client, 'coinlab') + self.cointerra: AaopoolPattern = AaopoolPattern(client, 'cointerra') + self.connectbtc: AaopoolPattern = AaopoolPattern(client, 'connectbtc') + self.dcex: AaopoolPattern = AaopoolPattern(client, 'dcex') + self.dcexploration: AaopoolPattern = AaopoolPattern(client, 'dcexploration') + self.digitalbtc: AaopoolPattern = AaopoolPattern(client, 'digitalbtc') + self.digitalxmintsy: AaopoolPattern = AaopoolPattern(client, 'digitalxmintsy') + self.dpool: AaopoolPattern = AaopoolPattern(client, 'dpool') + self.eclipsemc: AaopoolPattern = AaopoolPattern(client, 'eclipsemc') + self.eightbaochi: AaopoolPattern = AaopoolPattern(client, 'eightbaochi') + self.ekanembtc: AaopoolPattern = AaopoolPattern(client, 'ekanembtc') + self.eligius: AaopoolPattern = AaopoolPattern(client, 'eligius') + self.emcdpool: AaopoolPattern = AaopoolPattern(client, 'emcdpool') + self.entrustcharitypool: AaopoolPattern = AaopoolPattern(client, 'entrustcharitypool') + self.eobot: AaopoolPattern = AaopoolPattern(client, 'eobot') + self.exxbw: AaopoolPattern = AaopoolPattern(client, 'exxbw') + self.f2pool: AaopoolPattern = AaopoolPattern(client, 'f2pool') + self.fiftyeightcoin: AaopoolPattern = AaopoolPattern(client, 'fiftyeightcoin') + self.foundryusa: AaopoolPattern = AaopoolPattern(client, 'foundryusa') + self.futurebitapollosolo: AaopoolPattern = AaopoolPattern(client, 'futurebitapollosolo') + self.gbminers: AaopoolPattern = AaopoolPattern(client, 'gbminers') + self.ghashio: AaopoolPattern = AaopoolPattern(client, 'ghashio') + self.givemecoins: AaopoolPattern = AaopoolPattern(client, 'givemecoins') + self.gogreenlight: AaopoolPattern = AaopoolPattern(client, 'gogreenlight') + self.haominer: AaopoolPattern = AaopoolPattern(client, 'haominer') + self.haozhuzhu: AaopoolPattern = AaopoolPattern(client, 'haozhuzhu') + self.hashbx: 
AaopoolPattern = AaopoolPattern(client, 'hashbx') + self.hashpool: AaopoolPattern = AaopoolPattern(client, 'hashpool') + self.helix: AaopoolPattern = AaopoolPattern(client, 'helix') + self.hhtt: AaopoolPattern = AaopoolPattern(client, 'hhtt') + self.hotpool: AaopoolPattern = AaopoolPattern(client, 'hotpool') + self.hummerpool: AaopoolPattern = AaopoolPattern(client, 'hummerpool') + self.huobipool: AaopoolPattern = AaopoolPattern(client, 'huobipool') + self.innopolistech: AaopoolPattern = AaopoolPattern(client, 'innopolistech') + self.kanopool: AaopoolPattern = AaopoolPattern(client, 'kanopool') + self.kncminer: AaopoolPattern = AaopoolPattern(client, 'kncminer') + self.kucoinpool: AaopoolPattern = AaopoolPattern(client, 'kucoinpool') + self.lubiancom: AaopoolPattern = AaopoolPattern(client, 'lubiancom') + self.luckypool: AaopoolPattern = AaopoolPattern(client, 'luckypool') + self.luxor: AaopoolPattern = AaopoolPattern(client, 'luxor') + self.marapool: AaopoolPattern = AaopoolPattern(client, 'marapool') + self.maxbtc: AaopoolPattern = AaopoolPattern(client, 'maxbtc') + self.maxipool: AaopoolPattern = AaopoolPattern(client, 'maxipool') + self.megabigpower: AaopoolPattern = AaopoolPattern(client, 'megabigpower') + self.minerium: AaopoolPattern = AaopoolPattern(client, 'minerium') + self.miningcity: AaopoolPattern = AaopoolPattern(client, 'miningcity') + self.miningdutch: AaopoolPattern = AaopoolPattern(client, 'miningdutch') + self.miningkings: AaopoolPattern = AaopoolPattern(client, 'miningkings') + self.miningsquared: AaopoolPattern = AaopoolPattern(client, 'miningsquared') + self.mmpool: AaopoolPattern = AaopoolPattern(client, 'mmpool') + self.mtred: AaopoolPattern = AaopoolPattern(client, 'mtred') + self.multicoinco: AaopoolPattern = AaopoolPattern(client, 'multicoinco') + self.multipool: AaopoolPattern = AaopoolPattern(client, 'multipool') + self.mybtccoinpool: AaopoolPattern = AaopoolPattern(client, 'mybtccoinpool') + self.neopool: AaopoolPattern = 
AaopoolPattern(client, 'neopool') + self.nexious: AaopoolPattern = AaopoolPattern(client, 'nexious') + self.nicehash: AaopoolPattern = AaopoolPattern(client, 'nicehash') + self.nmcbit: AaopoolPattern = AaopoolPattern(client, 'nmcbit') + self.novablock: AaopoolPattern = AaopoolPattern(client, 'novablock') + self.ocean: AaopoolPattern = AaopoolPattern(client, 'ocean') + self.okexpool: AaopoolPattern = AaopoolPattern(client, 'okexpool') + self.okkong: AaopoolPattern = AaopoolPattern(client, 'okkong') + self.okminer: AaopoolPattern = AaopoolPattern(client, 'okminer') + self.okpooltop: AaopoolPattern = AaopoolPattern(client, 'okpooltop') + self.onehash: AaopoolPattern = AaopoolPattern(client, 'onehash') + self.onem1x: AaopoolPattern = AaopoolPattern(client, 'onem1x') + self.onethash: AaopoolPattern = AaopoolPattern(client, 'onethash') + self.ozcoin: AaopoolPattern = AaopoolPattern(client, 'ozcoin') + self.parasite: AaopoolPattern = AaopoolPattern(client, 'parasite') + self.patels: AaopoolPattern = AaopoolPattern(client, 'patels') + self.pegapool: AaopoolPattern = AaopoolPattern(client, 'pegapool') + self.phashio: AaopoolPattern = AaopoolPattern(client, 'phashio') + self.phoenix: AaopoolPattern = AaopoolPattern(client, 'phoenix') + self.polmine: AaopoolPattern = AaopoolPattern(client, 'polmine') + self.pool175btc: AaopoolPattern = AaopoolPattern(client, 'pool175btc') + self.pool50btc: AaopoolPattern = AaopoolPattern(client, 'pool50btc') + self.poolin: AaopoolPattern = AaopoolPattern(client, 'poolin') + self.portlandhodl: AaopoolPattern = AaopoolPattern(client, 'portlandhodl') + self.publicpool: AaopoolPattern = AaopoolPattern(client, 'publicpool') + self.purebtccom: AaopoolPattern = AaopoolPattern(client, 'purebtccom') + self.rawpool: AaopoolPattern = AaopoolPattern(client, 'rawpool') + self.rigpool: AaopoolPattern = AaopoolPattern(client, 'rigpool') + self.sbicrypto: AaopoolPattern = AaopoolPattern(client, 'sbicrypto') + self.secpool: AaopoolPattern = 
AaopoolPattern(client, 'secpool') + self.secretsuperstar: AaopoolPattern = AaopoolPattern(client, 'secretsuperstar') + self.sevenpool: AaopoolPattern = AaopoolPattern(client, 'sevenpool') + self.shawnp0wers: AaopoolPattern = AaopoolPattern(client, 'shawnp0wers') + self.sigmapoolcom: AaopoolPattern = AaopoolPattern(client, 'sigmapoolcom') + self.simplecoinus: AaopoolPattern = AaopoolPattern(client, 'simplecoinus') + self.solock: AaopoolPattern = AaopoolPattern(client, 'solock') + self.spiderpool: AaopoolPattern = AaopoolPattern(client, 'spiderpool') + self.stminingcorp: AaopoolPattern = AaopoolPattern(client, 'stminingcorp') + self.tangpool: AaopoolPattern = AaopoolPattern(client, 'tangpool') + self.tatmaspool: AaopoolPattern = AaopoolPattern(client, 'tatmaspool') + self.tbdice: AaopoolPattern = AaopoolPattern(client, 'tbdice') + self.telco214: AaopoolPattern = AaopoolPattern(client, 'telco214') + self.terrapool: AaopoolPattern = AaopoolPattern(client, 'terrapool') + self.tiger: AaopoolPattern = AaopoolPattern(client, 'tiger') + self.tigerpoolnet: AaopoolPattern = AaopoolPattern(client, 'tigerpoolnet') + self.titan: AaopoolPattern = AaopoolPattern(client, 'titan') + self.transactioncoinmining: AaopoolPattern = AaopoolPattern(client, 'transactioncoinmining') + self.trickysbtcpool: AaopoolPattern = AaopoolPattern(client, 'trickysbtcpool') + self.triplemining: AaopoolPattern = AaopoolPattern(client, 'triplemining') + self.twentyoneinc: AaopoolPattern = AaopoolPattern(client, 'twentyoneinc') + self.ultimuspool: AaopoolPattern = AaopoolPattern(client, 'ultimuspool') + self.unknown: AaopoolPattern = AaopoolPattern(client, 'unknown') + self.unomp: AaopoolPattern = AaopoolPattern(client, 'unomp') + self.viabtc: AaopoolPattern = AaopoolPattern(client, 'viabtc') + self.waterhole: AaopoolPattern = AaopoolPattern(client, 'waterhole') + self.wayicn: AaopoolPattern = AaopoolPattern(client, 'wayicn') + self.whitepool: AaopoolPattern = AaopoolPattern(client, 'whitepool') + 
self.wk057: AaopoolPattern = AaopoolPattern(client, 'wk057') + self.yourbtcnet: AaopoolPattern = AaopoolPattern(client, 'yourbtcnet') + self.zulupool: AaopoolPattern = AaopoolPattern(client, 'zulupool') -class CatalogTree_Computed_Price: +class CatalogTree_Positions: """Catalog tree node.""" - + def __init__(self, client: BrkClientBase, base_path: str = ''): - self.ohlc: CatalogTree_Computed_Price_Ohlc = CatalogTree_Computed_Price_Ohlc(client, f'{base_path}_ohlc') - self.sats: CatalogTree_Computed_Price_Sats = CatalogTree_Computed_Price_Sats(client, f'{base_path}_sats') - self.usd: CatalogTree_Computed_Price_Usd = CatalogTree_Computed_Price_Usd(client, f'{base_path}_usd') + self.position: MetricPattern14[BlkPosition] = MetricPattern14(client, f'{base_path}_position') -class CatalogTree_Computed_Price_Ohlc: +class CatalogTree_Price: """Catalog tree node.""" - + def __init__(self, client: BrkClientBase, base_path: str = ''): - self.ohlc_in_cents: MetricPattern10[OHLCCents] = MetricPattern10(client, f'{base_path}_ohlc_in_cents') + self.cents: CatalogTree_Price_Cents = CatalogTree_Price_Cents(client, f'{base_path}_cents') + self.sats: CatalogTree_Price_Sats = CatalogTree_Price_Sats(client, f'{base_path}_sats') + self.usd: CatalogTree_Price_Usd = CatalogTree_Price_Usd(client, f'{base_path}_usd') -class CatalogTree_Computed_Price_Sats: +class CatalogTree_Price_Cents: """Catalog tree node.""" - + def __init__(self, client: BrkClientBase, base_path: str = ''): - self.price_close_in_sats: MetricPattern1[Sats] = MetricPattern1(client, f'{base_path}_price_close_in_sats') - self.price_high_in_sats: PriceHighInSatsPattern[Sats] = PriceHighInSatsPattern(client, 'price_high_in_sats') - self.price_low_in_sats: PriceLowInSatsPattern[Sats] = PriceLowInSatsPattern(client, 'price_low_in_sats') - self.price_ohlc_in_sats: MetricPattern1[OHLCSats] = MetricPattern1(client, f'{base_path}_price_ohlc_in_sats') - self.price_open_in_sats: MetricPattern1[Sats] = MetricPattern1(client, 
f'{base_path}_price_open_in_sats') + self.ohlc: MetricPattern7[OHLCCents] = MetricPattern7(client, f'{base_path}_ohlc') -class CatalogTree_Computed_Price_Usd: +class CatalogTree_Price_Sats: """Catalog tree node.""" - + + def __init__(self, client: BrkClientBase, base_path: str = ''): + self.price_close_sats: MetricPattern1[Sats] = MetricPattern1(client, f'{base_path}_price_close_sats') + self.price_high_sats: PriceHighSatsPattern[Sats] = PriceHighSatsPattern(client, 'price_high_sats') + self.price_low_sats: PriceHighSatsPattern[Sats] = PriceHighSatsPattern(client, 'price_low_sats') + self.price_ohlc_sats: MetricPattern1[OHLCSats] = MetricPattern1(client, f'{base_path}_price_ohlc_sats') + self.price_open_sats: MetricPattern1[Sats] = MetricPattern1(client, f'{base_path}_price_open_sats') + +class CatalogTree_Price_Usd: + """Catalog tree node.""" + def __init__(self, client: BrkClientBase, base_path: str = ''): self.price_close: MetricPattern1[Dollars] = MetricPattern1(client, f'{base_path}_price_close') - self.price_close_in_cents: MetricPattern10[Cents] = MetricPattern10(client, f'{base_path}_price_close_in_cents') - self.price_high: PriceHighInSatsPattern[Dollars] = PriceHighInSatsPattern(client, 'price_high') - self.price_high_in_cents: MetricPattern10[Cents] = MetricPattern10(client, f'{base_path}_price_high_in_cents') - self.price_low: PriceLowInSatsPattern[Dollars] = PriceLowInSatsPattern(client, 'price_low') - self.price_low_in_cents: MetricPattern10[Cents] = MetricPattern10(client, f'{base_path}_price_low_in_cents') + self.price_close_cents: MetricPattern7[Cents] = MetricPattern7(client, f'{base_path}_price_close_cents') + self.price_high: PriceHighSatsPattern[Dollars] = PriceHighSatsPattern(client, 'price_high') + self.price_high_cents: MetricPattern7[Cents] = MetricPattern7(client, f'{base_path}_price_high_cents') + self.price_low: PriceHighSatsPattern[Dollars] = PriceHighSatsPattern(client, 'price_low') + self.price_low_cents: MetricPattern7[Cents] = 
MetricPattern7(client, f'{base_path}_price_low_cents') self.price_ohlc: MetricPattern1[OHLCDollars] = MetricPattern1(client, f'{base_path}_price_ohlc') self.price_open: MetricPattern1[Dollars] = MetricPattern1(client, f'{base_path}_price_open') - self.price_open_in_cents: MetricPattern10[Cents] = MetricPattern10(client, f'{base_path}_price_open_in_cents') + self.price_open_cents: MetricPattern7[Cents] = MetricPattern7(client, f'{base_path}_price_open_cents') -class CatalogTree_Computed_Scripts: +class CatalogTree_Scripts: """Catalog tree node.""" - + def __init__(self, client: BrkClientBase, base_path: str = ''): - self.count: CatalogTree_Computed_Scripts_Count = CatalogTree_Computed_Scripts_Count(client, f'{base_path}_count') - self.value: CatalogTree_Computed_Scripts_Value = CatalogTree_Computed_Scripts_Value(client, f'{base_path}_value') + self.count: CatalogTree_Scripts_Count = CatalogTree_Scripts_Count(client, f'{base_path}_count') + self.empty_to_txindex: MetricPattern22[TxIndex] = MetricPattern22(client, f'{base_path}_empty_to_txindex') + self.first_emptyoutputindex: MetricPattern23[EmptyOutputIndex] = MetricPattern23(client, f'{base_path}_first_emptyoutputindex') + self.first_opreturnindex: MetricPattern23[OpReturnIndex] = MetricPattern23(client, f'{base_path}_first_opreturnindex') + self.first_p2msoutputindex: MetricPattern23[P2MSOutputIndex] = MetricPattern23(client, f'{base_path}_first_p2msoutputindex') + self.first_unknownoutputindex: MetricPattern23[UnknownOutputIndex] = MetricPattern23(client, f'{base_path}_first_unknownoutputindex') + self.opreturn_to_txindex: MetricPattern25[TxIndex] = MetricPattern25(client, f'{base_path}_opreturn_to_txindex') + self.p2ms_to_txindex: MetricPattern28[TxIndex] = MetricPattern28(client, f'{base_path}_p2ms_to_txindex') + self.unknown_to_txindex: MetricPattern37[TxIndex] = MetricPattern37(client, f'{base_path}_unknown_to_txindex') + self.value: CatalogTree_Scripts_Value = CatalogTree_Scripts_Value(client, 
f'{base_path}_value') -class CatalogTree_Computed_Scripts_Count: +class CatalogTree_Scripts_Count: """Catalog tree node.""" - + def __init__(self, client: BrkClientBase, base_path: str = ''): - self.emptyoutput_count: DollarsPattern[StoredU64] = DollarsPattern(client, 'emptyoutput_count') - self.opreturn_count: DollarsPattern[StoredU64] = DollarsPattern(client, 'opreturn_count') - self.p2a_count: DollarsPattern[StoredU64] = DollarsPattern(client, 'p2a_count') - self.p2ms_count: DollarsPattern[StoredU64] = DollarsPattern(client, 'p2ms_count') - self.p2pk33_count: DollarsPattern[StoredU64] = DollarsPattern(client, 'p2pk33_count') - self.p2pk65_count: DollarsPattern[StoredU64] = DollarsPattern(client, 'p2pk65_count') - self.p2pkh_count: DollarsPattern[StoredU64] = DollarsPattern(client, 'p2pkh_count') - self.p2sh_count: DollarsPattern[StoredU64] = DollarsPattern(client, 'p2sh_count') - self.p2tr_count: DollarsPattern[StoredU64] = DollarsPattern(client, 'p2tr_count') - self.p2wpkh_count: DollarsPattern[StoredU64] = DollarsPattern(client, 'p2wpkh_count') - self.p2wsh_count: DollarsPattern[StoredU64] = DollarsPattern(client, 'p2wsh_count') - self.segwit_adoption: BlockCountPattern[StoredF32] = BlockCountPattern(client, 'segwit_adoption') - self.segwit_count: DollarsPattern[StoredU64] = DollarsPattern(client, 'segwit_count') - self.taproot_adoption: BlockCountPattern[StoredF32] = BlockCountPattern(client, 'taproot_adoption') - self.unknownoutput_count: DollarsPattern[StoredU64] = DollarsPattern(client, 'unknownoutput_count') + self.emptyoutput: BitcoinPattern[StoredU64] = BitcoinPattern(client, 'emptyoutput_count') + self.opreturn: BitcoinPattern[StoredU64] = BitcoinPattern(client, 'opreturn_count') + self.p2a: BitcoinPattern[StoredU64] = BitcoinPattern(client, 'p2a_count') + self.p2ms: BitcoinPattern[StoredU64] = BitcoinPattern(client, 'p2ms_count') + self.p2pk33: BitcoinPattern[StoredU64] = BitcoinPattern(client, 'p2pk33_count') + self.p2pk65: BitcoinPattern[StoredU64] 
= BitcoinPattern(client, 'p2pk65_count') + self.p2pkh: BitcoinPattern[StoredU64] = BitcoinPattern(client, 'p2pkh_count') + self.p2sh: BitcoinPattern[StoredU64] = BitcoinPattern(client, 'p2sh_count') + self.p2tr: BitcoinPattern[StoredU64] = BitcoinPattern(client, 'p2tr_count') + self.p2wpkh: BitcoinPattern[StoredU64] = BitcoinPattern(client, 'p2wpkh_count') + self.p2wsh: BitcoinPattern[StoredU64] = BitcoinPattern(client, 'p2wsh_count') + self.segwit: BitcoinPattern[StoredU64] = BitcoinPattern(client, 'segwit_count') + self.segwit_adoption: SatsPattern[StoredF32] = SatsPattern(client, 'segwit_adoption') + self.taproot_adoption: SatsPattern[StoredF32] = SatsPattern(client, 'taproot_adoption') + self.unknownoutput: BitcoinPattern[StoredU64] = BitcoinPattern(client, 'unknownoutput_count') -class CatalogTree_Computed_Scripts_Value: +class CatalogTree_Scripts_Value: """Catalog tree node.""" - + def __init__(self, client: BrkClientBase, base_path: str = ''): - self.opreturn_value: CoinbasePattern = CoinbasePattern(client, 'opreturn_value') + self.opreturn: CoinbasePattern = CoinbasePattern(client, 'opreturn_value') -class CatalogTree_Computed_Supply: +class CatalogTree_Supply: """Catalog tree node.""" - + def __init__(self, client: BrkClientBase, base_path: str = ''): - self.burned: CatalogTree_Computed_Supply_Burned = CatalogTree_Computed_Supply_Burned(client, f'{base_path}_burned') - self.circulating: CatalogTree_Computed_Supply_Circulating = CatalogTree_Computed_Supply_Circulating(client, f'{base_path}_circulating') - self.inflation: CatalogTree_Computed_Supply_Inflation = CatalogTree_Computed_Supply_Inflation(client, f'{base_path}_inflation') - self.market_cap: CatalogTree_Computed_Supply_MarketCap = CatalogTree_Computed_Supply_MarketCap(client, f'{base_path}_market_cap') - self.velocity: CatalogTree_Computed_Supply_Velocity = CatalogTree_Computed_Supply_Velocity(client, f'{base_path}_velocity') + self.burned: CatalogTree_Supply_Burned = 
CatalogTree_Supply_Burned(client, f'{base_path}_burned') + self.circulating: ActiveSupplyPattern = ActiveSupplyPattern(client, 'circulating') + self.inflation: MetricPattern4[StoredF32] = MetricPattern4(client, f'{base_path}_inflation') + self.market_cap: MetricPattern3[Dollars] = MetricPattern3(client, f'{base_path}_market_cap') + self.velocity: CatalogTree_Supply_Velocity = CatalogTree_Supply_Velocity(client, f'{base_path}_velocity') -class CatalogTree_Computed_Supply_Burned: +class CatalogTree_Supply_Burned: """Catalog tree node.""" - + def __init__(self, client: BrkClientBase, base_path: str = ''): self.opreturn: UnclaimedRewardsPattern = UnclaimedRewardsPattern(client, 'opreturn_supply') self.unspendable: UnclaimedRewardsPattern = UnclaimedRewardsPattern(client, 'unspendable_supply') -class CatalogTree_Computed_Supply_Circulating: +class CatalogTree_Supply_Velocity: """Catalog tree node.""" - - def __init__(self, client: BrkClientBase, base_path: str = ''): - self.btc: MetricPattern26[Bitcoin] = MetricPattern26(client, f'{base_path}_btc') - self.indexes: ActiveSupplyPattern = ActiveSupplyPattern(client, 'circulating') - self.sats: MetricPattern26[Sats] = MetricPattern26(client, f'{base_path}_sats') - self.usd: MetricPattern26[Dollars] = MetricPattern26(client, f'{base_path}_usd') -class CatalogTree_Computed_Supply_Inflation: - """Catalog tree node.""" - def __init__(self, client: BrkClientBase, base_path: str = ''): - self.indexes: IndexesPattern2[StoredF32] = IndexesPattern2(client, 'inflation_rate') + self.btc: MetricPattern4[StoredF64] = MetricPattern4(client, f'{base_path}_btc') + self.usd: MetricPattern4[StoredF64] = MetricPattern4(client, f'{base_path}_usd') -class CatalogTree_Computed_Supply_MarketCap: +class CatalogTree_Transactions: """Catalog tree node.""" - + def __init__(self, client: BrkClientBase, base_path: str = ''): - self.height: MetricPattern26[Dollars] = MetricPattern26(client, f'{base_path}_height') - self.indexes: MetricPattern5[Dollars] 
= MetricPattern5(client, f'{base_path}_indexes') + self.base_size: MetricPattern36[StoredU32] = MetricPattern36(client, f'{base_path}_base_size') + self.count: CatalogTree_Transactions_Count = CatalogTree_Transactions_Count(client, f'{base_path}_count') + self.fees: CatalogTree_Transactions_Fees = CatalogTree_Transactions_Fees(client, f'{base_path}_fees') + self.first_txindex: MetricPattern23[TxIndex] = MetricPattern23(client, f'{base_path}_first_txindex') + self.first_txinindex: MetricPattern36[TxInIndex] = MetricPattern36(client, f'{base_path}_first_txinindex') + self.first_txoutindex: MetricPattern36[TxOutIndex] = MetricPattern36(client, f'{base_path}_first_txoutindex') + self.height: MetricPattern36[Height] = MetricPattern36(client, f'{base_path}_height') + self.is_explicitly_rbf: MetricPattern36[StoredBool] = MetricPattern36(client, f'{base_path}_is_explicitly_rbf') + self.rawlocktime: MetricPattern36[RawLockTime] = MetricPattern36(client, f'{base_path}_rawlocktime') + self.size: CatalogTree_Transactions_Size = CatalogTree_Transactions_Size(client, f'{base_path}_size') + self.total_size: MetricPattern36[StoredU32] = MetricPattern36(client, f'{base_path}_total_size') + self.txid: MetricPattern36[Txid] = MetricPattern36(client, f'{base_path}_txid') + self.txversion: MetricPattern36[TxVersion] = MetricPattern36(client, f'{base_path}_txversion') + self.versions: CatalogTree_Transactions_Versions = CatalogTree_Transactions_Versions(client, f'{base_path}_versions') + self.volume: CatalogTree_Transactions_Volume = CatalogTree_Transactions_Volume(client, f'{base_path}_volume') -class CatalogTree_Computed_Supply_Velocity: +class CatalogTree_Transactions_Count: """Catalog tree node.""" - + def __init__(self, client: BrkClientBase, base_path: str = ''): - self.btc: IndexesPattern2[StoredF64] = IndexesPattern2(client, 'btc_velocity') - self.usd: IndexesPattern2[StoredF64] = IndexesPattern2(client, 'usd_velocity') + self.is_coinbase: MetricPattern36[StoredBool] = 
MetricPattern36(client, f'{base_path}_is_coinbase') + self.tx_count: BitcoinPattern[StoredU64] = BitcoinPattern(client, 'tx_count') -class CatalogTree_Computed_Transactions: +class CatalogTree_Transactions_Fees: """Catalog tree node.""" - + def __init__(self, client: BrkClientBase, base_path: str = ''): - self.count: CatalogTree_Computed_Transactions_Count = CatalogTree_Computed_Transactions_Count(client, f'{base_path}_count') - self.fees: CatalogTree_Computed_Transactions_Fees = CatalogTree_Computed_Transactions_Fees(client, f'{base_path}_fees') - self.size: CatalogTree_Computed_Transactions_Size = CatalogTree_Computed_Transactions_Size(client, f'{base_path}_size') - self.versions: CatalogTree_Computed_Transactions_Versions = CatalogTree_Computed_Transactions_Versions(client, f'{base_path}_versions') - self.volume: CatalogTree_Computed_Transactions_Volume = CatalogTree_Computed_Transactions_Volume(client, f'{base_path}_volume') + self.fee: CatalogTree_Transactions_Fees_Fee = CatalogTree_Transactions_Fees_Fee(client, f'{base_path}_fee') + self.fee_rate: IntervalPattern[FeeRate] = IntervalPattern(client, 'fee_rate') + self.input_value: MetricPattern36[Sats] = MetricPattern36(client, f'{base_path}_input_value') + self.output_value: MetricPattern36[Sats] = MetricPattern36(client, f'{base_path}_output_value') -class CatalogTree_Computed_Transactions_Count: +class CatalogTree_Transactions_Fees_Fee: """Catalog tree node.""" - + def __init__(self, client: BrkClientBase, base_path: str = ''): - self.is_coinbase: MetricPattern39[StoredBool] = MetricPattern39(client, f'{base_path}_is_coinbase') - self.tx_count: DollarsPattern[StoredU64] = DollarsPattern(client, 'tx_count') + self.bitcoin: BlockSizePattern[Bitcoin] = BlockSizePattern(client, 'fee_btc') + self.dollars: BlockSizePattern[Dollars] = BlockSizePattern(client, 'fee_usd') + self.sats: BitcoinPattern[Sats] = BitcoinPattern(client, 'fee') -class CatalogTree_Computed_Transactions_Fees: +class 
CatalogTree_Transactions_Size: """Catalog tree node.""" - + def __init__(self, client: BrkClientBase, base_path: str = ''): - self.fee: CatalogTree_Computed_Transactions_Fees_Fee = CatalogTree_Computed_Transactions_Fees_Fee(client, f'{base_path}_fee') - self.fee_rate: CatalogTree_Computed_Transactions_Fees_FeeRate = CatalogTree_Computed_Transactions_Fees_FeeRate(client, f'{base_path}_fee_rate') - self.input_value: MetricPattern39[Sats] = MetricPattern39(client, f'{base_path}_input_value') - self.output_value: MetricPattern39[Sats] = MetricPattern39(client, f'{base_path}_output_value') + self.tx_vsize: TxVsizePattern[VSize] = TxVsizePattern(client, 'tx_vsize') + self.tx_weight: TxVsizePattern[Weight] = TxVsizePattern(client, 'tx_weight') + self.vsize: MetricPattern36[VSize] = MetricPattern36(client, f'{base_path}_vsize') + self.weight: MetricPattern36[Weight] = MetricPattern36(client, f'{base_path}_weight') -class CatalogTree_Computed_Transactions_Fees_Fee: +class CatalogTree_Transactions_Versions: """Catalog tree node.""" - - def __init__(self, client: BrkClientBase, base_path: str = ''): - self.base: MetricPattern39[Sats] = MetricPattern39(client, f'{base_path}_base') - self.bitcoin: CatalogTree_Computed_Transactions_Fees_Fee_Bitcoin = CatalogTree_Computed_Transactions_Fees_Fee_Bitcoin(client, f'{base_path}_bitcoin') - self.dollars: CountPattern2[Dollars] = CountPattern2(client, 'fee_usd') - self.sats: CountPattern2[Sats] = CountPattern2(client, 'fee') -class CatalogTree_Computed_Transactions_Fees_Fee_Bitcoin: - """Catalog tree node.""" - - def __init__(self, client: BrkClientBase, base_path: str = ''): - self.average: MetricPattern1[Bitcoin] = MetricPattern1(client, f'{base_path}_average') - self.cumulative: MetricPattern1[Bitcoin] = MetricPattern1(client, f'{base_path}_cumulative') - self.max: MetricPattern1[Bitcoin] = MetricPattern1(client, f'{base_path}_max') - self.min: MetricPattern1[Bitcoin] = MetricPattern1(client, f'{base_path}_min') - self.sum: 
MetricPattern1[Bitcoin] = MetricPattern1(client, f'{base_path}_sum') - -class CatalogTree_Computed_Transactions_Fees_FeeRate: - """Catalog tree node.""" - - def __init__(self, client: BrkClientBase, base_path: str = ''): - self.average: MetricPattern1[FeeRate] = MetricPattern1(client, f'{base_path}_average') - self.base: MetricPattern39[FeeRate] = MetricPattern39(client, f'{base_path}_base') - self.max: MetricPattern1[FeeRate] = MetricPattern1(client, f'{base_path}_max') - self.min: MetricPattern1[FeeRate] = MetricPattern1(client, f'{base_path}_min') - self.percentiles: PercentilesPattern[FeeRate] = PercentilesPattern(client, 'fee_rate') - -class CatalogTree_Computed_Transactions_Size: - """Catalog tree node.""" - - def __init__(self, client: BrkClientBase, base_path: str = ''): - self.tx_vsize: BlockIntervalPattern[VSize] = BlockIntervalPattern(client, 'tx_vsize') - self.tx_weight: BlockIntervalPattern[Weight] = BlockIntervalPattern(client, 'tx_weight') - self.vsize: MetricPattern39[VSize] = MetricPattern39(client, f'{base_path}_vsize') - self.weight: MetricPattern39[Weight] = MetricPattern39(client, f'{base_path}_weight') - -class CatalogTree_Computed_Transactions_Versions: - """Catalog tree node.""" - def __init__(self, client: BrkClientBase, base_path: str = ''): self.tx_v1: BlockCountPattern[StoredU64] = BlockCountPattern(client, 'tx_v1') self.tx_v2: BlockCountPattern[StoredU64] = BlockCountPattern(client, 'tx_v2') self.tx_v3: BlockCountPattern[StoredU64] = BlockCountPattern(client, 'tx_v3') -class CatalogTree_Computed_Transactions_Volume: +class CatalogTree_Transactions_Volume: """Catalog tree node.""" - - def __init__(self, client: BrkClientBase, base_path: str = ''): - self.annualized_volume: MetricPattern5[Sats] = MetricPattern5(client, f'{base_path}_annualized_volume') - self.annualized_volume_btc: MetricPattern5[Bitcoin] = MetricPattern5(client, f'{base_path}_annualized_volume_btc') - self.annualized_volume_usd: MetricPattern5[Dollars] = 
MetricPattern5(client, f'{base_path}_annualized_volume_usd') - self.inputs_per_sec: MetricPattern5[StoredF32] = MetricPattern5(client, f'{base_path}_inputs_per_sec') - self.outputs_per_sec: MetricPattern5[StoredF32] = MetricPattern5(client, f'{base_path}_outputs_per_sec') - self.sent_sum: CatalogTree_Computed_Transactions_Volume_SentSum = CatalogTree_Computed_Transactions_Volume_SentSum(client, f'{base_path}_sent_sum') - self.tx_per_sec: MetricPattern5[StoredF32] = MetricPattern5(client, f'{base_path}_tx_per_sec') -class CatalogTree_Computed_Transactions_Volume_SentSum: - """Catalog tree node.""" - def __init__(self, client: BrkClientBase, base_path: str = ''): - self.bitcoin: MetricPattern1[Bitcoin] = MetricPattern1(client, f'{base_path}_bitcoin') - self.dollars: DifficultyAdjustmentPattern[Dollars] = DifficultyAdjustmentPattern(client, 'sent_sum_usd') - self.sats: DifficultyAdjustmentPattern[Sats] = DifficultyAdjustmentPattern(client, 'sent_sum') - -class CatalogTree_Indexed: - """Catalog tree node.""" - - def __init__(self, client: BrkClientBase, base_path: str = ''): - self.address: CatalogTree_Indexed_Address = CatalogTree_Indexed_Address(client, f'{base_path}_address') - self.block: CatalogTree_Indexed_Block = CatalogTree_Indexed_Block(client, f'{base_path}_block') - self.output: CatalogTree_Indexed_Output = CatalogTree_Indexed_Output(client, f'{base_path}_output') - self.tx: CatalogTree_Indexed_Tx = CatalogTree_Indexed_Tx(client, f'{base_path}_tx') - self.txin: CatalogTree_Indexed_Txin = CatalogTree_Indexed_Txin(client, f'{base_path}_txin') - self.txout: CatalogTree_Indexed_Txout = CatalogTree_Indexed_Txout(client, f'{base_path}_txout') - -class CatalogTree_Indexed_Address: - """Catalog tree node.""" - - def __init__(self, client: BrkClientBase, base_path: str = ''): - self.first_p2aaddressindex: MetricPattern26[P2AAddressIndex] = MetricPattern26(client, f'{base_path}_first_p2aaddressindex') - self.first_p2pk33addressindex: 
MetricPattern26[P2PK33AddressIndex] = MetricPattern26(client, f'{base_path}_first_p2pk33addressindex') - self.first_p2pk65addressindex: MetricPattern26[P2PK65AddressIndex] = MetricPattern26(client, f'{base_path}_first_p2pk65addressindex') - self.first_p2pkhaddressindex: MetricPattern26[P2PKHAddressIndex] = MetricPattern26(client, f'{base_path}_first_p2pkhaddressindex') - self.first_p2shaddressindex: MetricPattern26[P2SHAddressIndex] = MetricPattern26(client, f'{base_path}_first_p2shaddressindex') - self.first_p2traddressindex: MetricPattern26[P2TRAddressIndex] = MetricPattern26(client, f'{base_path}_first_p2traddressindex') - self.first_p2wpkhaddressindex: MetricPattern26[P2WPKHAddressIndex] = MetricPattern26(client, f'{base_path}_first_p2wpkhaddressindex') - self.first_p2wshaddressindex: MetricPattern26[P2WSHAddressIndex] = MetricPattern26(client, f'{base_path}_first_p2wshaddressindex') - self.p2abytes: MetricPattern30[P2ABytes] = MetricPattern30(client, f'{base_path}_p2abytes') - self.p2pk33bytes: MetricPattern32[P2PK33Bytes] = MetricPattern32(client, f'{base_path}_p2pk33bytes') - self.p2pk65bytes: MetricPattern33[P2PK65Bytes] = MetricPattern33(client, f'{base_path}_p2pk65bytes') - self.p2pkhbytes: MetricPattern34[P2PKHBytes] = MetricPattern34(client, f'{base_path}_p2pkhbytes') - self.p2shbytes: MetricPattern35[P2SHBytes] = MetricPattern35(client, f'{base_path}_p2shbytes') - self.p2trbytes: MetricPattern36[P2TRBytes] = MetricPattern36(client, f'{base_path}_p2trbytes') - self.p2wpkhbytes: MetricPattern37[P2WPKHBytes] = MetricPattern37(client, f'{base_path}_p2wpkhbytes') - self.p2wshbytes: MetricPattern38[P2WSHBytes] = MetricPattern38(client, f'{base_path}_p2wshbytes') - -class CatalogTree_Indexed_Block: - """Catalog tree node.""" - - def __init__(self, client: BrkClientBase, base_path: str = ''): - self.blockhash: MetricPattern26[BlockHash] = MetricPattern26(client, f'{base_path}_blockhash') - self.difficulty: MetricPattern26[StoredF64] = MetricPattern26(client, 
f'{base_path}_difficulty') - self.timestamp: MetricPattern26[Timestamp] = MetricPattern26(client, f'{base_path}_timestamp') - self.total_size: MetricPattern26[StoredU64] = MetricPattern26(client, f'{base_path}_total_size') - self.weight: MetricPattern26[Weight] = MetricPattern26(client, f'{base_path}_weight') - -class CatalogTree_Indexed_Output: - """Catalog tree node.""" - - def __init__(self, client: BrkClientBase, base_path: str = ''): - self.first_emptyoutputindex: MetricPattern26[EmptyOutputIndex] = MetricPattern26(client, f'{base_path}_first_emptyoutputindex') - self.first_opreturnindex: MetricPattern26[OpReturnIndex] = MetricPattern26(client, f'{base_path}_first_opreturnindex') - self.first_p2msoutputindex: MetricPattern26[P2MSOutputIndex] = MetricPattern26(client, f'{base_path}_first_p2msoutputindex') - self.first_unknownoutputindex: MetricPattern26[UnknownOutputIndex] = MetricPattern26(client, f'{base_path}_first_unknownoutputindex') - self.txindex: MetricPattern8[TxIndex] = MetricPattern8(client, f'{base_path}_txindex') - -class CatalogTree_Indexed_Tx: - """Catalog tree node.""" - - def __init__(self, client: BrkClientBase, base_path: str = ''): - self.base_size: MetricPattern39[StoredU32] = MetricPattern39(client, f'{base_path}_base_size') - self.first_txindex: MetricPattern26[TxIndex] = MetricPattern26(client, f'{base_path}_first_txindex') - self.first_txinindex: MetricPattern39[TxInIndex] = MetricPattern39(client, f'{base_path}_first_txinindex') - self.first_txoutindex: MetricPattern39[TxOutIndex] = MetricPattern39(client, f'{base_path}_first_txoutindex') - self.height: MetricPattern39[Height] = MetricPattern39(client, f'{base_path}_height') - self.is_explicitly_rbf: MetricPattern39[StoredBool] = MetricPattern39(client, f'{base_path}_is_explicitly_rbf') - self.rawlocktime: MetricPattern39[RawLockTime] = MetricPattern39(client, f'{base_path}_rawlocktime') - self.total_size: MetricPattern39[StoredU32] = MetricPattern39(client, f'{base_path}_total_size') 
- self.txid: MetricPattern39[Txid] = MetricPattern39(client, f'{base_path}_txid') - self.txversion: MetricPattern39[TxVersion] = MetricPattern39(client, f'{base_path}_txversion') - -class CatalogTree_Indexed_Txin: - """Catalog tree node.""" - - def __init__(self, client: BrkClientBase, base_path: str = ''): - self.first_txinindex: MetricPattern26[TxInIndex] = MetricPattern26(client, f'{base_path}_first_txinindex') - self.outpoint: MetricPattern27[OutPoint] = MetricPattern27(client, f'{base_path}_outpoint') - self.outputtype: MetricPattern27[OutputType] = MetricPattern27(client, f'{base_path}_outputtype') - self.txindex: MetricPattern27[TxIndex] = MetricPattern27(client, f'{base_path}_txindex') - self.typeindex: MetricPattern27[TypeIndex] = MetricPattern27(client, f'{base_path}_typeindex') - -class CatalogTree_Indexed_Txout: - """Catalog tree node.""" - - def __init__(self, client: BrkClientBase, base_path: str = ''): - self.first_txoutindex: MetricPattern26[TxOutIndex] = MetricPattern26(client, f'{base_path}_first_txoutindex') - self.outputtype: MetricPattern29[OutputType] = MetricPattern29(client, f'{base_path}_outputtype') - self.txindex: MetricPattern29[TxIndex] = MetricPattern29(client, f'{base_path}_txindex') - self.typeindex: MetricPattern29[TypeIndex] = MetricPattern29(client, f'{base_path}_typeindex') - self.value: MetricPattern29[Sats] = MetricPattern29(client, f'{base_path}_value') + self.annualized_volume: MetricPattern4[Sats] = MetricPattern4(client, f'{base_path}_annualized_volume') + self.annualized_volume_btc: MetricPattern4[Bitcoin] = MetricPattern4(client, f'{base_path}_annualized_volume_btc') + self.annualized_volume_usd: MetricPattern4[Dollars] = MetricPattern4(client, f'{base_path}_annualized_volume_usd') + self.inputs_per_sec: MetricPattern4[StoredF32] = MetricPattern4(client, f'{base_path}_inputs_per_sec') + self.outputs_per_sec: MetricPattern4[StoredF32] = MetricPattern4(client, f'{base_path}_outputs_per_sec') + self.sent_sum: 
ActiveSupplyPattern = ActiveSupplyPattern(client, 'sent_sum') + self.tx_per_sec: MetricPattern4[StoredF32] = MetricPattern4(client, f'{base_path}_tx_per_sec') class BrkClient(BrkClientBase): """Main BRK client with catalog tree and API methods.""" @@ -5231,29 +4928,29 @@ class BrkClient(BrkClientBase): Returns the list of indexes are supported by the specified metric. For example, `realized_price` might be available on dateindex, weekindex, and monthindex.""" return self.get(f'/api/metric/{metric}') - def get_metric_by_index(self, metric: Metric, index: Index, from_: Optional[Any] = None, to: Optional[Any] = None, count: Optional[Any] = None, format: Optional[Format] = None) -> AnyMetricData: + def get_metric_by_index(self, index: Index, metric: Metric, count: Optional[Any] = None, format: Optional[Format] = None, from_: Optional[Any] = None, to: Optional[Any] = None) -> AnyMetricData: """Get metric data. Fetch data for a specific metric at the given index. Use query parameters to filter by date range and format (json/csv).""" params = [] - if from_ is not None: params.append(f'from={from_}') - if to is not None: params.append(f'to={to}') if count is not None: params.append(f'count={count}') if format is not None: params.append(f'format={format}') + if from_ is not None: params.append(f'from={from_}') + if to is not None: params.append(f'to={to}') query = '&'.join(params) return self.get(f'/api/metric/{metric}/{index}{"?" + query if query else ""}') - def get_metrics_bulk(self, metrics: Metrics, index: Index, from_: Optional[Any] = None, to: Optional[Any] = None, count: Optional[Any] = None, format: Optional[Format] = None) -> List[AnyMetricData]: + def get_metrics_bulk(self, index: Index, metrics: Metrics, count: Optional[Any] = None, format: Optional[Format] = None, from_: Optional[Any] = None, to: Optional[Any] = None) -> List[AnyMetricData]: """Bulk metric data. Fetch multiple metrics in a single request. Supports filtering by index and date range. 
Returns an array of MetricData objects.""" params = [] - params.append(f'metrics={metrics}') - params.append(f'index={index}') - if from_ is not None: params.append(f'from={from_}') - if to is not None: params.append(f'to={to}') if count is not None: params.append(f'count={count}') if format is not None: params.append(f'format={format}') + if from_ is not None: params.append(f'from={from_}') + params.append(f'index={index}') + params.append(f'metrics={metrics}') + if to is not None: params.append(f'to={to}') query = '&'.join(params) return self.get(f'/api/metrics/bulk{"?" + query if query else ""}') @@ -5430,4 +5127,3 @@ class BrkClient(BrkClientBase): Returns the current version of the API server""" return self.get('/version') - diff --git a/scripts/publish.sh b/scripts/publish.sh index d3194ea46..dff3f16c4 100755 --- a/scripts/publish.sh +++ b/scripts/publish.sh @@ -3,7 +3,6 @@ set -e # Order determined by topological sort of dependency graph CRATES=( - brk_bundler brk_error brk_logger brk_traversable_derive diff --git a/websites/bitview/index.html b/websites/bitview/index.html index 31f13f3f7..0ca5651ec 100644 --- a/websites/bitview/index.html +++ b/websites/bitview/index.html @@ -1557,6 +1557,149 @@ } + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/websites/bitview/scripts/options/chain.js b/websites/bitview/scripts/options/chain.js index 4268ed81b..b6e1ad284 100644 --- a/websites/bitview/scripts/options/chain.js +++ b/websites/bitview/scripts/options/chain.js @@ -9,7 +9,6 @@ import { Unit } from "../utils/units.js"; */ export function createChainSection(ctx) { const { colors, brk, s, createPriceLine } = ctx; - const { mergeMetricPatterns } = brk; const { blocks, transactions, @@ -33,7 +32,7 @@ export function createChainSection(ctx) { */ const fromBlockCount = (pattern, name, unit, sumColor, cumulativeColor) => [ s({ - metric: 
mergeMetricPatterns(pattern.base, pattern.sum), + metric: pattern.sum, name: `${name} sum`, color: sumColor, unit, @@ -55,7 +54,7 @@ export function createChainSection(ctx) { * @param {Unit} unit */ const fromBlockSize = (pattern, name, unit) => [ - s({ metric: pattern.distribution.average, name: `${name} avg`, unit }), + s({ metric: pattern.average, name: `${name} avg`, unit }), s({ metric: pattern.sum, name: `${name} sum`, @@ -85,35 +84,35 @@ export function createChainSection(ctx) { defaultActive: false, }), s({ - metric: pattern.distribution.percentiles.pct10, + metric: pattern.percentiles.pct10, name: `${name} pct10`, color: colors.rose, unit, defaultActive: false, }), s({ - metric: pattern.distribution.percentiles.pct25, + metric: pattern.percentiles.pct25, name: `${name} pct25`, color: colors.pink, unit, defaultActive: false, }), s({ - metric: pattern.distribution.percentiles.median, + metric: pattern.percentiles.median, name: `${name} median`, color: colors.purple, unit, defaultActive: false, }), s({ - metric: pattern.distribution.percentiles.pct75, + metric: pattern.percentiles.pct75, name: `${name} pct75`, color: colors.violet, unit, defaultActive: false, }), s({ - metric: pattern.distribution.percentiles.pct90, + metric: pattern.percentiles.pct90, name: `${name} pct90`, color: colors.fuchsia, unit, @@ -159,35 +158,35 @@ export function createChainSection(ctx) { defaultActive: false, }), s({ - metric: pattern.distribution.percentiles.pct10, + metric: pattern.percentiles.pct10, name: `${name} pct10`, color: colors.rose, unit, defaultActive: false, }), s({ - metric: pattern.distribution.percentiles.pct25, + metric: pattern.percentiles.pct25, name: `${name} pct25`, color: colors.pink, unit, defaultActive: false, }), s({ - metric: pattern.distribution.percentiles.median, + metric: pattern.percentiles.median, name: `${name} median`, color: colors.purple, unit, defaultActive: false, }), s({ - metric: pattern.distribution.percentiles.pct75, + metric: 
pattern.percentiles.pct75, name: `${name} pct75`, color: colors.violet, unit, defaultActive: false, }), s({ - metric: pattern.distribution.percentiles.pct90, + metric: pattern.percentiles.pct90, name: `${name} pct90`, color: colors.fuchsia, unit, @@ -335,7 +334,7 @@ export function createChainSection(ctx) { */ const fromValuePattern = (pattern, name, sumColor, cumulativeColor) => [ s({ - metric: pattern.sats.base, + metric: pattern.sats.sum, name: `${name}`, color: sumColor, unit: Unit.sats, @@ -348,7 +347,7 @@ export function createChainSection(ctx) { defaultActive: false, }), s({ - metric: pattern.bitcoin.base, + metric: pattern.bitcoin.sum, name: `${name}`, color: sumColor, unit: Unit.btc, @@ -361,7 +360,7 @@ export function createChainSection(ctx) { defaultActive: false, }), s({ - metric: pattern.dollars.base, + metric: pattern.dollars.sum, name: `${name}`, color: sumColor, unit: Unit.usd, @@ -398,7 +397,7 @@ export function createChainSection(ctx) { defaultActive: false, }), s({ - metric: pattern.bitcoin.base, + metric: pattern.bitcoin.sum, name: `${name}`, color: sumColor, unit: Unit.btc, @@ -439,16 +438,6 @@ export function createChainSection(ctx) { name: "Dominance", title: `Mining Dominance of ${poolName}`, bottom: [ - s({ - metric: mergeMetricPatterns( - pool._1dDominance.base, - pool._1dDominance.sum, - ), - name: "1d", - color: colors.rose, - unit: Unit.percentage, - defaultActive: false, - }), s({ metric: pool._1wDominance, name: "1w", @@ -465,10 +454,7 @@ export function createChainSection(ctx) { defaultActive: false, }), s({ - metric: mergeMetricPatterns( - pool.dominance.base, - pool.dominance.sum, - ), + metric: pool.dominance, name: "all time", color: colors.teal, unit: Unit.percentage, @@ -481,10 +467,7 @@ export function createChainSection(ctx) { title: `Blocks mined by ${poolName}`, bottom: [ s({ - metric: mergeMetricPatterns( - pool.blocksMined.base, - pool.blocksMined.sum, - ), + metric: pool.blocksMined.sum, name: "Sum", unit: Unit.count, 
}), @@ -650,10 +633,7 @@ export function createChainSection(ctx) { title: "Transaction Volume", bottom: [ s({ - metric: mergeMetricPatterns( - transactions.volume.sentSum.sats.base, - transactions.volume.sentSum.sats.rest, - ), + metric: transactions.volume.sentSum.sats.sum, name: "Sent", unit: Unit.sats, }), @@ -663,10 +643,7 @@ export function createChainSection(ctx) { unit: Unit.btc, }), s({ - metric: mergeMetricPatterns( - transactions.volume.sentSum.dollars.base, - transactions.volume.sentSum.dollars.rest, - ), + metric: transactions.volume.sentSum.dollars, name: "Sent", unit: Unit.usd, }), diff --git a/websites/bitview/scripts/options/cohorts/address.js b/websites/bitview/scripts/options/cohorts/address.js index a4ac23a86..ab970705f 100644 --- a/websites/bitview/scripts/options/cohorts/address.js +++ b/websites/bitview/scripts/options/cohorts/address.js @@ -198,7 +198,6 @@ function createRealizedCapWithExtras(ctx, list, args, useGroupName) { */ function createRealizedPnlSection(ctx, args, title) { const { colors, s } = ctx; - const { mergeMetricPatterns } = ctx.brk; const { realized } = args.tree; return [ @@ -234,10 +233,13 @@ function createRealizedPnlSection(ctx, args, title) { unit: Unit.usd, }), s({ - metric: mergeMetricPatterns( - realized.negRealizedLoss.base, - realized.negRealizedLoss.sum, - ), + metric: realized.negRealizedLoss.sum, + name: "Negative Loss", + color: colors.red, + unit: Unit.usd, + }), + s({ + metric: realized.negRealizedLoss.cumulative, name: "Negative Loss", color: colors.red, unit: Unit.usd, @@ -360,7 +362,6 @@ function createCostBasisSection(ctx, list, useGroupName, title) { */ function createActivitySection(ctx, list, useGroupName, title) { const { s, brk } = ctx; - const { mergeMetricPatterns } = brk; return [ { @@ -371,10 +372,13 @@ function createActivitySection(ctx, list, useGroupName, title) { title: `Coinblocks Destroyed ${title}`, bottom: list.flatMap(({ color, name, tree }) => [ s({ - metric: mergeMetricPatterns( - 
tree.activity.coinblocksDestroyed.base, - tree.activity.coinblocksDestroyed.sum, - ), + metric: tree.activity.coinblocksDestroyed.sum, + name: useGroupName ? name : "Coinblocks", + color, + unit: Unit.coinblocks, + }), + s({ + metric: tree.activity.coinblocksDestroyed.cumulative, name: useGroupName ? name : "Coinblocks", color, unit: Unit.coinblocks, @@ -386,10 +390,13 @@ function createActivitySection(ctx, list, useGroupName, title) { title: `Coindays Destroyed ${title}`, bottom: list.flatMap(({ color, name, tree }) => [ s({ - metric: mergeMetricPatterns( - tree.activity.coindaysDestroyed.base, - tree.activity.coindaysDestroyed.sum, - ), + metric: tree.activity.coindaysDestroyed.sum, + name: useGroupName ? name : "Coindays", + color, + unit: Unit.coindays, + }), + s({ + metric: tree.activity.coindaysDestroyed.cumulative, name: useGroupName ? name : "Coindays", color, unit: Unit.coindays, diff --git a/websites/bitview/scripts/options/cohorts/utxo.js b/websites/bitview/scripts/options/cohorts/utxo.js index fba34a345..c7595dbe8 100644 --- a/websites/bitview/scripts/options/cohorts/utxo.js +++ b/websites/bitview/scripts/options/cohorts/utxo.js @@ -228,7 +228,12 @@ function createRealizedPriceOptions(ctx, args, title) { name: "price", title: `Realized Price ${title}`, top: [ - s({ metric: tree.realized.realizedPrice, name: "realized", color, unit: Unit.usd }), + s({ + metric: tree.realized.realizedPrice, + name: "realized", + color, + unit: Unit.usd, + }), ], }, ]; @@ -275,7 +280,11 @@ function createRealizedCapWithExtras(ctx, list, args, useGroupName) { options: { baseValue: { price: 100 } }, colors: [colors.red, colors.green], }), - createPriceLine({ unit: Unit.pctOwnMcap, defaultActive: true, number: 100 }), + createPriceLine({ + unit: Unit.pctOwnMcap, + defaultActive: true, + number: 100, + }), ] : []), ]); @@ -290,7 +299,6 @@ function createRealizedCapWithExtras(ctx, list, args, useGroupName) { */ function createRealizedPnlSection(ctx, args, title) { const { 
colors, s, brk } = ctx; - const { mergeMetricPatterns } = brk; const { tree } = args; return [ @@ -335,10 +343,13 @@ function createRealizedPnlSection(ctx, args, title) { unit: Unit.usd, }), s({ - metric: mergeMetricPatterns( - tree.realized.negRealizedLoss.base, - tree.realized.negRealizedLoss.sum, - ), + metric: tree.realized.negRealizedLoss.sum, + name: "Negative Loss", + color: colors.red, + unit: Unit.usd, + }), + s({ + metric: tree.realized.negRealizedLoss.cumulative, name: "Negative Loss", color: colors.red, unit: Unit.usd, @@ -509,7 +520,6 @@ function createCostBasisSectionBasic(ctx, list, useGroupName, title) { */ function createActivitySection(ctx, list, useGroupName, title) { const { s, brk } = ctx; - const { mergeMetricPatterns } = brk; return [ { diff --git a/websites/bitview/scripts/options/cointime.js b/websites/bitview/scripts/options/cointime.js index eacde4ff5..8901f9fa5 100644 --- a/websites/bitview/scripts/options/cointime.js +++ b/websites/bitview/scripts/options/cointime.js @@ -167,7 +167,6 @@ function createCointimePriceWithRatioOptions( */ export function createCointimeSection(ctx) { const { colors, brk, s } = ctx; - const { mergeMetricPatterns } = brk; const { cointime, distribution, supply } = brk.tree.computed; const { pricing, cap, activity, supply: cointimeSupply, adjusted } = cointime; const { all } = distribution.utxoCohorts; @@ -381,10 +380,7 @@ export function createCointimeSection(ctx) { bottom: [ // Destroyed comes from the all cohort's activity s({ - metric: mergeMetricPatterns( - all.activity.coinblocksDestroyed.base, - all.activity.coinblocksDestroyed.sum, - ), + metric: all.activity.coinblocksDestroyed.sum, name: "Destroyed", color: colors.red, unit: Unit.coinblocks, @@ -398,10 +394,7 @@ export function createCointimeSection(ctx) { }), // Created and stored from cointime s({ - metric: mergeMetricPatterns( - activity.coinblocksCreated.base, - activity.coinblocksCreated.sum, - ), + metric: activity.coinblocksCreated.sum, name: 
"Created", color: colors.orange, unit: Unit.coinblocks, @@ -414,10 +407,7 @@ export function createCointimeSection(ctx) { unit: Unit.coinblocks, }), s({ - metric: mergeMetricPatterns( - activity.coinblocksStored.base, - activity.coinblocksStored.sum, - ), + metric: activity.coinblocksStored.sum, name: "Stored", color: colors.green, unit: Unit.coinblocks, @@ -464,10 +454,7 @@ export function createCointimeSection(ctx) { title: "Cointime-Adjusted Transactions Velocity", bottom: [ s({ - metric: mergeMetricPatterns( - supply.velocity.btc.dateindex, - supply.velocity.btc.rest, - ), + metric: supply.velocity.btc.dateindex, name: "BTC", color: colors.orange, unit: Unit.ratio, diff --git a/websites/bitview/service-worker.js b/websites/bitview/service-worker.js index f423cd195..7a9928e83 100644 --- a/websites/bitview/service-worker.js +++ b/websites/bitview/service-worker.js @@ -1,114 +1,96 @@ -// DO NOT CHANGE, Exact format is expected in `brk_bundler` -const CACHE_VERSION = "__VERSION__"; +const CACHE = "v1"; +const ROOT = "/"; +const API = "/api"; -const SHELL_FILES = ["/", "/index.html"]; +const BYPASS = new Set([ + "/changelog", "/crate", "/discord", "/github", "/health", + "/install", "/mcp", "/nostr", "/service", "/status", "/version" +]); + +// Match hashed filenames: name.abc12345.js/mjs/css +const HASHED_RE = /\.[0-9a-f]{8}\.(js|mjs|css)$/; /** @type {ServiceWorkerGlobalScope} */ const sw = /** @type {any} */ (self); -sw.addEventListener("install", (event) => { - console.log("sw: install"); - event.waitUntil( - caches - .open(CACHE_VERSION) - .then((c) => c.addAll(SHELL_FILES)) - .then(() => sw.skipWaiting()), +const offline = () => new Response("Offline", { + status: 503, + headers: { "Content-Type": "text/plain" } +}); + +sw.addEventListener("install", (e) => { + e.waitUntil( + caches.open(CACHE) + .then((c) => c.addAll([ROOT])) + .then(() => sw.skipWaiting()) ); }); -sw.addEventListener("activate", (event) => { - console.log("sw: active"); - event.waitUntil( 
+sw.addEventListener("activate", (e) => { + e.waitUntil( Promise.all([ sw.clients.claim(), - caches - .keys() - .then((keys) => - Promise.all( - keys - .filter((key) => key !== CACHE_VERSION) - .map((key) => caches.delete(key)), - ), - ), - ]), + caches.keys().then((keys) => + Promise.all(keys.filter((k) => k !== CACHE).map((k) => caches.delete(k))) + ), + ]) ); }); -async function indexHTMLOrOffline() { - return caches.match("/index.html").then((cached) => { - if (cached) return cached; - return new Response("Offline and no cached version", { - status: 503, - statusText: "Service Unavailable", - headers: { "Content-Type": "text/plain" }, - }); - }); -} - sw.addEventListener("fetch", (event) => { const req = event.request; const url = new URL(req.url); - // 1) Bypass API calls & non-GETs - if ( - req.method !== "GET" || - url.pathname.startsWith("/api") || - url.pathname === "/changelog" || - url.pathname === "/crate" || - url.pathname === "/discord" || - url.pathname === "/github" || - url.pathname === "/health" || - url.pathname === "/install" || - url.pathname === "/mcp" || - url.pathname === "/nostr" || - url.pathname === "/service" || - url.pathname === "/status" || - url.pathname === "/version" - ) { - return; // let the browser handle it - } + // Only handle same-origin GET requests + if (req.method !== "GET" || url.origin !== location.origin) return; - const cache = caches.open(CACHE_VERSION); + const path = url.pathname; - // 2) NAVIGATION: network‐first on your shell + // Bypass API and redirects + if (path.startsWith(API) || BYPASS.has(path)) return; + + // Navigation: network-first for shell if (req.mode === "navigate") { event.respondWith( - // Always fetch index.html - fetch("/index.html") - .then((response) => { - // If we got a valid 2xx back, cache it (optional) and return it - if (response.ok || response.status === 304) { - if (response.ok) { - const clone = response.clone(); - cache.then((cache) => cache.put("/index.html", clone)); - } - return 
response; - } - throw new Error("Non-2xx on shell"); + fetch(ROOT) + .then((res) => { + if (res.ok) { const copy = res.clone(); caches.open(CACHE).then((c) => c.put(ROOT, copy)); } + return res; + }) - // On any failure, fall back to the cached shell - .catch(indexHTMLOrOffline), + .catch(() => caches.match(ROOT).then((c) => c || offline())) ); return; } - // 3) For all other GETs: network-first, fallback to cache + // Hashed assets: cache-first (immutable) + if (HASHED_RE.test(path)) { + event.respondWith( + caches.match(req) + .then((cached) => + cached || + fetch(req).then((res) => { + if (res.ok) { const copy = res.clone(); caches.open(CACHE).then((c) => c.put(req, copy)); } + return res; + }) + ) + .catch(() => offline()) + ); + return; + } + + // Other: network-first with cache fallback + // SPA routes (no extension) fall back to ROOT, static assets get 503 + const isStatic = path.includes(".") && !path.endsWith(".html"); event.respondWith( fetch(req) - .then((response) => { - if (response.ok) { - const clone = response.clone(); - cache.then((cache) => cache.put(req, clone)); - } - return response; + .then((res) => { + if (res.ok) { const copy = res.clone(); caches.open(CACHE).then((c) => c.put(req, copy)); } + return res; + }) - .catch(async () => - caches - .match(req) - .then((cached) => { - return cached || indexHTMLOrOffline(); - }) - .catch(indexHTMLOrOffline), - ), + .catch(() => + caches.match(req).then((cached) => + cached || (isStatic ? offline() : caches.match(ROOT).then((c) => c || offline())) + ) + ) ); });